<|start_filename|>AppCompatCache/AppCompatCache.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Alphaleonis.Win32.Security;
using NLog;
using RawCopy;
using Registry;
using Registry.Abstractions;
using Directory = Alphaleonis.Win32.Filesystem.Directory;
using File = Alphaleonis.Win32.Filesystem.File;
using Path = Alphaleonis.Win32.Filesystem.Path;
namespace AppCompatCache
{
public class AppCompatCache
{
public enum Execute
{
Yes,
No,
NA
}
[Flags]
public enum InsertFlag
{
Unknown1 = 0x00000001,
Executed = 0x00000002,
Unknown4 = 0x00000004,
Unknown8 = 0x00000008,
Unknown10 = 0x00000010,
Unknown20 = 0x00000020,
Unknown40 = 0x00000040,
Unknown80 = 0x00000080,
Unknown10000 = 0x00010000,
Unknown20000 = 0x00020000,
Unknown30000 = 0x00030000,
Unknown40000 = 0x00040000,
Unknown100000 = 0x00100000,
Unknown200000 = 0x00200000,
Unknown400000 = 0x00400000,
Unknown800000 = 0x00800000
}
public enum OperatingSystemVersion
{
WindowsXP,
WindowsVistaWin2k3Win2k8,
Windows7x86,
Windows7x64_Windows2008R2,
Windows80_Windows2012,
Windows81_Windows2012R2,
Windows10,
Windows10Creators,
Unknown
}
private readonly Logger _logger = LogManager.GetLogger("AppCompatCache");
public AppCompatCache(byte[] rawBytes, int controlSet, bool is32Bit)
{
Caches = new List<IAppCompatCache>();
var cache = Init(rawBytes, is32Bit, controlSet);
Caches.Add(cache);
}
public AppCompatCache(string filename, int controlSet, bool noLogs)
{
byte[] rawBytes = null;
Caches = new List<IAppCompatCache>();
var controlSetIds = new List<int>();
RegistryKey subKey = null;
var isLiveRegistry = string.IsNullOrEmpty(filename);
if (isLiveRegistry)
{
var keyCurrUser = Microsoft.Win32.Registry.LocalMachine;
var subKey2 =
keyCurrUser.OpenSubKey(@"SYSTEM\CurrentControlSet\Control\Session Manager\AppCompatCache");
if (subKey2 == null)
{
subKey2 =
keyCurrUser.OpenSubKey(@"SYSTEM\CurrentControlSet\Control\Session Manager\AppCompatibility");
if (subKey2 == null)
{
Console.WriteLine(
@"'CurrentControlSet\Control\Session Manager\AppCompatCache' key not found! Exiting");
return;
}
}
rawBytes = (byte[]) subKey2.GetValue("AppCompatCache", null);
subKey2 = keyCurrUser.OpenSubKey(@"SYSTEM\Select");
ControlSet = (int) subKey2.GetValue("Current");
var is32Bit = Is32Bit(filename, null);
var cache = Init(rawBytes, is32Bit, ControlSet);
Caches.Add(cache);
return;
}
RegistryHive reg;
Privilege[] privileges = {Privilege.EnableDelegation, Privilege.Impersonate, Privilege.Tcb};
using (new PrivilegeEnabler(Privilege.Backup, privileges))
{
ControlSet = controlSet;
if (File.Exists(filename) == false && Helper.RawFileExists(filename) == false)
{
throw new FileNotFoundException($"File not found ({filename})!");
}
var dirname = Path.GetDirectoryName(Path.GetFullPath(filename));
var hiveBase = Path.GetFileName(filename);
List<RawCopyReturn> rawFiles = null;
try
{
reg = new RegistryHive(filename)
{
RecoverDeleted = true
};
}
catch (IOException)
{
//file is in use
if (Helper.IsAdministrator() == false)
{
throw new UnauthorizedAccessException("Administrator privileges not found!");
}
_logger.Warn($"'{filename}' is in use. Rerouting...\r\n");
var files = new List<string>();
files.Add(filename);
var logFiles = Directory.GetFiles(dirname, $"{hiveBase}.LOG?").ToList();
var log1 = $"{dirname}\\{hiveBase}.LOG1";
var log2 = $"{dirname}\\{hiveBase}.LOG2";
if (logFiles.Count == 0)
{
if (Helper.RawFileExists(log1))
{
logFiles.Add(log1);
}
if (Helper.RawFileExists(log2))
{
logFiles.Add(log2);
}
}
foreach (var logFile in logFiles)
{
files.Add(logFile);
}
rawFiles = Helper.GetFiles(files);
var b = new byte[rawFiles.First().FileStream.Length];
rawFiles.First().FileStream.Read(b, 0, (int) rawFiles.First().FileStream.Length);
reg = new RegistryHive(b, rawFiles.First().InputFilename);
}
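// If the primary and secondary sequence numbers disagree, the hive was not cleanly written ("dirty"),
// so any transaction logs (.LOG1/.LOG2 sharing the hive's base name) are located and replayed below.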
if (reg.Header.PrimarySequenceNumber != reg.Header.SecondarySequenceNumber)
{
if (string.IsNullOrEmpty(dirname))
{
dirname = ".";
}
var logFiles = Directory.GetFiles(dirname, $"{hiveBase}.LOG?").ToList();
var log1 = $"{dirname}\\{hiveBase}.LOG1";
var log2 = $"{dirname}\\{hiveBase}.LOG2";
if (logFiles.Count == 0)
{
if (File.Exists(log1))
{
logFiles.Add(log1);
}
if (File.Exists(log2))
{
logFiles.Add(log2);
}
}
if (logFiles.Count == 0 )
{
if (Helper.IsAdministrator())
{
if (Helper.RawFileExists(log1))
{
logFiles.Add(log1);
}
if (Helper.RawFileExists(log2))
{
logFiles.Add(log2);
}
}
else
{
_logger.Fatal($"Log files not found and no administrator access to look for them!");
Console.WriteLine();
}
}
if (logFiles.Count == 0)
{
if (noLogs == false)
{
_logger.Warn(
"Registry hive is dirty and no transaction logs were found in the same directory! LOGs should have same base name as the hive. Aborting!!");
throw new Exception(
"Sequence numbers do not match and transaction logs were not found in the same directory as the hive. Aborting");
}
_logger.Warn(
"Registry hive is dirty and no transaction logs were found in the same directory. Data may be missing! Continuing anyways...");
}
else
{
if (noLogs == false)
{
if (rawFiles != null)
{
var lt = new List<TransactionLogFileInfo>();
foreach (var rawCopyReturn in rawFiles.Skip(1).ToList())
{
var b = new byte[rawCopyReturn.FileStream.Length];
rawCopyReturn.FileStream.Read(b, 0, (int) rawCopyReturn.FileStream.Length);
var tt = new TransactionLogFileInfo(rawCopyReturn.InputFilename, b);
lt.Add(tt);
}
reg.ProcessTransactionLogs(lt, true);
}
else
{
reg.ProcessTransactionLogs(logFiles.ToList(), true);
}
}
else
{
_logger.Warn(
"Registry hive is dirty and transaction logs were found in the same directory, but --nl was provided. Data may be missing! Continuing anyways...");
}
}
}
reg.ParseHive();
}
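// No control set specified (-1): probe ControlSet000 through ControlSet009 for an AppCompatCache
// (or legacy AppCompatibility) key and queue every control set that has one for processing.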
if (controlSet == -1)
{
for (var i = 0; i < 10; i++)
{
subKey = reg.GetKey($@"ControlSet00{i}\Control\Session Manager\AppCompatCache");
if (subKey == null)
{
subKey = reg.GetKey($@"ControlSet00{i}\Control\Session Manager\AppCompatibility");
}
if (subKey != null)
{
controlSetIds.Add(i);
}
}
if (controlSetIds.Count > 1)
{
_logger.Warn(
$"***The following ControlSet00x keys will be exported: {string.Join(",", controlSetIds)}. Use -c to process keys individually\r\n");
}
}
else
{
//a control set was passed in
subKey = reg.GetKey($@"ControlSet00{ControlSet}\Control\Session Manager\AppCompatCache");
if (subKey == null)
{
subKey = reg.GetKey($@"ControlSet00{ControlSet}\Control\Session Manager\AppCompatibility");
}
if (subKey == null)
{
throw new Exception($"Could not find ControlSet00{ControlSet}. Exiting");
}
controlSetIds.Add(ControlSet);
}
var is32 = Is32Bit(filename, reg);
_logger.Debug($@"**** Found {controlSetIds.Count} ids to process");
foreach (var id in controlSetIds)
{
_logger.Debug($@"**** Processing id {id}");
// var hive2 = new RegistryHiveOnDemand(filename);
subKey = reg.GetKey($@"ControlSet00{id}\Control\Session Manager\AppCompatCache");
if (subKey == null)
{
_logger.Debug(@"**** Initial subkey null, getting appCompatability key");
subKey = reg.GetKey($@"ControlSet00{id}\Control\Session Manager\AppCompatibility");
}
_logger.Debug(@"**** Looking AppCompatcache value");
var val = subKey?.Values.SingleOrDefault(c => c.ValueName == "AppCompatCache");
if (val != null)
{
_logger.Debug(@"**** Found AppCompatcache value");
rawBytes = val.ValueDataRaw;
}
if (rawBytes == null)
{
_logger.Error($@"'AppCompatCache' value not found for 'ControlSet00{id}'! Exiting");
}
var cache = Init(rawBytes, is32, id);
Caches.Add(cache);
}
}
public int ControlSet { get; }
public List<IAppCompatCache> Caches { get; }
public OperatingSystemVersion OperatingSystem { get; private set; }
//https://github.com/libyal/winreg-kb/wiki/Application-Compatibility-Cache-key
//https://dl.mandiant.com/EE/library/Whitepaper_ShimCacheParser.pdf
private IAppCompatCache Init(byte[] rawBytes, bool is32, int controlSet)
{
IAppCompatCache appCache = null;
OperatingSystem = OperatingSystemVersion.Unknown;
string signature;
var sigNum = BitConverter.ToUInt32(rawBytes, 0);
//TODO check minimum length of rawBytes and throw exception if not enough data
signature = Encoding.ASCII.GetString(rawBytes, 128, 4);
var log1 = LogManager.GetCurrentClassLogger();
log1.Debug($@"**** Signature {signature}, Sig num 0x{sigNum:X}");
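// Signature-to-OS mapping used by the checks below:
//   0xDEADBEEF                       -> Windows XP
//   0xBADC0FFE                       -> Windows Vista / Server 2003 / Server 2008
//   0xBADC0FEE                       -> Windows 7 / Server 2008 R2 (x86 vs x64 decided by is32)
//   "00ts" at offset 128             -> Windows 8.0 / Server 2012
//   "10ts" at offset 128             -> Windows 8.1 / Server 2012 R2
//   "10ts" at the offset in DWORD 0  -> Windows 10 (offset 0x34 indicates Creators Update or later)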
if (sigNum == 0xDEADBEEF) //DEADBEEF, WinXp
{
OperatingSystem = OperatingSystemVersion.WindowsXP;
log1.Debug(@"**** Processing XP hive");
appCache = new WindowsXP(rawBytes, is32, controlSet);
}
else if (sigNum == 0xbadc0ffe)
{
OperatingSystem = OperatingSystemVersion.WindowsVistaWin2k3Win2k8;
appCache = new VistaWin2k3Win2k8(rawBytes, is32, controlSet);
}
else if (sigNum == 0xBADC0FEE) //BADC0FEE, Win7
{
if (is32)
{
OperatingSystem = OperatingSystemVersion.Windows7x86;
}
else
{
OperatingSystem = OperatingSystemVersion.Windows7x64_Windows2008R2;
}
appCache = new Windows7(rawBytes, is32, controlSet);
}
else if (signature == "00ts")
{
OperatingSystem = OperatingSystemVersion.Windows80_Windows2012;
appCache = new Windows8x(rawBytes, OperatingSystem, controlSet);
}
else if (signature == "10ts")
{
OperatingSystem = OperatingSystemVersion.Windows81_Windows2012R2;
appCache = new Windows8x(rawBytes, OperatingSystem, controlSet);
}
else
{
//is it windows 10?
var offsetToEntries = BitConverter.ToInt32(rawBytes, 0);
OperatingSystem = OperatingSystemVersion.Windows10;
if (offsetToEntries == 0x34)
{
OperatingSystem = OperatingSystemVersion.Windows10Creators;
}
signature = Encoding.ASCII.GetString(rawBytes, offsetToEntries, 4);
if (signature == "10ts")
{
appCache = new Windows10(rawBytes, controlSet);
}
}
if (appCache == null)
{
throw new Exception(
"Unable to determine operating system! Please send the hive to <EMAIL>");
}
return appCache;
}
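// Determines CPU architecture from the PROCESSOR_ARCHITECTURE value under Session Manager\Environment,
// read either from the live registry (empty filename) or from the current control set of the supplied hive.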
public static bool Is32Bit(string fileName, RegistryHive reg)
{
if (fileName.Length == 0)
{
var keyCurrUser = Microsoft.Win32.Registry.LocalMachine;
var subKey = keyCurrUser.OpenSubKey(@"SYSTEM\CurrentControlSet\Control\Session Manager\Environment");
var val = subKey?.GetValue("PROCESSOR_ARCHITECTURE");
if (val != null)
{
return val.ToString().Equals("x86");
}
}
else
{
try
{
var subKey1 = reg.GetKey("Select");
var currentCtlSet = int.Parse(subKey1.Values.Single(c => c.ValueName == "Current").ValueData);
subKey1 = reg.GetKey($"ControlSet00{currentCtlSet}\\Control\\Session Manager\\Environment");
var val = subKey1?.Values.SingleOrDefault(c => c.ValueName == "PROCESSOR_ARCHITECTURE");
if (val != null)
{
return val.ValueData.Equals("x86");
}
}
catch (Exception)
{
var l = new List<string>();
l.Add(fileName);
var ff = Helper.GetFiles(l);
var b = new byte[ff.First().FileStream.Length];
ff.First().FileStream.Read(b, 0, (int) ff.First().FileStream.Length);
var hive = new RegistryHiveOnDemand(b, fileName);
var subKey = hive.GetKey("Select");
var currentCtlSet = int.Parse(subKey.Values.Single(c => c.ValueName == "Current").ValueData);
subKey = hive.GetKey($"ControlSet00{currentCtlSet}\\Control\\Session Manager\\Environment");
var val = subKey?.Values.SingleOrDefault(c => c.ValueName == "PROCESSOR_ARCHITECTURE");
if (val != null)
{
return val.ValueData.Equals("x86");
}
}
}
throw new NullReferenceException("Unable to determine CPU architecture!");
}
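// Illustrative usage sketch (hypothetical caller code, not part of the original file):
//   var appCompat = new AppCompatCache(@"C:\Windows\System32\config\SYSTEM", -1, false);
//   foreach (var cache in appCompat.Caches)
//       foreach (var entry in cache.Entries) { /* entry.Path, entry.LastModifiedTimeUTC, entry.Executed, ... */ }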
}
}
<|start_filename|>AppCompatCacheParser/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Security.Principal;
using AppCompatCache;
using CsvHelper.Configuration;
using CsvHelper.TypeConversion;
using Exceptionless;
using Fclp;
using Microsoft.Win32;
using NLog;
using NLog.Config;
using NLog.Targets;
using ServiceStack;
using ServiceStack.Text;
using CsvWriter = CsvHelper.CsvWriter;
namespace AppCompatCacheParser
{
internal class Program
{
private static FluentCommandLineParser<ApplicationArguments> _fluentCommandLineParser;
private static readonly string BaseDirectory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
private static void SetupNLog()
{
if (File.Exists( Path.Combine(BaseDirectory,"Nlog.config")))
{
return;
}
var config = new LoggingConfiguration();
var loglevel = LogLevel.Info;
var layout = @"${message}";
var consoleTarget = new ColoredConsoleTarget();
config.AddTarget("console", consoleTarget);
consoleTarget.Layout = layout;
var rule1 = new LoggingRule("*", loglevel, consoleTarget);
config.LoggingRules.Add(rule1);
LogManager.Configuration = config;
}
public static bool IsAdministrator()
{
var identity = WindowsIdentity.GetCurrent();
var principal = new WindowsPrincipal(identity);
return principal.IsInRole(WindowsBuiltInRole.Administrator);
}
private static void Main(string[] args)
{
ExceptionlessClient.Default.Startup("7iL4b0Me7W8PbFflftqWgfQCIdf55flrT2O11zIP");
SetupNLog();
var logger = LogManager.GetCurrentClassLogger();
_fluentCommandLineParser = new FluentCommandLineParser<ApplicationArguments>();
_fluentCommandLineParser.Setup(arg => arg.CsvDirectory)
.As("csv")
.WithDescription("Directory to save CSV formatted results to. Required")
.Required();
_fluentCommandLineParser.Setup(arg => arg.CsvName)
.As("csvf")
.WithDescription("File name to save CSV formatted results to. When present, overrides default name\r\n");
_fluentCommandLineParser.Setup(arg => arg.HiveFile)
.As('f')
.WithDescription(
"Full path to SYSTEM hive to process. If this option is not specified, the live Registry will be used")
.SetDefault(string.Empty);
_fluentCommandLineParser.Setup(arg => arg.SortTimestamps)
.As('t')
.WithDescription("Sorts last modified timestamps in descending order\r\n")
.SetDefault(false);
_fluentCommandLineParser.Setup(arg => arg.ControlSet)
.As('c')
.WithDescription("The ControlSet to parse. Default is to extract all control sets.")
.SetDefault(-1);
_fluentCommandLineParser.Setup(arg => arg.Debug)
.As("debug")
.WithDescription("Debug mode")
.SetDefault(false);
_fluentCommandLineParser.Setup(arg => arg.DateTimeFormat)
.As("dt")
.WithDescription(
"The custom date/time format to use when displaying timestamps. See https://goo.gl/CNVq0k for options. Default is: yyyy-MM-dd HH:mm:ss")
.SetDefault("yyyy-MM-dd HH:mm:ss");
_fluentCommandLineParser.Setup(arg => arg.NoTransLogs)
.As("nl")
.WithDescription(
"When true, ignore transaction log files for dirty hives. Default is FALSE").SetDefault(false);
var header =
$"AppCompatCache Parser version {Assembly.GetExecutingAssembly().GetName().Version}" +
$"\r\n\r\nAuthor: <NAME> (<EMAIL>)" +
$"\r\nhttps://github.com/EricZimmerman/AppCompatCacheParser";
var footer = @"Examples: AppCompatCacheParser.exe --csv c:\temp -t -c 2" + "\r\n\t " +
@" AppCompatCacheParser.exe --csv c:\temp --csvf results.csv" + "\r\n\t " +
"\r\n\t" +
" Short options (single letter) are prefixed with a single dash. Long commands are prefixed with two dashes\r\n";
_fluentCommandLineParser.SetupHelp("?", "help").WithHeader(header).Callback(text => logger.Info(text + "\r\n" + footer));
var result = _fluentCommandLineParser.Parse(args);
if (result.HelpCalled)
{
return;
}
if (result.HasErrors)
{
_fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options);
return;
}
var hiveToProcess = "Live Registry";
if (_fluentCommandLineParser.Object.HiveFile?.Length > 0)
{
hiveToProcess = _fluentCommandLineParser.Object.HiveFile;
}
logger.Info(header);
logger.Info("");
logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}\r\n");
if (IsAdministrator() == false)
{
logger.Fatal($"Warning: Administrator privileges not found!\r\n");
}
logger.Info($"Processing hive '{hiveToProcess}'");
logger.Info("");
if (_fluentCommandLineParser.Object.Debug)
{
LogManager.Configuration.LoggingRules.First().EnableLoggingForLevel(LogLevel.Debug);
}
try
{
var appCompat = new AppCompatCache.AppCompatCache(_fluentCommandLineParser.Object.HiveFile,
_fluentCommandLineParser.Object.ControlSet,_fluentCommandLineParser.Object.NoTransLogs);
var outFileBase = string.Empty;
var ts1 = DateTime.Now.ToString("yyyyMMddHHmmss");
if (_fluentCommandLineParser.Object.HiveFile?.Length > 0)
{
if (_fluentCommandLineParser.Object.ControlSet >= 0)
{
outFileBase =
$"{ts1}_{appCompat.OperatingSystem}_{Path.GetFileNameWithoutExtension(_fluentCommandLineParser.Object.HiveFile)}_ControlSet00{_fluentCommandLineParser.Object.ControlSet}_AppCompatCache.csv";
}
else
{
outFileBase =
$"{ts1}_{appCompat.OperatingSystem}_{Path.GetFileNameWithoutExtension(_fluentCommandLineParser.Object.HiveFile)}_AppCompatCache.csv";
}
}
else
{
outFileBase = $"{ts1}_{appCompat.OperatingSystem}_{Environment.MachineName}_AppCompatCache.csv";
}
if (_fluentCommandLineParser.Object.CsvName.IsNullOrEmpty() == false)
{
outFileBase = Path.GetFileName(_fluentCommandLineParser.Object.CsvName);
}
if (Directory.Exists(_fluentCommandLineParser.Object.CsvDirectory) == false)
{
Directory.CreateDirectory(_fluentCommandLineParser.Object.CsvDirectory);
}
var outFilename = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outFileBase);
var sw = new StreamWriter(outFilename);
var csv = new CsvWriter(sw,CultureInfo.InvariantCulture);
var foo = csv.Context.AutoMap<CacheEntry>();
var o = new TypeConverterOptions
{
DateTimeStyle = DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal
};
csv.Context.TypeConverterOptionsCache.AddOptions<CacheEntry>(o);
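// Class map: raw/low-level fields are excluded from the CSV output and the remaining columns are pinned
// to a fixed order (ControlSet, CacheEntryPosition, Path, LastModifiedTimeUTC, Executed, Duplicate, SourceFile).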
foo.Map(t => t.LastModifiedTimeUTC).Convert(t=>t.Value.LastModifiedTimeUTC.HasValue ? t.Value.LastModifiedTimeUTC.Value.ToString(_fluentCommandLineParser.Object.DateTimeFormat): "");
foo.Map(t => t.CacheEntrySize).Ignore();
foo.Map(t => t.Data).Ignore();
foo.Map(t => t.InsertFlags).Ignore();
foo.Map(t => t.DataSize).Ignore();
foo.Map(t => t.LastModifiedFILETIMEUTC).Ignore();
foo.Map(t => t.PathSize).Ignore();
foo.Map(t => t.Signature).Ignore();
foo.Map(t => t.ControlSet).Index(0);
foo.Map(t => t.CacheEntryPosition).Index(1);
foo.Map(t => t.Path).Index(2);
foo.Map(t => t.LastModifiedTimeUTC).Index(3);
foo.Map(t => t.Executed).Index(4);
foo.Map(t => t.Duplicate).Index(5);
foo.Map(t => t.SourceFile).Index(6);
csv.WriteHeader<CacheEntry>();
csv.NextRecord();
logger.Debug($"**** Found {appCompat.Caches.Count} caches");
var cacheKeys = new HashSet<string>();
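// Duplicate detection: an entry is flagged as a duplicate when its key was already seen in a previously
// processed cache/control set within this run.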
if (appCompat.Caches.Any())
{
foreach (var appCompatCach in appCompat.Caches)
{
if (_fluentCommandLineParser.Object.Debug)
{
appCompatCach.PrintDump();
}
try
{
logger.Info(
$"Found {appCompatCach.Entries.Count:N0} cache entries for {appCompat.OperatingSystem} in ControlSet00{appCompatCach.ControlSet}");
if (_fluentCommandLineParser.Object.SortTimestamps)
{
// csv.WriteRecords(appCompatCach.Entries.OrderByDescending(t => t.LastModifiedTimeUTC));
foreach (var cacheEntry in appCompatCach.Entries)
{
cacheEntry.SourceFile = hiveToProcess;
cacheEntry.Duplicate = cacheKeys.Contains(cacheEntry.GetKey());
cacheKeys.Add(cacheEntry.GetKey());
csv.WriteRecord(cacheEntry);
csv.NextRecord();
}
}
else
{
foreach (var cacheEntry in appCompatCach.Entries)
{
cacheEntry.SourceFile = hiveToProcess;
cacheEntry.Duplicate = cacheKeys.Contains(cacheEntry.GetKey());
cacheKeys.Add(cacheEntry.GetKey());
csv.WriteRecord(cacheEntry);
csv.NextRecord();
}
//csv.WriteRecords(appCompatCach.Entries);
}
}
catch (Exception ex)
{
logger.Error($"There was an error: Error message: {ex.Message} Stack: {ex.StackTrace}");
try
{
appCompatCach.PrintDump();
}
catch (Exception ex1)
{
logger.Error($"Couldn't PrintDump {ex1.Message} Stack: {ex1.StackTrace}");
}
}
}
sw.Flush();
sw.Close();
logger.Warn($"\r\nResults saved to '{outFilename}'\r\n");
}
else
{
logger.Warn($"\r\nNo caches were found!\r\n");
}
}
catch (Exception ex)
{
if (ex.Message.Contains("Sequence numbers do not match and transaction logs were not found in the same direct") == false)
{
if (ex.Message.Contains("Administrator privileges not found"))
{
logger.Fatal($"Could not access '{_fluentCommandLineParser.Object.HiveFile}'. Does it exist?");
logger.Error("");
logger.Fatal("Rerun the program with Administrator privileges to try again\r\n");
}
else if (ex.Message.Contains("Invalid diskName:"))
{
logger.Fatal($"Could not access '{_fluentCommandLineParser.Object.HiveFile}'. Invalid disk!");
logger.Error("");
}
else
{
logger.Error($"There was an error: {ex.Message}");
logger.Error($"Stacktrace: {ex.StackTrace}");
logger.Info("");
}
}
}
}
}
public class ApplicationArguments
{
public string HiveFile { get; set; }
public bool SortTimestamps { get; set; }
public int ControlSet { get; set; }
public string CsvDirectory { get; set; }
public string CsvName { get; set; }
public bool Debug { get; set; }
public bool NoTransLogs { get; set; } = false;
public string DateTimeFormat { get; set; }
}
}
<|start_filename|>Plugins/VRExpansionPlugin/Source/VRExpansionPlugin/Public/VRBPDatatypes.h<|end_filename|>
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "Engine.h"
#include "VRBPDatatypes.generated.h"
USTRUCT()
struct VREXPANSIONPLUGIN_API FBPVRComponentPosRep
{
GENERATED_BODY()
public:
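// Position uses FVector_NetQuantize100, which quantizes the replicated vector to two decimal places to save bandwidth.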
UPROPERTY()
FVector_NetQuantize100 Position;
UPROPERTY()
FRotator Orientation;
};
/*
Interactive Collision With Physics = Held items can be offset by geometry, uses physics for the offset, pushes physics simulating objects with weight taken into account
Interactive Collision With Sweep = Held items can be offset by geometry, uses sweep for the offset, pushes physics simulating objects, no weight
Sweep With Physics = Only sweeps movement, will not be offset by geometry, still pushes physics simulating objects, no weight
Physics Only = Does not sweep at all (does not trigger OnHitEvents), still pushes physics simulating objects, no weight
*/
UENUM(Blueprintable)
enum EGripCollisionType
{
InteractiveCollisionWithPhysics,
InteractiveCollisionWithSweep,
InteractiveHybridCollisionWithSweep,
SweepWithPhysics,
PhysicsOnly
};
// This needs to be updated as the original gets changed, that or hope they make the original blueprint accessible.
UENUM(Blueprintable)
enum EBPHMDDeviceType
{
DT_OculusRift,
DT_Morpheus,
DT_ES2GenericStereoMesh,
DT_SteamVR,
DT_GearVR,
DT_Unknown
};
USTRUCT(BlueprintType, Category = "VRExpansionLibrary")
struct VREXPANSIONPLUGIN_API FBPActorGripInformation
{
GENERATED_BODY()
public:
UPROPERTY(BlueprintReadOnly)
AActor * Actor;
UPROPERTY(BlueprintReadOnly)
UPrimitiveComponent * Component;
UPROPERTY(BlueprintReadOnly)
TEnumAsByte<EGripCollisionType> GripCollisionType;
UPROPERTY(BlueprintReadOnly)
bool bColliding;
UPROPERTY(BlueprintReadOnly)
FTransform RelativeTransform;
UPROPERTY(BlueprintReadOnly)
bool bOriginalReplicatesMovement;
UPROPERTY(BlueprintReadOnly)
bool bTurnOffLateUpdateWhenColliding;
UPROPERTY(BlueprintReadOnly)
float Damping;
UPROPERTY(BlueprintReadOnly)
float Stiffness;
// For multi grip situations
//UPROPERTY(BlueprintReadOnly)
// USceneComponent * SecondaryAttachment;
//UPROPERTY()
// FTransform SecondaryRelativeTransform;
//UPROPERTY(BlueprintReadOnly)
// bool bHasSecondaryAttachment;
// Allow hand to not be primary positional attachment?
// End multi grip
/** Physics scene index of the body we are grabbing. */
//int32 SceneIndex;
/** Pointer to PhysX joint used by the handle*/
//physx::PxD6Joint* HandleData;
/** Pointer to kinematic actor jointed to grabbed object */
//physx::PxRigidDynamic* KinActorData;
FBPActorGripInformation()
{
// HandleData = NULL;
//KinActorData = NULL;
bTurnOffLateUpdateWhenColliding = true;
Damping = 200.0f;
Stiffness = 1500.0f;
Component = nullptr;
Actor = nullptr;
bColliding = false;
GripCollisionType = EGripCollisionType::InteractiveCollisionWithSweep;
//SecondaryAttachment = nullptr;
//bHasSecondaryAttachment = false;
//bHandIsPrimaryReference = true;
}
};
USTRUCT(BlueprintType, Category = "VRExpansionLibrary")
struct VREXPANSIONPLUGIN_API FBPActorPhysicsHandleInformation
{
GENERATED_BODY()
public:
UPROPERTY(BlueprintReadOnly)
AActor * Actor;
UPROPERTY(BlueprintReadOnly)
UPrimitiveComponent * Component;
/** Physics scene index of the body we are grabbing. */
int32 SceneIndex;
/** Pointer to PhysX joint used by the handle*/
physx::PxD6Joint* HandleData;
/** Pointer to kinematic actor jointed to grabbed object */
physx::PxRigidDynamic* KinActorData;
FBPActorPhysicsHandleInformation()
{
HandleData = NULL;
KinActorData = NULL;
Actor = nullptr;
Component = nullptr;
}
};
<|start_filename|>Plugins/VRExpansionPlugin/Source/VRExpansionPlugin/Public/ReplicatedVRCameraComponent.h<|end_filename|>
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "Engine.h"
#include "ReplicatedVRCameraComponent.generated.h"
UCLASS(Blueprintable, meta = (BlueprintSpawnableComponent), ClassGroup = VRExpansionLibrary)
class VREXPANSIONPLUGIN_API UReplicatedVRCameraComponent : public UCameraComponent
{
GENERATED_UCLASS_BODY()
// ~UGripMotionControllerComponent();
void TickComponent(float DeltaTime, enum ELevelTick TickType, FActorComponentTickFunction *ThisTickFunction) override;
/** Whether or not this component has authority within the frame*/
bool bHasAuthority;
/** Whether or not this component is currently on the network server*/
bool bIsServer;
// Whether to ever replicate position
//UPROPERTY(EditAnywhere, BlueprintReadWrite, Replicated, Category = "VRExpansionLibrary")
//bool bReplicateTransform;
UPROPERTY(BlueprintReadOnly, ReplicatedUsing = OnRep_ReplicatedTransform, Category = "VRExpansionLibrary")
FBPVRComponentPosRep ReplicatedTransform;
UFUNCTION()
virtual void OnRep_ReplicatedTransform()
{
SetRelativeLocationAndRotation(ReplicatedTransform.Position, ReplicatedTransform.Orientation);
}
// Rate to update the position to the server, 100 Hz is the default (same as the replication rate, so it should also hit every tick).
UPROPERTY(EditAnywhere, BlueprintReadWrite, Replicated, Category = "VRExpansionLibrary")
float NetUpdateRate;
// Used in Tick() to accumulate before sending updates, didn't want to use a timer in this case.
float NetUpdateCount;
// I'm sending it unreliable because it is being resent pretty often
UFUNCTION(Unreliable, Server, WithValidation)
void Server_SendTransform(FBPVRComponentPosRep NewTransform);
// Need this as I can't think of another way for an actor component to make sure it isn't on the server
bool IsLocallyControlled() const
{
// Epic used a check for a player controller to determine authority; however, these components are always attached to a pawn,
// so that check would never have worked here in the first place.
APawn* Owner = Cast<APawn>(GetOwner());
if (!Owner)
{
//const APlayerController* Actor = Cast<APlayerController>(GetOwner());
//if (!Actor)
return false;
//return Actor->IsLocalPlayerController();
}
return Owner->IsLocallyControlled();
}
bool IsServer() const
{
if (GEngine != nullptr && GWorld != nullptr)
{
switch (GEngine->GetNetMode(GWorld))
{
case NM_Client:
{return false; } break;
case NM_DedicatedServer:
case NM_ListenServer:
default:
{return true; } break;
}
}
return false;
}
};
<|start_filename|>Plugins/VRExpansionPlugin/Source/VRExpansionPlugin/Public/VRRootComponent.h<|end_filename|>
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "Engine.h"
#include "Components/ShapeComponent.h"
#include "VRRootComponent.generated.h"
//For UE4 Profiler ~ Stat Group
DECLARE_STATS_GROUP(TEXT("VRPhysicsUpdate"), STATGROUP_VRPhysics, STATCAT_Advanced);
// EXPERIMENTAL, don't use
UCLASS(Blueprintable, meta = (BlueprintSpawnableComponent), ClassGroup = VRExpansionLibrary)
class VREXPANSIONPLUGIN_API UVRRootComponent : public UCapsuleComponent//UShapeComponent
{
GENERATED_UCLASS_BODY()
public:
friend class FDrawCylinderSceneProxy;
void GenerateOffsetToWorld();
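// The GetVR* helpers below read from OffsetComponentToWorld (the transform produced by GenerateOffsetToWorld(),
// which applies VRCapsuleOffset), rather than from the component's own transform.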
UFUNCTION(BlueprintPure, Category = "MotionController")
FVector GetVRForwardVector()
{
return OffsetComponentToWorld.GetRotation().GetForwardVector();
}
UFUNCTION(BlueprintPure, Category = "MotionController")
FVector GetVRRightVector()
{
return OffsetComponentToWorld.GetRotation().GetRightVector();
}
UFUNCTION(BlueprintPure, Category = "MotionController")
FVector GetVRUpVector()
{
return OffsetComponentToWorld.GetRotation().GetUpVector();
}
UFUNCTION(BlueprintPure, Category = "MotionController")
FVector GetVRLocation()
{
return OffsetComponentToWorld.GetLocation();
}
UFUNCTION(BlueprintPure, Category = "MotionController")
FRotator GetVRRotation()
{
return OffsetComponentToWorld.GetRotation().Rotator();
}
protected:
virtual bool MoveComponentImpl(const FVector& Delta, const FQuat& NewRotation, bool bSweep, FHitResult* OutHit = NULL, EMoveComponentFlags MoveFlags = MOVECOMP_NoFlags, ETeleportType Teleport = ETeleportType::None) override;
virtual void OnUpdateTransform(EUpdateTransformFlags UpdateTransformFlags, ETeleportType Teleport = ETeleportType::None) override;
void SendPhysicsTransform(ETeleportType Teleport);
const TArray<FOverlapInfo>* ConvertRotationOverlapsToCurrentOverlaps(TArray<FOverlapInfo>& OverlapsAtEndLocation, const TArray<FOverlapInfo>& CurrentOverlaps);
const TArray<FOverlapInfo>* ConvertSweptOverlapsToCurrentOverlaps(
TArray<FOverlapInfo>& OverlapsAtEndLocation, const TArray<FOverlapInfo>& SweptOverlaps, int32 SweptOverlapsIndex,
const FVector& EndLocation, const FQuat& EndRotationQuat);
public:
virtual void BeginPlay() override;
bool IsLocallyControlled() const
{
// Epic used a check for a player controller to determine authority; however, these components are always attached to a pawn,
// so that check would never have worked here in the first place.
APawn* Owner = Cast<APawn>(GetOwner());
if (!Owner)
{
//const APlayerController* Actor = Cast<APlayerController>(GetOwner());
//if (!Actor)
return false;
//return Actor->IsLocalPlayerController();
}
return Owner->IsLocallyControlled();
}
// Whether to auto size the capsule collision to the height of the head.
UPROPERTY(BlueprintReadWrite, Transient, Category = "VRExpansionLibrary")
USceneComponent * TargetPrimitiveComponent;
UPROPERTY(BlueprintReadOnly, Transient, Category = "VRExpansionLibrary")
FTransform OffsetComponentToWorld;
FVector DifferenceFromLastFrame;
// Used to offset the collision (i.e. backwards from the player slightly).
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "VRExpansionLibrary")
FVector VRCapsuleOffset;
FVector curCameraLoc;
FRotator curCameraRot;
FVector lastCameraLoc;
FRotator lastCameraRot;
bool bHadRelativeMovement;
FPrimitiveSceneProxy* CreateSceneProxy() override;
void TickComponent(float DeltaTime, enum ELevelTick TickType, FActorComponentTickFunction *ThisTickFunction) override;
public:
// Begin UObject interface
#if WITH_EDITOR
virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override;
void PreEditChange(UProperty* PropertyThatWillChange);
#endif // WITH_EDITOR
// End UObject interface
virtual FBoxSphereBounds CalcBounds(const FTransform& LocalToWorld) const override;
};
<|start_filename|>Plugins/VRExpansionPlugin/Source/VRExpansionPlugin/Private/VRExpansionFunctionLibrary.cpp<|end_filename|>
// Fill out your copyright notice in the Description page of Project Settings.
#include "VRExpansionPluginPrivatePCH.h"
#include "VRExpansionFunctionLibrary.h"
//General Log
DEFINE_LOG_CATEGORY(VRExpansionFunctionLibraryLog);
UVRExpansionFunctionLibrary::UVRExpansionFunctionLibrary(const FObjectInitializer& ObjectInitializer)
: Super(ObjectInitializer)
{
PrimaryComponentTick.bCanEverTick = false;
}
//=============================================================================
UVRExpansionFunctionLibrary::~UVRExpansionFunctionLibrary()
{
if(bInitialized)
UnloadOpenVRModule();
}
bool UVRExpansionFunctionLibrary::OpenVRHandles()
{
if (IsLocallyControlled() && !bInitialized)
bInitialized = LoadOpenVRModule();
else if (bInitialized)
return true;
else
bInitialized = false;
return bInitialized;
}
bool UVRExpansionFunctionLibrary::CloseVRHandles()
{
if (bInitialized)
{
UnloadOpenVRModule();
bInitialized = false;
return true;
}
else
return false;
}
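// Loads openvr_api.dll (path taken from the VR_OVERRIDE environment variable when set, otherwise from the
// engine's ThirdParty OpenVR directory) and resolves the VR_GetStringForHmdError / VR_GetGenericInterface
// exports used by the property getters further below.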
bool UVRExpansionFunctionLibrary::LoadOpenVRModule()
{
#if PLATFORM_WINDOWS
#if PLATFORM_64BITS
FString RootOpenVRPath;
TCHAR VROverridePath[MAX_PATH];
FPlatformMisc::GetEnvironmentVariable(TEXT("VR_OVERRIDE"), VROverridePath, MAX_PATH);
if (FCString::Strlen(VROverridePath) > 0)
{
RootOpenVRPath = FString::Printf(TEXT("%s\\bin\\win64\\"), VROverridePath);
}
else
{
RootOpenVRPath = FPaths::EngineDir() / FString::Printf(TEXT("Binaries/ThirdParty/OpenVR/%s/Win64/"), OPENVR_SDK_VER);
}
FPlatformProcess::PushDllDirectory(*RootOpenVRPath);
OpenVRDLLHandle = FPlatformProcess::GetDllHandle(*(RootOpenVRPath + "openvr_api.dll"));
FPlatformProcess::PopDllDirectory(*RootOpenVRPath);
#else
FString RootOpenVRPath = FPaths::EngineDir() / FString::Printf(TEXT("Binaries/ThirdParty/OpenVR/%s/Win32/"), OPENVR_SDK_VER);
FPlatformProcess::PushDllDirectory(*RootOpenVRPath);
OpenVRDLLHandle = FPlatformProcess::GetDllHandle(*(RootOpenVRPath + "openvr_api.dll"));
FPlatformProcess::PopDllDirectory(*RootOpenVRPath);
#endif
#elif PLATFORM_MAC
OpenVRDLLHandle = FPlatformProcess::GetDllHandle(TEXT("libopenvr_api.dylib"));
#endif //PLATFORM_WINDOWS
if (!OpenVRDLLHandle)
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Failed to load OpenVR library."));
return false;
}
//@todo steamvr: Remove GetProcAddress() workaround once we update to Steamworks 1.33 or higher
//VRInitFn = (pVRInit)FPlatformProcess::GetDllExport(OpenVRDLLHandle, TEXT("VR_Init"));
//VRShutdownFn = (pVRShutdown)FPlatformProcess::GetDllExport(OpenVRDLLHandle, TEXT("VR_Shutdown"));
//VRIsHmdPresentFn = (pVRIsHmdPresent)FPlatformProcess::GetDllExport(OpenVRDLLHandle, TEXT("VR_IsHmdPresent"));
VRGetStringForHmdErrorFn = (pVRGetStringForHmdError)FPlatformProcess::GetDllExport(OpenVRDLLHandle, TEXT("VR_GetStringForHmdError"));
VRGetGenericInterfaceFn = (pVRGetGenericInterface)FPlatformProcess::GetDllExport(OpenVRDLLHandle, TEXT("VR_GetGenericInterface"));
if (/*!VRInitFn || !VRShutdownFn || !VRIsHmdPresentFn || */!VRGetStringForHmdErrorFn || !VRGetGenericInterfaceFn)
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Failed to GetProcAddress() on openvr_api.dll"));
UnloadOpenVRModule();
return false;
}
return true;
}
void UVRExpansionFunctionLibrary::UnloadOpenVRModule()
{
if (OpenVRDLLHandle != nullptr)
{
FPlatformProcess::FreeDllHandle(OpenVRDLLHandle);
OpenVRDLLHandle = nullptr;
//(*VRShutdownFn)();
}
}
bool UVRExpansionFunctionLibrary::GetIsHMDConnected()
{
if (GEngine && GEngine->HMDDevice.IsValid() && GEngine->HMDDevice->IsHMDConnected())
return true;
return false;
}
EBPHMDDeviceType UVRExpansionFunctionLibrary::GetHMDType()
{
if (GEngine && GEngine->HMDDevice.IsValid())
{
switch (GEngine->HMDDevice->GetHMDDeviceType())
{
case EHMDDeviceType::DT_ES2GenericStereoMesh: return EBPHMDDeviceType::DT_ES2GenericStereoMesh; break;
case EHMDDeviceType::DT_GearVR: return EBPHMDDeviceType::DT_GearVR; break;
case EHMDDeviceType::DT_Morpheus: return EBPHMDDeviceType::DT_Morpheus; break;
case EHMDDeviceType::DT_OculusRift: return EBPHMDDeviceType::DT_OculusRift; break;
case EHMDDeviceType::DT_SteamVR: return EBPHMDDeviceType::DT_SteamVR; break;
// Return unknown if not a matching enum, may need to add new entries in the copied enum if the original adds new ones in this case
default: return EBPHMDDeviceType::DT_Unknown; break;
}
}
return EBPHMDDeviceType::DT_Unknown;
}
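// The property getters below translate the blueprint-exposed enum values into vr::ETrackedDeviceProperty ids
// by adding the SteamVR property range base (+3000 for controller string properties, +1000 for generic device properties).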
bool UVRExpansionFunctionLibrary::GetVRControllerPropertyString(TEnumAsByte<EVRControllerProperty_String> PropertyToRetrieve, int32 DeviceID, FString & StringValue)
{
#if !STEAMVR_SUPPORTED_PLATFORMS
return false;
#else
if (!bInitialized)
return false;
if (!(GEngine->HMDDevice.IsValid() && (GEngine->HMDDevice->GetHMDDeviceType() == EHMDDeviceType::DT_SteamVR)))
return false;
vr::HmdError HmdErr;
vr::IVRSystem * VRSystem = (vr::IVRSystem*)(*VRGetGenericInterfaceFn)(vr::IVRSystem_Version, &HmdErr);
//vr::IVRSystem * VRSystem = (vr::IVRSystem*)vr::VR_GetGenericInterface(vr::IVRSystem_Version, &HmdErr);
if (!VRSystem)
return false;
vr::TrackedPropertyError pError = vr::TrackedPropertyError::TrackedProp_Success;
char charvalue[vr::k_unMaxPropertyStringSize];
uint32_t buffersize = 255;
uint32_t ret = VRSystem->GetStringTrackedDeviceProperty(DeviceID, (vr::ETrackedDeviceProperty) (((int32)PropertyToRetrieve.GetValue()) + 3000), charvalue, buffersize, &pError);
if (pError != vr::TrackedPropertyError::TrackedProp_Success)
return false;
StringValue = FString(ANSI_TO_TCHAR(charvalue));
return true;
#endif
}
bool UVRExpansionFunctionLibrary::GetVRDevicePropertyString(TEnumAsByte<EVRDeviceProperty_String> PropertyToRetrieve, int32 DeviceID, FString & StringValue)
{
#if !STEAMVR_SUPPORTED_PLATFORMS
return false;
#else
if (!bInitialized)
return false;
if (!(GEngine->HMDDevice.IsValid() && (GEngine->HMDDevice->GetHMDDeviceType() == EHMDDeviceType::DT_SteamVR)))
return false;
vr::HmdError HmdErr;
vr::IVRSystem * VRSystem = (vr::IVRSystem*)(*VRGetGenericInterfaceFn)(vr::IVRSystem_Version, &HmdErr);
//vr::IVRSystem * VRSystem = (vr::IVRSystem*)vr::VR_GetGenericInterface(vr::IVRSystem_Version, &HmdErr);
if (!VRSystem)
return false;
vr::TrackedPropertyError pError = vr::TrackedPropertyError::TrackedProp_Success;
char charvalue[vr::k_unMaxPropertyStringSize];
uint32_t buffersize = 255;
uint32_t ret = VRSystem->GetStringTrackedDeviceProperty(DeviceID, (vr::ETrackedDeviceProperty) (((int32)PropertyToRetrieve.GetValue()) + 1000), charvalue, buffersize, &pError);
if (pError != vr::TrackedPropertyError::TrackedProp_Success)
return false;
StringValue = FString(ANSI_TO_TCHAR(charvalue));
return true;
#endif
}
bool UVRExpansionFunctionLibrary::GetVRDevicePropertyBool(TEnumAsByte<EVRDeviceProperty_Bool> PropertyToRetrieve, int32 DeviceID, bool & BoolValue)
{
#if !STEAMVR_SUPPORTED_PLATFORMS
return false;
#else
if (!bInitialized)
return false;
if (!(GEngine->HMDDevice.IsValid() && (GEngine->HMDDevice->GetHMDDeviceType() == EHMDDeviceType::DT_SteamVR)))
return false;
vr::HmdError HmdErr;
vr::IVRSystem * VRSystem = (vr::IVRSystem*)(*VRGetGenericInterfaceFn)(vr::IVRSystem_Version, &HmdErr);
//vr::IVRSystem * VRSystem = (vr::IVRSystem*)vr::VR_GetGenericInterface(vr::IVRSystem_Version, &HmdErr);
if (!VRSystem)
return false;
vr::TrackedPropertyError pError = vr::TrackedPropertyError::TrackedProp_Success;
bool ret = VRSystem->GetBoolTrackedDeviceProperty(DeviceID, (vr::ETrackedDeviceProperty) (((int32)PropertyToRetrieve.GetValue()) + 1000), &pError);
if (pError != vr::TrackedPropertyError::TrackedProp_Success)
return false;
BoolValue = ret;
return true;
#endif
}
bool UVRExpansionFunctionLibrary::GetVRDevicePropertyFloat(TEnumAsByte<EVRDeviceProperty_Float> PropertyToRetrieve, int32 DeviceID, float & FloatValue)
{
#if !STEAMVR_SUPPORTED_PLATFORMS
return false;
#else
if (!bInitialized)
return false;
if (!(GEngine->HMDDevice.IsValid() && (GEngine->HMDDevice->GetHMDDeviceType() == EHMDDeviceType::DT_SteamVR)))
return false;
vr::HmdError HmdErr;
vr::IVRSystem * VRSystem = (vr::IVRSystem*)(*VRGetGenericInterfaceFn)(vr::IVRSystem_Version, &HmdErr);
//vr::IVRSystem * VRSystem = (vr::IVRSystem*)vr::VR_GetGenericInterface(vr::IVRSystem_Version, &HmdErr);
if (!VRSystem)
return false;
vr::TrackedPropertyError pError = vr::TrackedPropertyError::TrackedProp_Success;
float ret = VRSystem->GetFloatTrackedDeviceProperty(DeviceID, (vr::ETrackedDeviceProperty) (((int32)PropertyToRetrieve.GetValue()) + 1000), &pError);
if (pError != vr::TrackedPropertyError::TrackedProp_Success)
return false;
FloatValue = ret;
return true;
#endif
}
UTexture2D * UVRExpansionFunctionLibrary::GetVRDeviceModelAndTexture(UObject* WorldContextObject, TEnumAsByte<ESteamVRTrackedDeviceType> DeviceType, TArray<UProceduralMeshComponent *> ProceduralMeshComponentsToFill, bool & bSucceeded, bool bCreateCollision/*, TArray<uint8> & OutRawTexture, bool bReturnRawTexture*/)
{
#if !STEAMVR_SUPPORTED_PLATFORMS
bSucceeded = false;
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Not SteamVR Supported Platform!!"));
return nullptr;
#else
if (!bInitialized)
{
bSucceeded = false;
return nullptr;
}
/*if (!(GEngine->HMDDevice.IsValid() && (GEngine->HMDDevice->GetHMDDeviceType() == EHMDDeviceType::DT_SteamVR)))
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Couldn't Get HMD Device!!"));
bSucceeded = false;
return nullptr;
}*/
/* FSteamVRHMD* SteamVRHMD = (FSteamVRHMD*)(GEngine->HMDDevice.Get());
if (!SteamVRHMD || !SteamVRHMD->IsStereoEnabled())
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Couldn't Get HMD Device!!"));
bSucceeded = false;
return nullptr;
}*/
vr::HmdError HmdErr;
vr::IVRSystem * VRSystem = (vr::IVRSystem*)(*VRGetGenericInterfaceFn)(vr::IVRSystem_Version, &HmdErr);
//vr::IVRSystem * VRSystem = (vr::IVRSystem*)vr::VR_GetGenericInterface(vr::IVRSystem_Version, &HmdErr);
if (!VRSystem)
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("VRSystem InterfaceErrorCode %i"), (int32)HmdErr);
}
vr::IVRRenderModels * VRRenderModels = (vr::IVRRenderModels*)(*VRGetGenericInterfaceFn)(vr::IVRRenderModels_Version, &HmdErr);
//vr::IVRRenderModels * VRRenderModels = (vr::IVRRenderModels*)vr::VR_GetGenericInterface(vr::IVRRenderModels_Version, &HmdErr);
if (!VRRenderModels)
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Render Models InterfaceErrorCode %i"), (int32)HmdErr);
}
if (!VRSystem || !VRRenderModels)
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Couldn't Get Interfaces!!"));
bSucceeded = false;
return nullptr;
}
TArray<int32> TrackedIDs;
USteamVRFunctionLibrary::GetValidTrackedDeviceIds(DeviceType.GetValue(), TrackedIDs);
if (TrackedIDs.Num() == 0)
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Couldn't Get Tracked Devices!!"));
bSucceeded = false;
return nullptr;
}
int32 DeviceID = TrackedIDs[0];
vr::TrackedPropertyError pError = vr::TrackedPropertyError::TrackedProp_Success;
char RenderModelName[vr::k_unMaxPropertyStringSize];
uint32_t buffersize = 255;
uint32_t ret = VRSystem->GetStringTrackedDeviceProperty(DeviceID, vr::ETrackedDeviceProperty::Prop_RenderModelName_String, RenderModelName, buffersize, &pError);
if (pError != vr::TrackedPropertyError::TrackedProp_Success)
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Couldn't Get Render Model Name String!!"));
bSucceeded = false;
return nullptr;
}
//uint32_t numComponents = VRRenderModels->GetComponentCount("vr_controller_vive_1_5");
//UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("NumComponents: %i"), (int32)numComponents);
// if numComponents > 0 load each, otherwise load the main one only
vr::RenderModel_t *RenderModel;
if (!VRRenderModels->LoadRenderModel(RenderModelName, &RenderModel))
{
UE_LOG(VRExpansionFunctionLibraryLog, Warning, TEXT("Couldn't Load Model!!"));
bSucceeded = false;
return nullptr;
}
if (ProceduralMeshComponentsToFill.Num() > 0)
{
TArray<FVector> vertices;
TArray<int32> triangles;
TArray<FVector> normals;
TArray<FVector2D> UV0;
TArray<FColor> vertexColors;
TArray<FProcMeshTangent> tangents;
vr::HmdVector3_t vPosition;
vr::HmdVector3_t vNormal;
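// OpenVR vertex data is copied with its component order swapped (z, y, x) to roughly map it into Unreal's
// coordinate space; the meshes are scaled to world units via GetWorldToMetersScale further below.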
for (uint32_t i = 0; i < RenderModel->unVertexCount; ++i)
{
vPosition = RenderModel->rVertexData[i].vPosition;
vertices.Add(FVector(vPosition.v[2], vPosition.v[1], vPosition.v[0]));
vNormal = RenderModel->rVertexData[i].vNormal;
normals.Add(FVector(vNormal.v[2], vNormal.v[1], vNormal.v[0]));
UV0.Add(FVector2D(RenderModel->rVertexData[i].rfTextureCoord[0], RenderModel->rVertexData[i].rfTextureCoord[1]));
}
for (uint32_t i = 0; i < RenderModel->unTriangleCount * 3; i += 3)
{
triangles.Add(RenderModel->rIndexData[i]);
triangles.Add(RenderModel->rIndexData[i + 1]);
triangles.Add(RenderModel->rIndexData[i + 2]);
}
float scale = UHeadMountedDisplayFunctionLibrary::GetWorldToMetersScale(WorldContextObject);
for (int i = 0; i < ProceduralMeshComponentsToFill.Num(); ++i)
{
ProceduralMeshComponentsToFill[i]->ClearAllMeshSections();
ProceduralMeshComponentsToFill[i]->CreateMeshSection(1, vertices, triangles, normals, UV0, vertexColors, tangents, bCreateCollision);
ProceduralMeshComponentsToFill[i]->SetMeshSectionVisible(1, true);
ProceduralMeshComponentsToFill[i]->SetWorldScale3D(FVector(scale, scale, scale));
}
}
vr::TextureID_t texID = RenderModel->diffuseTextureId;
vr::RenderModel_TextureMap_t * texture;
UTexture2D* OutTexture = nullptr;
if (VRRenderModels->LoadTexture(texID, &texture))
{
uint32 Width = texture->unWidth;
uint32 Height = texture->unHeight;
OutTexture = UTexture2D::CreateTransient(Width, Height, PF_R8G8B8A8);
uint8* MipData = (uint8*)OutTexture->PlatformData->Mips[0].BulkData.Lock(LOCK_READ_WRITE);
FMemory::Memcpy(MipData, (void*)texture->rubTextureMapData, Height * Width * 4);
OutTexture->PlatformData->Mips[0].BulkData.Unlock();
//Setting some Parameters for the Texture and finally returning it
OutTexture->PlatformData->NumSlices = 1;
OutTexture->NeverStream = true;
OutTexture->UpdateResource();
/*if (bReturnRawTexture)
{
OutRawTexture.AddUninitialized(Height * Width * 4);
FMemory::Memcpy(OutRawTexture.GetData(), (void*)texture->rubTextureMapData, Height * Width * 4);
}*/
bSucceeded = true;
VRRenderModels->FreeTexture(texture);
}
else
{
bSucceeded = false;
}
VRRenderModels->FreeRenderModel(RenderModel);
return OutTexture;
#endif
}
<|start_filename|>Plugins/VRExpansionPlugin/Source/VRExpansionPlugin/Public/ParentRelativeAttachmentComponent.h<|end_filename|>
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "Engine.h"
#include "Components/ShapeComponent.h"
#include "ParentRelativeAttachmentComponent.generated.h"
UCLASS(Blueprintable, meta = (BlueprintSpawnableComponent), ClassGroup = VRExpansionLibrary)
class VREXPANSIONPLUGIN_API UParentRelativeAttachmentComponent : public USceneComponent
{
GENERATED_UCLASS_BODY()
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "VRExpansionLibrary")
bool bLockPitch;
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "VRExpansionLibrary")
bool bLockYaw;
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "VRExpansionLibrary")
bool bLockRoll;
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "VRExpansionLibrary", meta = (ClampMin = "0", UIMin = "0"))
float PitchTolerance;
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "VRExpansionLibrary", meta = (ClampMin = "0", UIMin = "0"))
float YawTolerance;
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "VRExpansionLibrary", meta = (ClampMin = "0", UIMin = "0"))
float RollTolerance;
void TickComponent(float DeltaTime, enum ELevelTick TickType, FActorComponentTickFunction *ThisTickFunction) override;
};
<|start_filename|>Plugins/VRExpansionPlugin/Source/VRExpansionPlugin/Private/ParentRelativeAttachmentComponent.cpp<|end_filename|>
// Copyright 1998-2016 Epic Games, Inc. All Rights Reserved.
#include "VRExpansionPluginPrivatePCH.h"
#include "Runtime/Engine/Private/EnginePrivate.h"
#include "ParentRelativeAttachmentComponent.h"
UParentRelativeAttachmentComponent::UParentRelativeAttachmentComponent(const FObjectInitializer& ObjectInitializer)
: Super(ObjectInitializer)
{
PrimaryComponentTick.bCanEverTick = true;
PrimaryComponentTick.bStartWithTickEnabled = true;
//PrimaryComponentTick.TickGroup = TG_PrePhysics;
this->RelativeScale3D = FVector(1.0f, 1.0f, 1.0f);
this->RelativeLocation = FVector(0, 0, 0);
bLockPitch = true;
bLockYaw = false;
bLockRoll = true;
PitchTolerance = 0.0f;
YawTolerance = 0.0f;
RollTolerance = 0.0f;
}
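// Each tick: locked axes are forced to 0, unlocked axes follow the attach parent's rotation, but only once
// the difference exceeds the per-axis tolerance (which suppresses small jitter).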
void UParentRelativeAttachmentComponent::TickComponent(float DeltaTime, enum ELevelTick TickType, FActorComponentTickFunction *ThisTickFunction)
{
if (this->GetAttachParent())
{
FRotator InverseRot = GetAttachParent()->GetComponentRotation();
FRotator CurRot = this->GetComponentRotation();
float newYaw = CurRot.Yaw;
float newRoll = CurRot.Roll;
float newPitch = CurRot.Pitch;
if (bLockYaw)
newYaw = 0;
else if (!bLockYaw && (FPlatformMath::Abs(InverseRot.Yaw - CurRot.Yaw)) > YawTolerance)
newYaw = InverseRot.Yaw;
else
newYaw = CurRot.Yaw;
if (bLockPitch)
newPitch = 0;
else if (!bLockPitch && (FPlatformMath::Abs(InverseRot.Pitch - CurRot.Pitch)) > PitchTolerance)
newPitch = InverseRot.Pitch;
if (bLockRoll)
newRoll = 0;
else if (!bLockRoll && (FPlatformMath::Abs(InverseRot.Roll - CurRot.Roll)) > RollTolerance)
newRoll = InverseRot.Roll;
SetWorldRotation(FRotator(newPitch, newYaw, newRoll), false);
}
Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
}
<|start_filename|>Plugins/VRExpansionPlugin/Source/VRExpansionPlugin/Public/VRCharacter.h<|end_filename|>
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "VRCharacter.generated.h"
UCLASS()
class VREXPANSIONPLUGIN_API AVRCharacter : public ACharacter
{
GENERATED_BODY()
public:
AVRCharacter(const FObjectInitializer& ObjectInitializer = FObjectInitializer::Get());
// Overriding teleport so that it auto calls my controllers re-positioning
virtual bool TeleportTo(const FVector& DestLocation, const FRotator& DestRotation, bool bIsATest = false, bool bNoCheck = false) override;
UPROPERTY(Category = VRCharacter, VisibleAnywhere, Transient, BlueprintReadOnly, meta = (AllowPrivateAccess = "true"))
UVRRootComponent * VRRootReference;
UPROPERTY(Category = VRCharacter, VisibleAnywhere, BlueprintReadOnly, meta = (AllowPrivateAccess = "true"))
UReplicatedVRCameraComponent * VRReplicatedCamera;
UPROPERTY(Category = VRCharacter, VisibleAnywhere, BlueprintReadOnly, meta = (AllowPrivateAccess = "true"))
UParentRelativeAttachmentComponent * ParentRelativeAttachment;
UPROPERTY(Category = VRCharacter, VisibleAnywhere, BlueprintReadOnly, meta = (AllowPrivateAccess = "true"))
UGripMotionControllerComponent * LeftMotionController;
UPROPERTY(Category = VRCharacter, VisibleAnywhere, BlueprintReadOnly, meta = (AllowPrivateAccess = "true"))
UGripMotionControllerComponent * RightMotionController;
};
<|start_filename|>Plugins/VRExpansionPlugin/Source/VRExpansionPlugin/Public/VRCharacterMovementComponent.h<|end_filename|>
// Copyright 1998-2016 Epic Games, Inc. All Rights Reserved.
#pragma once
#include "AI/Navigation/NavigationAvoidanceTypes.h"
#include "AI/RVOAvoidanceInterface.h"
#include "Animation/AnimationAsset.h"
#include "Engine/EngineBaseTypes.h"
#include "Engine/EngineTypes.h"
#include "GameFramework/PawnMovementComponent.h"
#include "Interfaces/NetworkPredictionInterface.h"
#include "WorldCollision.h"
#include "VRCharacterMovementComponent.generated.h"
class FDebugDisplayInfo;
class ACharacter;
class UVRCharacterMovementComponent;
/** Shared pointer for easy memory management of FSavedMove_Character, for accumulating and replaying network moves. */
//typedef TSharedPtr<class FSavedMove_Character> FSavedMovePtr;
//=============================================================================
/**
* VRCharacterMovementComponent handles movement logic for the associated Character owner.
* It supports various movement modes including: walking, falling, swimming, flying, custom.
*
* Movement is affected primarily by current Velocity and Acceleration. Acceleration is updated each frame
* based on the input vector accumulated thus far (see UPawnMovementComponent::GetPendingInputVector()).
*
* Networking is fully implemented, with server-client correction and prediction included.
*
* @see ACharacter, UPawnMovementComponent
* @see https://docs.unrealengine.com/latest/INT/Gameplay/Framework/Pawn/Character/
*/
UCLASS()
class VREXPANSIONPLUGIN_API UVRCharacterMovementComponent : public UCharacterMovementComponent
{
GENERATED_BODY()
public:
UPROPERTY(BlueprintReadOnly, Category = VRMovement)
UVRRootComponent * VRRootCapsule;
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "VRCharacterMovementComponent")
bool bAllowWalkingThroughWalls;
/**
* Default UObject constructor.
*/
UVRCharacterMovementComponent(const FObjectInitializer& ObjectInitializer = FObjectInitializer::Get());
FVector GetImpartedMovementBaseVelocity() const override;
float ImmersionDepth() const override;
void VisualizeMovement() const override;
bool CanCrouch();
/*void UVRCharacterMovementComponent::PhysWalking(float deltaTime, int32 Iterations) override;
bool HasRootMotion() const
{
return RootMotionParams.bHasRootMotion;
}*/
// Cheating at the relative collision detection
void TickComponent(float DeltaTime, enum ELevelTick TickType, FActorComponentTickFunction *ThisTickFunction);
// Need to fill our capsule component variable here and override the default tick ordering
void SetUpdatedComponent(USceneComponent* NewUpdatedComponent) override;
// Always called with the capsulecomponent location, no idea why it doesn't just get it inside it already
void FindFloor(const FVector& CapsuleLocation, FFindFloorResult& OutFloorResult, bool bZeroDelta, const FHitResult* DownwardSweepResult) const override;
// Need to use actual capsule location for step up
bool StepUp(const FVector& GravDir, const FVector& Delta, const FHitResult &InHit, FStepDownResult* OutStepDownResult);
// Skip physics channels when looking for floor
bool FloorSweepTest(
FHitResult& OutHit,
const FVector& Start,
const FVector& End,
ECollisionChannel TraceChannel,
const struct FCollisionShape& CollisionShape,
const struct FCollisionQueryParams& Params,
const struct FCollisionResponseParams& ResponseParam
) const override;
// Don't step up on physics actors
virtual bool CanStepUp(const FHitResult& Hit) const override;
};
<|start_filename|>Plugins/VRExpansionPlugin/Source/VRExpansionPlugin/Private/VRCharacterMovementComponent.cpp<|end_filename|>
// Copyright 1998-2016 Epic Games, Inc. All Rights Reserved.
/*=============================================================================
Movement.cpp: Character movement implementation
=============================================================================*/
#include "VRExpansionPluginPrivatePCH.h"
#include "GameFramework/PhysicsVolume.h"
#include "GameFramework/GameNetworkManager.h"
#include "GameFramework/Character.h"
#include "VRCharacterMovementComponent.h"
#include "GameFramework/GameState.h"
#include "Components/PrimitiveComponent.h"
#include "Animation/AnimMontage.h"
#include "PhysicsEngine/DestructibleActor.h"
// @todo this is here only due to circular dependency to AIModule. To be removed
#include "Navigation/PathFollowingComponent.h"
#include "AI/Navigation/AvoidanceManager.h"
#include "Components/CapsuleComponent.h"
#include "Components/BrushComponent.h"
#include "Components/DestructibleComponent.h"
#include "Engine/DemoNetDriver.h"
#include "Engine/NetworkObjectList.h"
#include "PerfCountersHelpers.h"
/**
* Character stats
*/
DECLARE_CYCLE_STAT(TEXT("Char StepUp"), STAT_CharStepUp, STATGROUP_Character);
DECLARE_CYCLE_STAT(TEXT("Char FindFloor"), STAT_CharFindFloor, STATGROUP_Character);
// MAGIC NUMBERS
const float MAX_STEP_SIDE_Z = 0.08f; // maximum z value for the normal on the vertical side of steps
// Statics
namespace CharacterMovementComponentStatics
{
static const FName ImmersionDepthName = FName(TEXT("MovementComp_Character_ImmersionDepth"));
}
UVRCharacterMovementComponent::UVRCharacterMovementComponent(const FObjectInitializer& ObjectInitializer)
: Super(ObjectInitializer)
{
PostPhysicsTickFunction.bCanEverTick = true;
PostPhysicsTickFunction.bStartWithTickEnabled = false;
//PostPhysicsTickFunction.TickGroup = TG_PostPhysics;
PrimaryComponentTick.TickGroup = TG_PrePhysics;
VRRootCapsule = NULL;
// Keep this false
this->bTickBeforeOwner = false;
bAllowWalkingThroughWalls = false;
}
void UVRCharacterMovementComponent::TickComponent(float DeltaTime, enum ELevelTick TickType, FActorComponentTickFunction *ThisTickFunction)
{
// There are many better ways of handling this, I am just playing around for now
if (!bAllowWalkingThroughWalls && VRRootCapsule)
{
if (VRRootCapsule->bHadRelativeMovement)
{
// For now am faking a non move by adding an input vector of a super small amount in the direction of the relative movement
// This will cause the movement component to check for intersections even if no real movement was performed this frame
// Need a more nuanced solution eventually
AddInputVector(VRRootCapsule->DifferenceFromLastFrame * 0.01f);
}
}
Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
}
// No support for crouching code yet
bool UVRCharacterMovementComponent::CanCrouch()
{
return false;
}
bool UVRCharacterMovementComponent::CanStepUp(const FHitResult& Hit) const
{
if (!Hit.IsValidBlockingHit() || !HasValidData() || MovementMode == MOVE_Falling )
{
return false;
}
// No component for "fake" hits when we are on a known good base.
const UPrimitiveComponent* HitComponent = Hit.Component.Get();
if (!HitComponent)
{
return true;
}
if (HitComponent->IsSimulatingPhysics())
return false;
if (!HitComponent->CanCharacterStepUp(CharacterOwner))
{
return false;
}
// No actor for "fake" hits when we are on a known good base.
const AActor* HitActor = Hit.GetActor();
if (!HitActor)
{
return true;
}
if (!HitActor->CanBeBaseForCharacter(CharacterOwner))
{
return false;
}
return true;
}
void UVRCharacterMovementComponent::SetUpdatedComponent(USceneComponent* NewUpdatedComponent)
{
Super::SetUpdatedComponent(NewUpdatedComponent);
if (UpdatedComponent)
{
VRRootCapsule = Cast<UVRRootComponent>(UpdatedComponent);
// Stop the tick forcing
UpdatedComponent->PrimaryComponentTick.RemovePrerequisite(this, PrimaryComponentTick);
// Start forcing the root to tick before this component; the actor tick will still run after the movement component
// We want the root component to tick first because it sets its offset location during its tick
this->PrimaryComponentTick.AddPrerequisite(UpdatedComponent, UpdatedComponent->PrimaryComponentTick);
}
}
bool UVRCharacterMovementComponent::StepUp(const FVector& GravDir, const FVector& Delta, const FHitResult &InHit, FStepDownResult* OutStepDownResult)
{
SCOPE_CYCLE_COUNTER(STAT_CharStepUp);
if (!CanStepUp(InHit) || MaxStepHeight <= 0.f)
{
return false;
}
FVector OldLocation;
if (VRRootCapsule)
OldLocation = VRRootCapsule->OffsetComponentToWorld.GetLocation();
else
OldLocation = UpdatedComponent->GetComponentLocation();
float PawnRadius, PawnHalfHeight;
CharacterOwner->GetCapsuleComponent()->GetScaledCapsuleSize(PawnRadius, PawnHalfHeight);
// Don't bother stepping up if top of capsule is hitting something.
const float InitialImpactZ = InHit.ImpactPoint.Z;
if (InitialImpactZ > OldLocation.Z + (PawnHalfHeight*2 - PawnRadius))
{
return false;
}
// Don't step up if the impact is below us
if (InitialImpactZ <= OldLocation.Z - PawnHalfHeight)
{
return false;
}
if (GravDir.IsZero())
{
return false;
}
// Gravity should be a normalized direction
ensure(GravDir.IsNormalized());
float StepTravelUpHeight = MaxStepHeight;
float StepTravelDownHeight = StepTravelUpHeight;
const float StepSideZ = -1.f * (InHit.ImpactNormal | GravDir);
float PawnInitialFloorBaseZ = OldLocation.Z - PawnHalfHeight;
float PawnFloorPointZ = PawnInitialFloorBaseZ;
if (IsMovingOnGround() && CurrentFloor.IsWalkableFloor())
{
// Since we float a variable amount off the floor, we need to enforce max step height off the actual point of impact with the floor.
const float FloorDist = FMath::Max(0.f, CurrentFloor.FloorDist);
PawnInitialFloorBaseZ -= FloorDist;
StepTravelUpHeight = FMath::Max(StepTravelUpHeight - FloorDist, 0.f);
StepTravelDownHeight = (MaxStepHeight + MAX_FLOOR_DIST*2.f);
const bool bHitVerticalFace = !IsWithinEdgeTolerance(InHit.Location, InHit.ImpactPoint, PawnRadius);
if (!CurrentFloor.bLineTrace && !bHitVerticalFace)
{
PawnFloorPointZ = CurrentFloor.HitResult.ImpactPoint.Z;
}
else
{
// Base floor point is the base of the capsule moved down by how far we are hovering over the surface we are hitting.
PawnFloorPointZ -= CurrentFloor.FloorDist;
}
}
// Scope our movement updates, and do not apply them until all intermediate moves are completed.
FScopedMovementUpdate ScopedStepUpMovement(UpdatedComponent, EScopedUpdate::DeferredUpdates);
// step up - treat as vertical wall
FHitResult SweepUpHit(1.f);
const FQuat PawnRotation = UpdatedComponent->GetComponentQuat();
MoveUpdatedComponent(-GravDir * StepTravelUpHeight, PawnRotation, true, &SweepUpHit);
if (SweepUpHit.bStartPenetrating)
{
// Undo movement
ScopedStepUpMovement.RevertMove();
return false;
}
// step fwd
FHitResult Hit(1.f);
MoveUpdatedComponent(Delta, PawnRotation, true, &Hit);
// Check result of forward movement
if (Hit.bBlockingHit)
{
if (Hit.bStartPenetrating)
{
// Undo movement
ScopedStepUpMovement.RevertMove();
return false;
}
// If we hit something above us and also something ahead of us, we should notify about the upward hit as well.
// The forward hit will be handled later (in the bSteppedOver case below).
// In the case of hitting something above but not forward, we are not blocked from moving so we don't need the notification.
if (SweepUpHit.bBlockingHit && Hit.bBlockingHit)
{
HandleImpact(SweepUpHit);
}
// pawn ran into a wall
HandleImpact(Hit);
if (IsFalling())
{
return true;
}
// adjust and try again
const float ForwardHitTime = Hit.Time;
const float ForwardSlideAmount = SlideAlongSurface(Delta, 1.f - Hit.Time, Hit.Normal, Hit, true);
if (IsFalling())
{
ScopedStepUpMovement.RevertMove();
return false;
}
// If both the forward hit and the deflection got us nowhere, there is no point in this step up.
if (ForwardHitTime == 0.f && ForwardSlideAmount == 0.f)
{
ScopedStepUpMovement.RevertMove();
return false;
}
}
// Step down
MoveUpdatedComponent(GravDir * StepTravelDownHeight, UpdatedComponent->GetComponentQuat(), true, &Hit);
// If step down was initially penetrating abort the step up
if (Hit.bStartPenetrating)
{
ScopedStepUpMovement.RevertMove();
return false;
}
FStepDownResult StepDownResult;
if (Hit.IsValidBlockingHit())
{
// See if this step sequence would have allowed us to travel higher than our max step height allows.
const float DeltaZ = Hit.ImpactPoint.Z - PawnFloorPointZ;
if (DeltaZ > MaxStepHeight)
{
//UE_LOG(LogCharacterMovement, VeryVerbose, TEXT("- Reject StepUp (too high Height %.3f) up from floor base %f to %f"), DeltaZ, PawnInitialFloorBaseZ, NewLocation.Z);
ScopedStepUpMovement.RevertMove();
return false;
}
// Reject unwalkable surface normals here.
if (!IsWalkable(Hit))
{
// Reject if normal opposes movement direction
const bool bNormalTowardsMe = (Delta | Hit.ImpactNormal) < 0.f;
if (bNormalTowardsMe)
{
//UE_LOG(LogCharacterMovement, VeryVerbose, TEXT("- Reject StepUp (unwalkable normal %s opposed to movement)"), *Hit.ImpactNormal.ToString());
ScopedStepUpMovement.RevertMove();
return false;
}
// Also reject if we would end up being higher than our starting location by stepping down.
// It's fine to step down onto an unwalkable normal below us, we will just slide off. Rejecting those moves would prevent us from being able to walk off the edge.
if (Hit.Location.Z > OldLocation.Z)
{
//UE_LOG(LogCharacterMovement, VeryVerbose, TEXT("- Reject StepUp (unwalkable normal %s above old position)"), *Hit.ImpactNormal.ToString());
ScopedStepUpMovement.RevertMove();
return false;
}
}
// Reject moves where the downward sweep hit something very close to the edge of the capsule. This maintains consistency with FindFloor as well.
if (!IsWithinEdgeTolerance(Hit.Location, Hit.ImpactPoint, PawnRadius))
{
//UE_LOG(LogCharacterMovement, VeryVerbose, TEXT("- Reject StepUp (outside edge tolerance)"));
ScopedStepUpMovement.RevertMove();
return false;
}
// Don't step up onto invalid surfaces if traveling higher.
if (DeltaZ > 0.f && !CanStepUp(Hit))
{
//UE_LOG(LogCharacterMovement, VeryVerbose, TEXT("- Reject StepUp (up onto surface with !CanStepUp())"));
ScopedStepUpMovement.RevertMove();
return false;
}
// See if we can validate the floor as a result of this step down. In almost all cases this should succeed, and we can avoid computing the floor outside this method.
if (OutStepDownResult != NULL)
{
FindFloor(UpdatedComponent->GetComponentLocation(), StepDownResult.FloorResult, false, &Hit);
// Reject unwalkable normals if we end up higher than our initial height.
// It's fine to walk down onto an unwalkable surface, don't reject those moves.
if (Hit.Location.Z > OldLocation.Z)
{
// We should reject the floor result if we are trying to step up an actual step where we are not able to perch (this is rare).
// In those cases we should instead abort the step up and try to slide along the stair.
if (!StepDownResult.FloorResult.bBlockingHit && StepSideZ < MAX_STEP_SIDE_Z)
{
ScopedStepUpMovement.RevertMove();
return false;
}
}
StepDownResult.bComputedFloor = true;
}
}
// Copy step down result.
if (OutStepDownResult != NULL)
{
*OutStepDownResult = StepDownResult;
}
// Don't recalculate velocity based on this height adjustment, if considering vertical adjustments.
bJustTeleported |= !bMaintainHorizontalGroundVelocity;
return true;
}
void UVRCharacterMovementComponent::FindFloor(const FVector& CapsuleLocation, FFindFloorResult& OutFloorResult, bool bZeroDelta, const FHitResult* DownwardSweepResult) const
{
SCOPE_CYCLE_COUNTER(STAT_CharFindFloor);
// No collision, no floor...
if (!HasValidData() || !UpdatedComponent->IsQueryCollisionEnabled())
{
OutFloorResult.Clear();
return;
}
FVector UseCapsuleLocation = CapsuleLocation;
if (VRRootCapsule)
UseCapsuleLocation = VRRootCapsule->OffsetComponentToWorld.GetLocation();
check(CharacterOwner->GetCapsuleComponent());
// Increase height check slightly if walking, to prevent floor height adjustment from later invalidating the floor result.
const float HeightCheckAdjust = (IsMovingOnGround() ? MAX_FLOOR_DIST + KINDA_SMALL_NUMBER : -MAX_FLOOR_DIST);
float FloorSweepTraceDist = FMath::Max(MAX_FLOOR_DIST, MaxStepHeight + HeightCheckAdjust);
float FloorLineTraceDist = FloorSweepTraceDist;
bool bNeedToValidateFloor = true;
// Sweep floor
if (FloorLineTraceDist > 0.f || FloorSweepTraceDist > 0.f)
{
UCharacterMovementComponent* MutableThis = const_cast<UCharacterMovementComponent*>((UCharacterMovementComponent*)this);
if (bAlwaysCheckFloor || !bZeroDelta || bForceNextFloorCheck || bJustTeleported)
{
MutableThis->bForceNextFloorCheck = false;
ComputeFloorDist(UseCapsuleLocation, FloorLineTraceDist, FloorSweepTraceDist, OutFloorResult, CharacterOwner->GetCapsuleComponent()->GetScaledCapsuleRadius(), DownwardSweepResult);
}
else
{
// Force floor check if base has collision disabled or if it does not block us.
UPrimitiveComponent* MovementBase = CharacterOwner->GetMovementBase();
const AActor* BaseActor = MovementBase ? MovementBase->GetOwner() : NULL;
const ECollisionChannel CollisionChannel = UpdatedComponent->GetCollisionObjectType();
if (MovementBase != NULL)
{
MutableThis->bForceNextFloorCheck = !MovementBase->IsQueryCollisionEnabled()
|| MovementBase->GetCollisionResponseToChannel(CollisionChannel) != ECR_Block
|| MovementBaseUtility::IsDynamicBase(MovementBase);
}
const bool IsActorBasePendingKill = BaseActor && BaseActor->IsPendingKill();
if (!bForceNextFloorCheck && !IsActorBasePendingKill && MovementBase)
{
//UE_LOG(LogCharacterMovement, Log, TEXT("%s SKIP check for floor"), *CharacterOwner->GetName());
OutFloorResult = CurrentFloor;
bNeedToValidateFloor = false;
}
else
{
MutableThis->bForceNextFloorCheck = false;
ComputeFloorDist(UseCapsuleLocation, FloorLineTraceDist, FloorSweepTraceDist, OutFloorResult, CharacterOwner->GetCapsuleComponent()->GetScaledCapsuleRadius(), DownwardSweepResult);
}
}
}
// OutFloorResult.HitResult is now the result of the vertical floor check.
// See if we should try to "perch" at this location.
if (bNeedToValidateFloor && OutFloorResult.bBlockingHit && !OutFloorResult.bLineTrace)
{
const bool bCheckRadius = true;
if (ShouldComputePerchResult(OutFloorResult.HitResult, bCheckRadius))
{
float MaxPerchFloorDist = FMath::Max(MAX_FLOOR_DIST, MaxStepHeight + HeightCheckAdjust);
if (IsMovingOnGround())
{
MaxPerchFloorDist += FMath::Max(0.f, PerchAdditionalHeight);
}
FFindFloorResult PerchFloorResult;
if (ComputePerchResult(GetValidPerchRadius(), OutFloorResult.HitResult, MaxPerchFloorDist, PerchFloorResult))
{
// Don't allow the floor distance adjustment to push us up too high, or we will move beyond the perch distance and fall next time.
const float AvgFloorDist = (MIN_FLOOR_DIST + MAX_FLOOR_DIST) * 0.5f;
const float MoveUpDist = (AvgFloorDist - OutFloorResult.FloorDist);
if (MoveUpDist + PerchFloorResult.FloorDist >= MaxPerchFloorDist)
{
OutFloorResult.FloorDist = AvgFloorDist;
}
// If the regular capsule is on an unwalkable surface but the perched one would allow us to stand, override the normal to be one that is walkable.
if (!OutFloorResult.bWalkableFloor)
{
OutFloorResult.SetFromLineTrace(PerchFloorResult.HitResult, OutFloorResult.FloorDist, FMath::Min(PerchFloorResult.FloorDist, PerchFloorResult.LineDist), true);
}
}
else
{
// We had no floor (or an invalid one because it was unwalkable), and couldn't perch here, so invalidate floor (which will cause us to start falling).
OutFloorResult.bWalkableFloor = false;
}
}
}
}
bool UVRCharacterMovementComponent::FloorSweepTest(
FHitResult& OutHit,
const FVector& Start,
const FVector& End,
ECollisionChannel TraceChannel,
const struct FCollisionShape& CollisionShape,
const struct FCollisionQueryParams& Params,
const struct FCollisionResponseParams& ResponseParam
) const
{
bool bBlockingHit = false;
if (!bUseFlatBaseForFloorChecks)
{
TArray<FHitResult> OutHits;
GetWorld()->SweepMultiByChannel(OutHits, Start, End, FQuat::Identity, TraceChannel, CollisionShape, Params, ResponseParam);
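// Take the first blocking hit that did not come from a physics-simulating component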
for (int i = 0; i < OutHits.Num(); i++)
{
if (OutHits[i].bBlockingHit && (OutHits[i].Component.IsValid() && !OutHits[i].Component->IsSimulatingPhysics()))
{
OutHit = OutHits[i];
bBlockingHit = true;
break;
}
}
//bBlockingHit = GetWorld()->SweepSingleByChannel(OutHit, Start, End, FQuat::Identity, TraceChannel, CollisionShape, Params, ResponseParam);
}
else
{
// Test with a box that is enclosed by the capsule.
const float CapsuleRadius = CollisionShape.GetCapsuleRadius();
const float CapsuleHeight = CollisionShape.GetCapsuleHalfHeight();
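// 0.707 ~= 1/sqrt(2), so the box's corners lie on the capsule radius and the box stays enclosed by the capsule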
const FCollisionShape BoxShape = FCollisionShape::MakeBox(FVector(CapsuleRadius * 0.707f, CapsuleRadius * 0.707f, CapsuleHeight));
// First test with the box rotated so the corners are along the major axes (ie rotated 45 degrees).
TArray<FHitResult> OutHits;
GetWorld()->SweepMultiByChannel(OutHits, Start, End, FQuat(FVector(0.f, 0.f, -1.f), PI * 0.25f), TraceChannel, BoxShape, Params, ResponseParam);
for (int i = 0; i < OutHits.Num(); i++)
{
if (OutHits[i].bBlockingHit && (OutHits[i].Component.IsValid() && !OutHits[i].Component->IsSimulatingPhysics()))
{
OutHit = OutHits[i];
bBlockingHit = true;
break;
}
}
//bBlockingHit = GetWorld()->SweepSingleByChannel(OutHit, Start, End, FQuat(FVector(0.f, 0.f, -1.f), PI * 0.25f), TraceChannel, BoxShape, Params, ResponseParam);
if (!bBlockingHit)
{
// Test again with the same box, not rotated.
OutHit.Reset(1.f, false);
TArray<FHitResult> OutHits;
GetWorld()->SweepMultiByChannel(OutHits, Start, End, FQuat::Identity, TraceChannel, BoxShape, Params, ResponseParam);
for (int i = 0; i < OutHits.Num(); i++)
{
if (OutHits[i].bBlockingHit && (OutHits[i].Component.IsValid() && !OutHits[i].Component->IsSimulatingPhysics()))
{
OutHit = OutHits[i];
bBlockingHit = true;
break;
}
}
//bBlockingHit = GetWorld()->SweepSingleByChannel(OutHit, Start, End, FQuat::Identity, TraceChannel, BoxShape, Params, ResponseParam);
}
}
return bBlockingHit;
}
FVector UVRCharacterMovementComponent::GetImpartedMovementBaseVelocity() const
{
FVector Result = FVector::ZeroVector;
if (CharacterOwner)
{
UPrimitiveComponent* MovementBase = CharacterOwner->GetMovementBase();
if (MovementBaseUtility::IsDynamicBase(MovementBase))
{
FVector BaseVelocity = MovementBaseUtility::GetMovementBaseVelocity(MovementBase, CharacterOwner->GetBasedMovement().BoneName);
if (bImpartBaseAngularVelocity)
{
const FVector CharacterBasePosition = (UpdatedComponent->GetComponentLocation()/* - FVector(0.f, 0.f, CharacterOwner->GetCapsuleComponent()->GetScaledCapsuleHalfHeight())*/);
const FVector BaseTangentialVel = MovementBaseUtility::GetMovementBaseTangentialVelocity(MovementBase, CharacterOwner->GetBasedMovement().BoneName, CharacterBasePosition);
BaseVelocity += BaseTangentialVel;
}
if (bImpartBaseVelocityX)
{
Result.X = BaseVelocity.X;
}
if (bImpartBaseVelocityY)
{
Result.Y = BaseVelocity.Y;
}
if (bImpartBaseVelocityZ)
{
Result.Z = BaseVelocity.Z;
}
}
}
return Result;
}
float UVRCharacterMovementComponent::ImmersionDepth() const
{
float depth = 0.f;
if (CharacterOwner && GetPhysicsVolume()->bWaterVolume)
{
const float CollisionHalfHeight = CharacterOwner->GetSimpleCollisionHalfHeight();
if ((CollisionHalfHeight == 0.f) || (Buoyancy == 0.f))
{
depth = 1.f;
}
else
{
UBrushComponent* VolumeBrushComp = GetPhysicsVolume()->GetBrushComponent();
FHitResult Hit(1.f);
if (VolumeBrushComp)
{
const FVector TraceStart = UpdatedComponent->GetComponentLocation() + FVector(0.f, 0.f, CollisionHalfHeight*2);
const FVector TraceEnd = UpdatedComponent->GetComponentLocation();// -FVector(0.f, 0.f, CollisionHalfHeight);
FCollisionQueryParams NewTraceParams(CharacterMovementComponentStatics::ImmersionDepthName, true);
VolumeBrushComp->LineTraceComponent(Hit, TraceStart, TraceEnd, NewTraceParams);
}
depth = (Hit.Time == 1.f) ? 1.f : (1.f - Hit.Time);
}
}
return depth;
}
void UVRCharacterMovementComponent::VisualizeMovement() const
{
if (CharacterOwner == nullptr)
{
return;
}
#if !(UE_BUILD_SHIPPING || UE_BUILD_TEST)
const FVector TopOfCapsule = GetActorLocation() + FVector(0.f, 0.f, CharacterOwner->GetSimpleCollisionHalfHeight()*2);
float HeightOffset = 0.f;
// Position
{
const FColor DebugColor = FColor::White;
const FVector DebugLocation = TopOfCapsule + FVector(0.f, 0.f, HeightOffset);
FString DebugText = FString::Printf(TEXT("Position: %s"), *GetActorLocation().ToCompactString());
DrawDebugString(GetWorld(), DebugLocation, DebugText, nullptr, DebugColor, 0.f, true);
}
// Velocity
{
const FColor DebugColor = FColor::Green;
HeightOffset += 15.f;
const FVector DebugLocation = TopOfCapsule + FVector(0.f, 0.f, HeightOffset);
DrawDebugDirectionalArrow(GetWorld(), DebugLocation, DebugLocation + Velocity,
100.f, DebugColor, false, -1.f, (uint8)'\000', 10.f);
FString DebugText = FString::Printf(TEXT("Velocity: %s (Speed: %.2f)"), *Velocity.ToCompactString(), Velocity.Size());
DrawDebugString(GetWorld(), DebugLocation + FVector(0.f, 0.f, 5.f), DebugText, nullptr, DebugColor, 0.f, true);
}
// Acceleration
{
const FColor DebugColor = FColor::Yellow;
HeightOffset += 15.f;
const float MaxAccelerationLineLength = 200.f;
const float CurrentMaxAccel = GetMaxAcceleration();
const float CurrentAccelAsPercentOfMaxAccel = CurrentMaxAccel > 0.f ? Acceleration.Size() / CurrentMaxAccel : 1.f;
const FVector DebugLocation = TopOfCapsule + FVector(0.f, 0.f, HeightOffset);
DrawDebugDirectionalArrow(GetWorld(), DebugLocation,
DebugLocation + Acceleration.GetSafeNormal(SMALL_NUMBER) * CurrentAccelAsPercentOfMaxAccel * MaxAccelerationLineLength,
25.f, DebugColor, false, -1.f, (uint8)'\000', 8.f);
FString DebugText = FString::Printf(TEXT("Acceleration: %s"), *Acceleration.ToCompactString());
DrawDebugString(GetWorld(), DebugLocation + FVector(0.f, 0.f, 5.f), DebugText, nullptr, DebugColor, 0.f, true);
}
// Movement Mode
{
const FColor DebugColor = FColor::Blue;
HeightOffset += 20.f;
const FVector DebugLocation = TopOfCapsule + FVector(0.f, 0.f, HeightOffset);
FString DebugText = FString::Printf(TEXT("MovementMode: %s"), *GetMovementName());
DrawDebugString(GetWorld(), DebugLocation, DebugText, nullptr, DebugColor, 0.f, true);
}
// Root motion (additive)
if (CurrentRootMotion.HasAdditiveVelocity())
{
const FColor DebugColor = FColor::Cyan;
HeightOffset += 15.f;
const FVector DebugLocation = TopOfCapsule + FVector(0.f, 0.f, HeightOffset);
FVector CurrentAdditiveVelocity(FVector::ZeroVector);
CurrentRootMotion.AccumulateAdditiveRootMotionVelocity(0.f, *CharacterOwner, *this, CurrentAdditiveVelocity);
DrawDebugDirectionalArrow(GetWorld(), DebugLocation, DebugLocation + CurrentAdditiveVelocity,
100.f, DebugColor, false, -1.f, (uint8)'\000', 10.f);
FString DebugText = FString::Printf(TEXT("RootMotionAdditiveVelocity: %s (Speed: %.2f)"),
*CurrentAdditiveVelocity.ToCompactString(), CurrentAdditiveVelocity.Size());
DrawDebugString(GetWorld(), DebugLocation + FVector(0.f, 0.f, 5.f), DebugText, nullptr, DebugColor, 0.f, true);
}
// Root motion (override)
if (CurrentRootMotion.HasOverrideVelocity())
{
const FColor DebugColor = FColor::Green;
HeightOffset += 15.f;
const FVector DebugLocation = TopOfCapsule + FVector(0.f, 0.f, HeightOffset);
FString DebugText = FString::Printf(TEXT("Has Override RootMotion"));
DrawDebugString(GetWorld(), DebugLocation, DebugText, nullptr, DebugColor, 0.f, true);
}
#endif // !(UE_BUILD_SHIPPING || UE_BUILD_TEST)
}
| ProteusVR/SteamVR_Template |
<|start_filename|>lib/code_corps/policy/stripe_connect_account.ex<|end_filename|>
defmodule CodeCorps.Policy.StripeConnectAccount do
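  @moduledoc """
  Contains authorization policies for `StripeConnectAccount` records.
  A user may show, create, or update a Stripe Connect account only when they
  own the associated organization.
  """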
import CodeCorps.Policy.Helpers, only: [get_organization: 1, owned_by?: 2]
alias CodeCorps.{StripeConnectAccount, User}
def show?(%User{} = user, %StripeConnectAccount{} = stripe_connect_account),
do: stripe_connect_account |> get_organization() |> owned_by?(user)
def create?(%User{} = user, %{} = params),
do: params |> get_organization() |> owned_by?(user)
def update?(%User{} = user, %StripeConnectAccount{} = stripe_connect_account),
do: stripe_connect_account |> get_organization() |> owned_by?(user)
end
<|start_filename|>priv/repo/migrations/20171119004204_create_github_issue_assignees.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.CreateGithubIssueAssignees do
use Ecto.Migration
def change do
create table(:github_issue_assignees) do
add :github_issue_id, references(:github_issues, on_delete: :nothing)
add :github_user_id, references(:github_users, on_delete: :nothing)
timestamps()
end
create index(:github_issue_assignees, [:github_issue_id])
create index(:github_issue_assignees, [:github_user_id])
create unique_index(:github_issue_assignees, [:github_issue_id, :github_user_id])
end
end
<|start_filename|>test/lib/code_corps/github/utils/result_aggregator_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Utils.ResultAggregatorTest do
use CodeCorps.DbAccessCase
alias CodeCorps.{
Comment,
GitHub.Utils.ResultAggregator,
GithubRepo,
Task,
}
alias Ecto.Changeset
describe "aggregate/1" do
test "aggregates Task results correctly" do
record = %Task{}
good = {:ok, record}
changeset = %Changeset{}
bad = {:error, changeset}
assert [] |> ResultAggregator.aggregate == {:ok, []}
assert [good] |> ResultAggregator.aggregate == {:ok, [record]}
assert [good, good] |> ResultAggregator.aggregate == {:ok, [record, record]}
assert [good, bad] |> ResultAggregator.aggregate == {:error, {[record], [changeset]}}
assert [bad] |> ResultAggregator.aggregate == {:error, {[], [changeset]}}
assert [bad, bad] |> ResultAggregator.aggregate == {:error, {[], [changeset, changeset]}}
end
test "aggregates Comment results correctly" do
record = %Comment{}
good = {:ok, record}
changeset = %Changeset{}
bad = {:error, changeset}
assert [] |> ResultAggregator.aggregate == {:ok, []}
assert [good] |> ResultAggregator.aggregate == {:ok, [record]}
assert [good, good] |> ResultAggregator.aggregate == {:ok, [record, record]}
assert [good, bad] |> ResultAggregator.aggregate == {:error, {[record], [changeset]}}
assert [bad] |> ResultAggregator.aggregate == {:error, {[], [changeset]}}
assert [bad, bad] |> ResultAggregator.aggregate == {:error, {[], [changeset, changeset]}}
end
test "aggregates GithubRepo results correctly" do
record = %GithubRepo{}
good = {:ok, record}
changeset = %Changeset{}
bad = {:error, changeset}
assert [] |> ResultAggregator.aggregate == {:ok, []}
assert [good] |> ResultAggregator.aggregate == {:ok, [record]}
assert [good, good] |> ResultAggregator.aggregate == {:ok, [record, record]}
assert [good, bad] |> ResultAggregator.aggregate == {:error, {[record], [changeset]}}
assert [bad] |> ResultAggregator.aggregate == {:error, {[], [changeset]}}
assert [bad, bad] |> ResultAggregator.aggregate == {:error, {[], [changeset, changeset]}}
end
end
end
<|start_filename|>test/lib/code_corps/policy/stripe_connect_account_test.exs<|end_filename|>
defmodule CodeCorps.Policy.StripeConnectAccountTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.StripeConnectAccount,
only: [show?: 2, create?: 2, update?: 2]
import CodeCorps.StripeConnectAccount, only: [create_changeset: 2]
alias CodeCorps.StripeConnectAccount
describe "show?" do
test "returns true when user is owner of organization" do
user = insert(:user)
organization = insert(:organization, owner: user)
stripe_connect_account = insert(:stripe_connect_account, organization: organization)
assert show?(user, stripe_connect_account)
end
test "returns false otherwise" do
user = insert(:user)
organization = insert(:organization)
stripe_connect_account = insert(:stripe_connect_account, organization: organization)
refute show?(user, stripe_connect_account)
end
end
describe "create?" do
test "returns true when user is owner of organization" do
user = insert(:user)
organization = insert(:organization, owner: user)
changeset = create_changeset(%StripeConnectAccount{}, %{organization_id: organization.id})
assert create?(user, changeset)
end
test "returns false otherwise" do
user = insert(:user)
organization = insert(:organization)
changeset = create_changeset(%StripeConnectAccount{}, %{organization_id: organization.id})
refute create?(user, changeset)
end
end
describe "update?" do
test "returns true when user is owner of organization" do
user = insert(:user)
organization = insert(:organization, owner: user)
stripe_connect_account = insert(:stripe_connect_account, organization: organization)
assert update?(user, stripe_connect_account)
end
test "returns false otherwise" do
user = insert(:user)
organization = insert(:organization)
stripe_connect_account = insert(:stripe_connect_account, organization: organization)
refute update?(user, stripe_connect_account)
end
end
end
<|start_filename|>lib/code_corps_web/plugs/analytics_identify.ex<|end_filename|>
defmodule CodeCorpsWeb.Plug.AnalyticsIdentify do
@moduledoc """
Plug used to identify the current user on Segment.com using `CodeCorps.Analytics.SegmentTracker`.
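A minimal usage sketch (the pipeline this is plugged into may differ in the
actual router):
    plug CodeCorpsWeb.Plug.AnalyticsIdentify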
"""
def init(opts), do: opts
def call(conn, _opts), do: conn |> identify
defp identify(%{assigns: %{current_user: user}} = conn) do
CodeCorps.Analytics.SegmentTracker.identify(user)
conn
end
defp identify(conn), do: conn
end
<|start_filename|>test/lib/code_corps/analytics/segment_data_extractor_test.exs<|end_filename|>
defmodule CodeCorps.Analytics.SegmentDataExtractorTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.Factories
alias CodeCorps.Analytics.SegmentDataExtractor
describe "get_project_id/1" do
test "should return correct id for project user" do
project_user = build(:project_user)
project_id = "project_#{project_user.project_id}"
assert SegmentDataExtractor.get_project_id(project_user) == project_id
end
test "should return nil for unknown resource" do
assert SegmentDataExtractor.get_project_id(%{}) == nil
end
end
end
<|start_filename|>lib/code_corps/github/event/issue_comment/validator.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.IssueComment.Validator do
@moduledoc ~S"""
In charge of validating a GitHub IssueComment webhook payload.
https://developer.github.com/v3/activity/events/types/#issuecommentevent
"""
@behaviour CodeCorps.GitHub.Event.Validator
@doc ~S"""
Returns `true` if all keys required to properly handle an IssueComment
webhook are present in the provided payload.
"""
@impl CodeCorps.GitHub.Event.Validator
@spec valid?(map) :: boolean
def valid?(%{
"action" => _,
"issue" => %{
"id" => _, "title" => _, "body" => _, "state" => _,
"user" => %{"id" => _}
},
"comment" => %{
"id" => _, "body" => _,
"user" => %{"id" => _}
},
"repository" => %{"id" => _}}), do: true
def valid?(_), do: false
end
<|start_filename|>priv/repo/migrations/20171205161052_create_messages.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.CreateMessages do
use Ecto.Migration
def change do
create table(:messages) do
add :body, :text
add :initiated_by, :string
add :subject, :text
add :author_id, references(:users, on_delete: :nothing)
add :project_id, references(:projects, on_delete: :nothing)
timestamps()
end
create index(:messages, [:author_id])
create index(:messages, [:initiated_by])
create index(:messages, [:project_id])
end
end
<|start_filename|>priv/repo/migrations/20161019090945_add_stripe_customers_cards_tables.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddStripeCustomersCardsTables do
use Ecto.Migration
def change do
create table(:stripe_customers) do
add :created, :utc_datetime
add :currency, :string
add :delinquent, :boolean
add :email, :string
add :id_from_stripe, :string, null: false
add :user_id, references(:users, on_delete: :nothing), null: false
timestamps()
end
create unique_index(:stripe_customers, [:id_from_stripe])
create unique_index(:stripe_customers, [:user_id])
create table(:stripe_cards) do
add :brand, :string
add :customer_id_from_stripe, :string
add :cvc_check, :string
add :exp_month, :integer
add :exp_year, :integer
add :id_from_stripe, :string, null: false
add :last4, :string
add :name, :string
add :user_id, references(:users, on_delete: :nothing), null: false
timestamps()
end
create index(:stripe_cards, [:user_id])
create unique_index(:stripe_cards, [:id_from_stripe])
end
end
<|start_filename|>lib/code_corps/model/donation_goal.ex<|end_filename|>
defmodule CodeCorps.DonationGoal do
@moduledoc """
Represents one of many donation goals which can belong to a project
## Fields
* amount - donation amount, in cents, needed to reach the goal
* current - indicates if the goal is currently active
* description - a longer, more informative description of the goal
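## Example
A minimal sketch of building a creation changeset (the values are
illustrative only):
    %CodeCorps.DonationGoal{}
    |> CodeCorps.DonationGoal.create_changeset(%{amount: 50_000, description: "Cover hosting costs", project_id: 1})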
"""
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "donation_goals" do
field :amount, :integer
field :current, :boolean, default: false
field :description, :string
belongs_to :project, CodeCorps.Project
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
@spec create_changeset(struct, map) :: Ecto.Changeset.t
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:amount, :description, :project_id])
|> validate_required([:amount, :description, :project_id])
|> validate_number(:amount, greater_than: 0)
|> assoc_constraint(:project)
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
@spec update_changeset(struct, map) :: Ecto.Changeset.t
def update_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:amount, :description])
|> validate_required([:amount, :description])
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
@spec set_current_changeset(struct, map) :: Ecto.Changeset.t
def set_current_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:current])
|> validate_required([:current])
|> unique_constraint(:current, name: :donation_goals_current_unique_to_project)
end
end
<|start_filename|>test/lib/code_corps/transition/user_state_test.exs<|end_filename|>
defmodule CodeCorps.Transition.UserStateTest do
use ExUnit.Case, async: true
alias CodeCorps.Transition.UserState
describe "next/2" do
test "returns nil if state_transition is nil" do
assert UserState.next("foo", nil) == nil
end
test "returns {:ok, next_state} for valid transitions" do
assert UserState.next("signed_up", "edit_profile") == {:ok, "edited_profile"}
assert UserState.next("edited_profile", "select_categories") == {:ok, "selected_categories"}
assert UserState.next("edited_profile", "skip_categories") == {:ok, "skipped_categories"}
assert UserState.next("selected_categories", "select_roles") == {:ok, "selected_roles"}
assert UserState.next("selected_categories", "skip_roles") == {:ok, "skipped_roles"}
assert UserState.next("skipped_categories", "select_roles") == {:ok, "selected_roles"}
assert UserState.next("skipped_categories", "skip_roles") == {:ok, "skipped_roles"}
assert UserState.next("selected_roles", "select_skills") == {:ok, "selected_skills"}
assert UserState.next("selected_roles", "skip_skills") == {:ok, "skipped_skills"}
assert UserState.next("skipped_roles", "select_skills") == {:ok, "selected_skills"}
assert UserState.next("skipped_roles", "skip_skills") == {:ok, "skipped_skills"}
end
test "returns {:error, message} for invalid transitions" do
assert UserState.next("foo", "bar") == {:error, "invalid transition bar from foo"}
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/stripe_connect_account_service_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.StripeConnectAccountServiceTest do
use CodeCorps.StripeCase
alias CodeCorps.{StripeConnectAccount, StripeExternalAccount}
alias CodeCorps.StripeService.StripeConnectAccountService
describe "create" do
test "creates a StripeConnectAccount" do
organization = insert(:organization)
attributes = %{
"country" => "US",
"organization_id" => organization.id,
"tos_acceptance_date" => 123456
}
{:ok, %StripeConnectAccount{} = connect_account} =
StripeConnectAccountService.create(attributes)
assert connect_account.country == "US"
assert connect_account.organization_id == organization.id
assert connect_account.type == "custom"
assert connect_account.tos_acceptance_date == 123456
end
end
describe "update/2" do
test "assigns the external_account property to the record, creates external account" do
connect_account = insert(:stripe_connect_account)
{:ok, %StripeConnectAccount{} = updated_account} =
StripeConnectAccountService.update(connect_account, %{"external_account" => "ba_123"})
assert updated_account.external_account == "ba_123"
assert Repo.get_by(StripeExternalAccount, stripe_connect_account_id: connect_account.id)
end
end
describe "update_from_stripe/1" do
test "updates connect account with stripe information, creates external_account" do
# we use a preset fixture from StripeTesting
# the fixture is for multiple external accounts, because we want to make sure
# that part is not failing due to us only supporting a has_one relationship
id_from_stripe = "account_with_multiple_external_accounts"
connect_account = insert(:stripe_connect_account, id_from_stripe: id_from_stripe)
{:ok, %StripeConnectAccount{} = updated_account} =
StripeConnectAccountService.update_from_stripe(id_from_stripe)
assert updated_account.business_name == "Some Company Inc."
assert Repo.get_by(StripeExternalAccount, stripe_connect_account_id: connect_account.id)
end
test "deletes old external account, if it exists" do
# we use a preset fixture from StripeTesting
# the fixture is for multiple external accounts, because we want to make sure
# that part is not failing due to us only supporting a has_one relationship
id_from_stripe = "account_with_multiple_external_accounts"
connect_account = insert(:stripe_connect_account, id_from_stripe: id_from_stripe)
external_account = insert(:stripe_external_account, stripe_connect_account: connect_account)
{:ok, %StripeConnectAccount{} = updated_account} =
StripeConnectAccountService.update_from_stripe(id_from_stripe)
assert updated_account.business_name == "Some Company Inc."
assert Repo.get(StripeExternalAccount, external_account.id) == nil
assert Repo.get_by(StripeExternalAccount, stripe_connect_account_id: connect_account.id)
end
end
end
<|start_filename|>lib/code_corps/model/github_repo.ex<|end_filename|>
defmodule CodeCorps.GithubRepo do
use CodeCorps.Model
alias Ecto.Changeset
@type t :: %__MODULE__{}
schema "github_repos" do
field :github_account_avatar_url, :string
field :github_account_id, :integer
field :github_account_login, :string
field :github_account_type, :string
field :github_id, :integer
field :name, :string
field :sync_state, :string, default: "unsynced"
field :syncing_comments_count, :integer, default: 0
field :syncing_issues_count, :integer, default: 0
field :syncing_pull_requests_count, :integer, default: 0
belongs_to :github_app_installation, CodeCorps.GithubAppInstallation
belongs_to :project, CodeCorps.Project
has_many :github_comments, CodeCorps.GithubComment
has_many :github_issues, CodeCorps.GithubIssue
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [
:github_account_id, :github_account_avatar_url, :github_account_login,
:github_account_type, :github_app_installation_id, :github_id, :name,
:project_id, :sync_state, :syncing_comments_count, :syncing_issues_count,
:syncing_pull_requests_count
])
|> validate_required([
:github_account_id, :github_account_avatar_url, :github_account_login,
:github_account_type, :github_id, :name
])
|> assoc_constraint(:github_app_installation)
|> assoc_constraint(:project)
end
def update_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:project_id])
|> assoc_constraint(:project)
|> maybe_reset_sync_state()
|> validate_inclusion(:sync_state, sync_states())
end
def update_sync_changeset(struct, params) do
struct
|> changeset(params)
|> validate_inclusion(:sync_state, sync_states())
end
def sync_states do
~w{
unsynced
fetching_pull_requests errored_fetching_pull_requests
syncing_github_pull_requests errored_syncing_github_pull_requests
fetching_issues errored_fetching_issues
syncing_github_issues errored_syncing_github_issues
fetching_comments errored_fetching_comments
syncing_github_comments errored_syncing_github_comments
syncing_users errored_syncing_users
syncing_tasks errored_syncing_tasks
syncing_comments errored_syncing_comments
synced
}
end
defp maybe_reset_sync_state(changeset) do
case changeset |> Changeset.get_field(:project_id) do
nil -> changeset |> Changeset.put_change(:sync_state, "unsynced")
_ -> changeset
end
end
end
<|start_filename|>lib/code_corps_web/views/user_role_view.ex<|end_filename|>
defmodule CodeCorpsWeb.UserRoleView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
has_one :user, type: "user", field: :user_id
has_one :role, type: "role", field: :role_id
end
<|start_filename|>priv/repo/migrations/20170814131722_link_task_to_github.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.LinkTaskToGithub do
use Ecto.Migration
def change do
alter table(:tasks) do
add :github_repo_id, references(:github_repos)
end
rename table(:tasks), :github_id, to: :github_issue_number
end
end
<|start_filename|>test/support/github/test_helpers.ex<|end_filename|>
defmodule CodeCorps.GitHub.TestHelpers do
import CodeCorps.Factories
@spec load_endpoint_fixture(String.t) :: map
def load_endpoint_fixture(id) do
"./test/fixtures/github/endpoints/#{id}.json" |> File.read! |> Poison.decode!
end
@spec load_event_fixture(String.t) :: map
def load_event_fixture(id) do
"./test/fixtures/github/events/#{id}.json" |> File.read! |> Poison.decode!
end
@spec setup_coderly_repo :: %CodeCorps.GithubRepo{}
def setup_coderly_repo do
# Data is from the coderly/github-app-testing repository
#
# Uses:
#
# - the real repository owner
# - the real repository name
# - the real GitHub user id of the repository owner
# - the real GitHub App id
# - the real GitHub repo id
setup_real_repo("coderly", "github-app-testing", 321667, 63365, 108674236)
end
@spec setup_real_repo(String.t, String.t, Integer.t, Integer.t, Integer.t) :: %CodeCorps.GithubRepo{}
def setup_real_repo(repo_owner, repo_name, repo_owner_id, app_github_id, repo_github_id) do
# Create the user
#
# Simulates:
#
# - user (the repo owner) connecting their account with GitHub
github_user = insert(:github_user, email: nil, github_id: repo_owner_id, avatar_url: "https://avatars3.githubusercontent.com/u/#{repo_owner_id}?v=4", type: "User", username: repo_owner)
user = insert(:user, github_avatar_url: "https://avatars3.githubusercontent.com/u/#{repo_owner_id}?v=4", github_id: repo_owner_id, github_user: github_user, github_username: repo_owner, type: "user")
# Create the organization and project for that organization
#
# Simulates:
#
# - user creating an organization
# - organization creating a project
# - project being bootstrapped with an inbox task list to receive new tasks
# - project being bootstrapped with a done task list to receive closed tasks
# - project being bootstrapped with a pull_requests tasks list to receive pull requests
organization = insert(:organization, owner: user)
project = insert(:project, organization: organization)
insert(:task_list, project: project, inbox: true)
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, pull_requests: true)
# Create the GitHub App installation on the organization
#
# Simulates:
#
# - installation webhook
# - user installing the organization
github_app_installation = insert(:github_app_installation, github_account_login: repo_owner, github_id: app_github_id, project: project, user: user)
insert(:organization_github_app_installation, github_app_installation: github_app_installation, organization: organization)
# Create the repo on the installation
#
# Simulates:
#
# - installation or installation_repositories webhook
# - user connecting the repository to the project
github_repo = insert(:github_repo, github_app_installation: github_app_installation, name: repo_name, github_account_id: repo_owner_id, github_account_avatar_url: "https://avatars3.githubusercontent.com/u/#{repo_owner_id}?v=4", github_account_type: "User", github_id: repo_github_id, project: project)
# Return the %CodeCorps.GithubRepo{} record
github_repo
end
@doc ~S"""
Allows setting a mock GitHub API module for use in specific tests.
To use it, define a module containing the functions expected to be called,
then pass the block of code that calls the API into the macro:
```
defmodule MyApiModule do
def some_function, do: "foo"
end
with_mock_api(MyApiModule) do
execute_code_calling_api
end
```
"""
@spec with_mock_api(module, do: function) :: any
defmacro with_mock_api(mock_module, do: block) do
quote do
old_mock = Application.get_env(:code_corps, :github)
Application.put_env(:code_corps, :github, unquote(mock_module))
unquote(block)
Application.put_env(:code_corps, :github, old_mock)
end
end
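  @doc ~S"""
  Temporarily swaps the configured GitHub API module for the real
  `CodeCorps.GitHub.API.Gateway` while the given block runs, restoring the
  previous module afterwards. Intended for tests tagged `acceptance: true`:
  ```
  with_real_api do
    execute_code_calling_the_real_api
  end
  ```
  """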
@spec with_real_api(do: function) :: any
defmacro with_real_api(do: block) do
quote do
old_mock = Application.get_env(:code_corps, :github)
Application.put_env(:code_corps, :github, CodeCorps.GitHub.API.Gateway)
unquote(block)
Application.put_env(:code_corps, :github, old_mock)
end
end
end
<|start_filename|>priv/repo/migrations/20170725060612_add_github_account_fields_to_github_app_installation.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddGithubAccountFieldsToGithubAppInstallation do
@moduledoc """
These fields are used to hold account information, to be displayed in the
client UI.
"""
use Ecto.Migration
def change do
alter table(:github_app_installations) do
add :github_account_avatar_url, :string
add :github_account_id, :integer
add :github_account_login, :string
add :github_account_type, :string
end
end
end
<|start_filename|>lib/code_corps/stripe_service/stripe_connect_external_account_service.ex<|end_filename|>
defmodule CodeCorps.StripeService.StripeConnectExternalAccountService do
@moduledoc """
Used to perform actions on a `StripeExternalAccount` record while
propagating to and from the associated `Stripe.BankAccount` record.
"""
alias CodeCorps.{Repo, StripeConnectAccount, StripeExternalAccount}
alias CodeCorps.StripeService.Adapters.StripeExternalAccountAdapter
@spec create(Stripe.BankAccount.t, StripeConnectAccount.t) :: {:ok, StripeExternalAccount.t}
def create(%Stripe.BankAccount{} = external_account, %StripeConnectAccount{} = connect_account) do
with {:ok, params} <- StripeExternalAccountAdapter.to_params(external_account, connect_account) do
%StripeExternalAccount{} |> StripeExternalAccount.changeset(params) |> Repo.insert
end
end
end
<|start_filename|>lib/code_corps/github/api/user.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.User do
@moduledoc """
Used to perform user actions on the GitHub API.
"""
alias CodeCorps.{Accounts, GitHub, User}
alias Ecto.Changeset
@single_endpoint "user"
@doc """
POSTs `code` and `state` to GitHub to receive an OAuth token,
then associates the given user with that OAuth token.
Also associates any orphaned `GithubAppInstallation` records matching their
`sender_github_id` field with the user's `github_id`.
Also associates any existing tasks and comments with the newly connected
user, based on the user's `github_id`.
Returns one of the following:
- `{:ok, %CodeCorps.User{}}`
- `{:error, %Ecto.Changeset{}}`
- `{:error, GitHub.api_error_struct}`
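A usage sketch, where `current_user`, `code`, and `state` are illustrative
(`code` and `state` are the values GitHub sends to the OAuth callback):
```
{:ok, connected_user} = CodeCorps.GitHub.API.User.connect(current_user, code, state)
```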
"""
@spec connect(User.t, String.t, String.t) ::
{:ok, User.t} | {:error, Changeset.t} | {:error, GitHub.api_error_struct}
def connect(%User{} = user, code, state) do
with {:ok, %{"access_token" => access_token}} <- GitHub.user_access_token_request(code, state),
{:ok, %{} = user_payload} <- access_token |> me()
do
user |> do_connect(user_payload, access_token)
else
{:ok, %{"error" => _} = error} -> handle_oauth_error(error)
{:error, error} -> {:error, error}
end
end
@spec do_connect(User.t, map, String.t) :: {:ok, User.t} | {:error, Changeset.t}
defp do_connect(%User{} = user, %{} = user_payload, access_token)
when is_binary(access_token) do
Accounts.update_from_github_oauth(user, user_payload, access_token)
end
@doc ~S"""
Requests the currently authenticated user payload from github
"""
@spec me(String.t, Keyword.t) :: {:ok, map} | {:error, GitHub.api_error_struct}
def me(access_token, opts \\ []) do
case GitHub.request(:get, @single_endpoint, %{}, %{}, opts ++ [access_token: access_token]) do
{:ok, response} -> {:ok, response}
{:error, error} -> {:error, error}
end
end
defp handle_oauth_error(%{"error_description" => message, "error_uri" => documentation_url}) do
{:error, GitHub.APIError.new({401, %{"message" => message, "documentation_url" => documentation_url}})}
end
end
<|start_filename|>test/support/factories.ex<|end_filename|>
defmodule CodeCorps.Factories do
@moduledoc false
# with Ecto
use ExMachina.Ecto, repo: CodeCorps.Repo
def category_factory do
%CodeCorps.Category{
name: sequence(:name, &"Category #{&1}"),
slug: sequence(:slug, &"category-#{&1}"),
description: sequence(:description, &"A description for category #{&1}"),
}
end
def comment_factory do
%CodeCorps.Comment{
body: "I love elixir!",
created_at: DateTime.utc_now,
markdown: "I love elixir!",
modified_at: DateTime.utc_now,
task: build(:task),
user: build(:user)
}
end
def conversation_factory do
%CodeCorps.Conversation{
status: "open",
read_at: nil,
message: build(:message),
user: build(:user)
}
end
def conversation_part_factory do
%CodeCorps.ConversationPart{
body: sequence(:body, &"Reply to conversation #{&1}"),
read_at: nil,
author: build(:user),
conversation: build(:conversation)
}
end
def donation_goal_factory do
%CodeCorps.DonationGoal{
amount: 100,
description: sequence(:description, &"A description for a donation goal #{&1}"),
project: build(:project)
}
end
def github_app_installation_factory do
%CodeCorps.GithubAppInstallation{
github_account_id: sequence(:github_account_login, &(&1)),
github_account_avatar_url: sequence(:github_account_avatar_url, &"http://test-#{&1}.com"),
github_account_login: sequence(:github_account_login, &"owner_#{&1}"),
github_id: sequence(:github_id, &(&1)),
github_account_type: "User",
project: build(:project),
user: build(:user)
}
end
def github_comment_factory do
%CodeCorps.GithubComment{
body: sequence(:body, &"I love elixir with GithubComment #{&1}"),
github_created_at: DateTime.utc_now,
github_id: sequence(:id, (fn number -> number end)),
github_updated_at: DateTime.utc_now,
github_issue: build(:github_issue)
}
end
def github_event_factory do
%CodeCorps.GithubEvent{}
end
def github_issue_factory do
%CodeCorps.GithubIssue{
body: "I love elixir!",
github_created_at: DateTime.utc_now,
github_id: sequence(:id, (fn number -> number end)),
github_updated_at: DateTime.utc_now,
locked: false,
number: sequence(:id, (fn number -> number end)),
state: "open",
title: "I love Elixir!",
github_repo: build(:github_repo)
}
end
def github_issue_assignee_factory do
%CodeCorps.GithubIssueAssignee{
github_issue: build(:github_issue),
github_user: build(:github_user)
}
end
def github_pull_request_factory do
%CodeCorps.GithubPullRequest{
body: "Here's a change!",
github_created_at: DateTime.utc_now,
github_id: sequence(:id, (fn number -> number end)),
github_updated_at: DateTime.utc_now,
locked: false,
merged: false,
number: sequence(:id, (fn number -> number end)),
state: "open",
title: "Here's a change!",
github_repo: build(:github_repo)
}
end
def github_repo_factory do
%CodeCorps.GithubRepo{
github_account_login: sequence(:github_account_login, &"owner_#{&1}"),
github_app_installation: build(:github_app_installation),
github_id: sequence(:github_id, &(&1)),
name: sequence(:name, &"repo_#{&1}"),
project: build(:project)
}
end
def github_user_factory do
%CodeCorps.GithubUser{
github_id: sequence(:id, (fn number -> number end))
}
end
def message_factory do
%CodeCorps.Message{
body: sequence(:body, &"Subject #{&1}"),
initiated_by: "admin",
subject: sequence(:subject, &"Subject #{&1}"),
author: build(:user),
project: build(:project)
}
end
def organization_factory do
%CodeCorps.Organization{
name: sequence(:username, &"Organization #{&1}"),
owner: build(:user),
slug: sequence(:slug, &"organization-#{&1}"),
description: sequence(:email, &"Description of organization #{&1}"),
}
end
def organization_invite_factory do
%CodeCorps.OrganizationInvite{
code: sequence(:code, &"n43crhiqR-#{&1}"),
email: sequence(:email, &"email-#{&1}@example.com"),
organization_name: sequence(:organization_name, &"organization-#{&1}")
}
end
def task_factory do
%CodeCorps.Task{
created_at: DateTime.utc_now,
markdown: "A test task",
modified_at: DateTime.utc_now,
status: "open",
title: "Test task",
project: build(:project),
user: build(:user),
task_list: build(:task_list)
}
end
def task_list_factory do
%CodeCorps.TaskList{
name: "Test task list",
position: 1,
project: build(:project)
}
end
def task_skill_factory do
%CodeCorps.TaskSkill{
skill: build(:skill),
task: build(:task)
}
end
def organization_github_app_installation_factory do
%CodeCorps.OrganizationGithubAppInstallation{
github_app_installation: build(:github_app_installation),
organization: build(:organization)
}
end
def project_factory do
%CodeCorps.Project{
approved: true,
long_description_markdown: sequence(:long_description_markdown, &"Description #{&1}"), # once approved, this MUST be set
slug: sequence(:slug, &"project-#{&1}"),
title: sequence(:title, &"Project #{&1}"),
website: sequence(:website, &"http://test-#{&1}.com"),
organization: build(:organization)
}
end
def project_user_factory do
%CodeCorps.ProjectUser{
project: build(:project),
user: build(:user),
role: "contributor"
}
end
def project_category_factory do
%CodeCorps.ProjectCategory{
project: build(:project),
category: build(:category)
}
end
def role_factory do
%CodeCorps.Role{
name: sequence(:name, &"Role #{&1}"),
ability: sequence(:ability, &"Ability for role #{&1}"),
kind: sequence(:kind, &"Kind for role #{&1}")
}
end
def role_skill_factory do
%CodeCorps.RoleSkill{
role: build(:role),
skill: build(:skill)
}
end
@spec set_password(CodeCorps.User.t, String.t) :: CodeCorps.User.t
def set_password(user, password) do
hashed_password = Comeonin.Bcrypt.hashpwsalt(password)
%{user | encrypted_password: hashed_password}
end
def skill_factory do
%CodeCorps.Skill{
description: sequence(:description, &"A description for category #{&1}"),
title: sequence(:title, &"Category #{&1}"),
}
end
def slugged_route_factory do
%CodeCorps.SluggedRoute{
slug: sequence(:slug, &"slug-#{&1}")
}
end
def stripe_connect_account_factory do
%CodeCorps.StripeConnectAccount{
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
organization: build(:organization)
}
end
def stripe_connect_card_factory do
%CodeCorps.StripeConnectCard{
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
stripe_connect_account: build(:stripe_connect_account),
stripe_platform_card: build(:stripe_platform_card)
}
end
def stripe_connect_charge_factory do
%CodeCorps.StripeConnectCharge{
amount: 1000,
currency: "usd",
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
stripe_connect_account: build(:stripe_connect_account),
stripe_connect_customer: build(:stripe_connect_customer),
user: build(:user)
}
end
def stripe_connect_customer_factory do
%CodeCorps.StripeConnectCustomer{
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
stripe_connect_account: build(:stripe_connect_account),
stripe_platform_customer: build(:stripe_platform_customer),
user: build(:user)
}
end
def stripe_connect_plan_factory do
%CodeCorps.StripeConnectPlan{
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
project: build(:project)
}
end
def stripe_connect_subscription_factory do
stripe_connect_plan = build(:stripe_connect_plan)
%CodeCorps.StripeConnectSubscription{
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
plan_id_from_stripe: stripe_connect_plan.id_from_stripe,
stripe_connect_plan: stripe_connect_plan,
user: build(:user)
}
end
def stripe_event_factory do
%CodeCorps.StripeEvent{
endpoint: sequence(:endpoint, fn(_) -> Enum.random(~w{ connect platform }) end),
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
object_id: "cus_123",
object_type: "customer",
status: sequence(:status, fn(_) -> Enum.random(~w{ unprocessed processed errored }) end),
type: "test.type"
}
end
def stripe_external_account_factory do
%CodeCorps.StripeExternalAccount{
account_id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}")
}
end
def stripe_file_upload_factory do
%CodeCorps.StripeFileUpload{
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
}
end
def stripe_invoice_factory do
%CodeCorps.StripeInvoice{
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
charge_id_from_stripe: sequence(:id_from_stripe, &"charge_stripe_id_#{&1}"),
customer_id_from_stripe: sequence(:id_from_stripe, &"customer_stripe_id_#{&1}"),
subscription_id_from_stripe: sequence(:subscription_id_from_stripe, &"subscription_stripe_id_#{&1}"),
stripe_connect_subscription: build(:stripe_connect_subscription),
user: build(:user)
}
end
def stripe_platform_customer_factory do
%CodeCorps.StripePlatformCustomer{
created: Timex.now |> Timex.to_unix,
email: sequence(:email, &"email-#{&1}@example.com"),
id_from_stripe: sequence(:id_from_stripe, &"stripe_id_#{&1}"),
user: build(:user)
}
end
def stripe_platform_card_factory do
%CodeCorps.StripePlatformCard{
id_from_stripe: sequence(:id_from_stripe, &"card_testDataMiscCaps#{&1}"),
user: build(:user)
}
end
def user_factory do
%CodeCorps.User{
first_name: sequence(:first_name, &"First#{&1}"),
username: sequence(:username, &"user#{&1}"),
email: sequence(:email, &"email-#{&1}@example.com")
}
end
def user_category_factory do
%CodeCorps.UserCategory{
user: build(:user),
category: build(:category)
}
end
def user_role_factory do
%CodeCorps.UserRole{
user: build(:user),
role: build(:role)
}
end
def user_skill_factory do
%CodeCorps.UserSkill{
user: build(:user),
skill: build(:skill)
}
end
def user_task_factory do
%CodeCorps.UserTask{
user: build(:user),
task: build(:task)
}
end
def project_skill_factory do
%CodeCorps.ProjectSkill{
project: build(:project),
skill: build(:skill)
}
end
def preview_factory do
%CodeCorps.Preview{
body: "Bar",
markdown: "Bar",
user: build(:user)
}
end
end
<|start_filename|>lib/code_corps/policy/github_event.ex<|end_filename|>
defmodule CodeCorps.Policy.GithubEvent do
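  @moduledoc """
  Contains authorization policies for `GithubEvent` records.
  Only site admins may index, show, or update them.
  """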
alias CodeCorps.User
def index?(%User{admin: true}), do: true
def index?(%User{admin: false}), do: false
def show?(%User{admin: true}), do: true
def show?(%User{admin: false}), do: false
def update?(%User{admin: true}), do: true
def update?(%User{admin: false}), do: false
end
<|start_filename|>test/lib/code_corps_web/controllers/role_skill_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.RoleSkillControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :role_skill
describe "index" do
test "lists all entries on index", %{conn: conn} do
[role_skill_1, role_skill_2] = insert_pair(:role_skill)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([role_skill_1.id, role_skill_2.id])
end
test "filters resources on index", %{conn: conn} do
[role_skill_1, role_skill_2 | _] = insert_list(3, :role_skill)
path = "role-skills/?filter[id]=#{role_skill_1.id},#{role_skill_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([role_skill_1.id, role_skill_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
role_skill = insert(:role_skill)
conn
|> request_show(role_skill)
|> json_response(200)
|> assert_id_from_response(role_skill.id)
end
test "renders 404", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag authenticated: :admin
test "creates and renders resource when data is valid", %{conn: conn} do
role = insert(:role)
skill = insert(:skill)
attrs = %{role: role, skill: skill}
assert conn |> request_create(attrs) |> json_response(201)
end
@tag authenticated: :admin
test "renders 422 when data is invalid", %{conn: conn} do
invalid_attrs = %{}
assert conn |> request_create(invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "delete" do
@tag authenticated: :admin
test "deletes resource", %{conn: conn} do
assert conn |> request_delete |> response(204)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag :authenticated
test "renders 404", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>test/lib/code_corps/github/api/repository_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.API.RepositoryTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub.API.Repository
}
describe "issues/1" do
test "calls github API for issues returns response" do
owner = "baxterthehacker"
repo = "public-repo"
url = "https://api.github.com/repos/#{owner}/#{repo}/issues"
github_app_installation = insert(:github_app_installation, github_account_login: owner)
github_repo = insert(:github_repo, github_app_installation: github_app_installation, name: repo)
{:ok, issues} = Repository.issues(github_repo)
assert_received({
:get,
endpoint_url,
"",
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token" <> _tok}
],
[
{:params, [page: 1, per_page: 100, state: "all"]},
{:access_token, "<PASSWORD>"}
]
})
assert url == endpoint_url
assert Enum.count(issues) == 8
end
@tag acceptance: true
test "calls github API with the real API" do
owner = "coderly"
repo = "github-app-testing"
github_app_installation = insert(:github_app_installation, github_account_login: owner, github_id: 63365)
github_repo = insert(:github_repo, github_app_installation: github_app_installation, name: repo)
with_real_api do
{:ok, issues} = Repository.issues(github_repo)
assert Enum.count(issues) == 3
end
end
end
describe "issue_comments/1" do
test "calls github API for issues returns response" do
owner = "baxterthehacker"
repo = "public-repo"
url = "https://api.github.com/repos/#{owner}/#{repo}/issues/comments"
github_app_installation = insert(:github_app_installation, github_account_login: owner)
github_repo = insert(:github_repo, github_app_installation: github_app_installation, name: repo)
{:ok, comments} = Repository.issue_comments(github_repo)
assert_received({
:get,
endpoint_url,
"",
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token" <> _tok}
],
[
{:params, [page: 1, per_page: 100]},
{:access_token, "v1.1f69<PASSWORD>xxx"}
]
})
assert url == endpoint_url
assert Enum.count(comments) == 12
end
@tag acceptance: true
test "calls github API with the real API" do
owner = "coderly"
repo = "github-app-testing"
github_app_installation = insert(:github_app_installation, github_account_login: owner, github_id: 63365)
github_repo = insert(:github_repo, github_app_installation: github_app_installation, name: repo)
with_real_api do
{:ok, comments} = Repository.issue_comments(github_repo)
assert Enum.count(comments) == 2
end
end
end
describe "pulls/1" do
test "calls github API for pulls returns response" do
owner = "baxterthehacker"
repo = "public-repo"
url = "https://api.github.com/repos/#{owner}/#{repo}/pulls"
github_app_installation = insert(:github_app_installation, github_account_login: owner)
github_repo = insert(:github_repo, github_app_installation: github_app_installation, name: repo)
{:ok, pulls} = Repository.pulls(github_repo)
assert_received({
:get,
endpoint_url,
"",
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token" <> _tok}
],
[
{:params, [page: 1, per_page: 100, state: "all"]},
{:access_token, "v<PASSWORD>"}
]
})
assert url == endpoint_url
assert Enum.count(pulls) == 4
end
@tag acceptance: true
test "calls github API with the real API" do
owner = "coderly"
repo = "github-app-testing"
github_app_installation = insert(:github_app_installation, github_account_login: owner, github_id: 63365)
github_repo = insert(:github_repo, github_app_installation: github_app_installation, name: repo)
with_real_api do
{:ok, pulls} = Repository.pulls(github_repo)
assert Enum.count(pulls) == 1
end
end
end
end
<|start_filename|>test/lib/code_corps/messages/messages_test.exs<|end_filename|>
defmodule CodeCorps.MessagesTest do
@moduledoc false
use CodeCorps.DbAccessCase
use Phoenix.ChannelTest
use Bamboo.Test
import Ecto.Query, only: [where: 2]
alias CodeCorps.{Conversation, ConversationPart, Emails, Message, Messages}
alias Ecto.Changeset
defp get_and_sort_ids(records) do
records |> Enum.map(&Map.get(&1, :id)) |> Enum.sort
end
defp json_map(attrs) do
attrs
|> Poison.encode!
|> Poison.decode!
end
describe "list" do
test "returns all records by default" do
insert_list(3, :message)
assert Message |> Messages.list(%{}) |> Enum.count == 3
end
test "can filter by list of ids" do
[message_1, message_2, message_3] = insert_list(3, :message)
params = %{"filter" => %{"id" => "#{message_1.id},#{message_3.id}"}}
results = Message |> Messages.list(params)
assert results |> Enum.count == 2
assert results |> get_and_sort_ids() ==
[message_1, message_3] |> get_and_sort_ids()
params = %{"filter" => %{"id" => "#{message_2.id}"}}
results = Message |> Messages.list(params)
assert results |> Enum.count == 1
assert results |> get_and_sort_ids() ==
[message_2] |> get_and_sort_ids()
end
test "builds upon the provided scope" do
[%{id: project_1_id} = project_1, project_2] = insert_pair(:project)
[author_1, author_2] = insert_pair(:user)
message_p1_a1 = insert(:message, project: project_1, author: author_1)
message_p1_a2 = insert(:message, project: project_1, author: author_2)
message_p2_a1 = insert(:message, project: project_2, author: author_1)
message_p2_a2 = insert(:message, project: project_2, author: author_2)
params = %{"filter" => %{"id" => "#{message_p1_a1.id}"}}
result_ids =
Message
|> where(project_id: ^project_1_id)
|> Messages.list(params)
|> get_and_sort_ids()
assert message_p1_a1.id in result_ids
refute message_p1_a2.id in result_ids
refute message_p2_a1.id in result_ids
refute message_p2_a2.id in result_ids
end
end
describe "list_conversations/2" do
test "returns all records by default" do
insert_list(3, :conversation)
assert Conversation |> Messages.list_conversations(%{}) |> Enum.count == 3
end
test "can filter by project" do
[%{project: project_1} = message_1, %{project: project_2} = message_2] =
insert_pair(:message)
conversation_1 = insert(:conversation, message: message_1)
conversation_2 = insert(:conversation, message: message_2)
result_ids =
Conversation
|> Messages.list_conversations(%{"project_id" => project_1.id})
|> get_and_sort_ids()
assert result_ids |> Enum.count == 1
assert conversation_1.id in result_ids
refute conversation_2.id in result_ids
result_ids =
Conversation
|> Messages.list_conversations(%{"project_id" => project_2.id})
|> get_and_sort_ids()
assert result_ids |> Enum.count == 1
refute conversation_1.id in result_ids
assert conversation_2.id in result_ids
end
test "can filter by status" do
message_started_by_admin = insert(:message, initiated_by: "admin")
message_started_by_user = insert(:message, initiated_by: "user")
conversation_started_by_admin_without_reply =
insert(:conversation, message: message_started_by_admin)
conversation_started_by_admin_with_reply =
insert(:conversation, message: message_started_by_admin)
insert(
:conversation_part,
conversation: conversation_started_by_admin_with_reply
)
conversation_started_by_user_without_reply =
insert(:conversation, message: message_started_by_user)
conversation_started_by_user_with_reply =
insert(:conversation, message: message_started_by_user)
insert(
:conversation_part,
conversation: conversation_started_by_user_with_reply
)
result_ids =
Conversation
|> Messages.list_conversations(%{"active" => true})
|> get_and_sort_ids()
refute conversation_started_by_admin_without_reply.id in result_ids
assert conversation_started_by_admin_with_reply.id in result_ids
assert conversation_started_by_user_without_reply.id in result_ids
assert conversation_started_by_user_with_reply.id in result_ids
result_ids =
Conversation
|> Messages.list_conversations(%{"status" => "open"})
|> get_and_sort_ids()
assert conversation_started_by_admin_without_reply.id in result_ids
assert conversation_started_by_admin_with_reply.id in result_ids
assert conversation_started_by_user_without_reply.id in result_ids
assert conversation_started_by_user_with_reply.id in result_ids
end
test "builds upon the provided scope" do
[project_1, project_2] = insert_pair(:project)
[user_1, user_2] = insert_pair(:user)
message_p1 = insert(:message, project: project_1)
message_p2 = insert(:message, project: project_2)
conversation_u1_p1 =
insert(:conversation, user: user_1, message: message_p1)
conversation_u1_p2 =
insert(:conversation, user: user_1, message: message_p2)
conversation_u2_p1 =
insert(:conversation, user: user_2, message: message_p1)
conversation_u2_p2 =
insert(:conversation, user: user_2, message: message_p2)
params = %{"project_id" => project_1.id}
result_ids =
Conversation
|> where(user_id: ^user_1.id)
|> Messages.list_conversations(params)
|> get_and_sort_ids()
assert conversation_u1_p1.id in result_ids
refute conversation_u1_p2.id in result_ids
refute conversation_u2_p1.id in result_ids
refute conversation_u2_p2.id in result_ids
end
test "supports multiple filters at once" do
# we create two messages started by admin, each on a different project
%{project: project_1} = message_1_started_by_admin =
insert(:message, initiated_by: "admin")
%{project: project_2} = message_2_started_by_admin =
insert(:message, initiated_by: "admin")
# we create one conversation without a reply, to test the "status" filter
conversation_started_by_admin_without_reply =
insert(:conversation, message: message_1_started_by_admin)
# we create two conversations with replies, one on each message
# since the messages are on different projects, this allows us to
# test the project filter
conversation_started_by_admin_with_reply =
insert(:conversation, message: message_1_started_by_admin)
insert(
:conversation_part,
conversation: conversation_started_by_admin_with_reply
)
other_conversation_started_by_admin_with_reply =
insert(:conversation, message: message_2_started_by_admin)
insert(
:conversation_part,
conversation: other_conversation_started_by_admin_with_reply
)
params = %{"active" => true, "project_id" => project_1.id}
result_ids =
Conversation
|> Messages.list_conversations(params)
|> get_and_sort_ids()
# refuted because it has no reply (so it is not active) even though it belongs
# to a message on the correct project, which shows the status filter worked
refute conversation_started_by_admin_without_reply.id in result_ids
# this conversation is active and belongs to the message with the
# correct project
assert conversation_started_by_admin_with_reply.id in result_ids
# this conversation is active, but belongs to a message with a different
# project
refute other_conversation_started_by_admin_with_reply.id in result_ids
params = %{"active" => true, "project_id" => project_2.id}
result_ids =
Conversation
|> Messages.list_conversations(params)
|> get_and_sort_ids()
refute conversation_started_by_admin_without_reply.id in result_ids
refute conversation_started_by_admin_with_reply.id in result_ids
assert other_conversation_started_by_admin_with_reply.id in result_ids
end
end
describe "list_parts/2" do
test "returns all records by default" do
insert_list(3, :conversation_part)
assert ConversationPart |> Messages.list_parts(%{}) |> Enum.count == 3
end
end
describe "get_conversation/1" do
test "gets a single conversation" do
conversation = insert(:conversation)
result = Messages.get_conversation(conversation.id)
assert result.id == conversation.id
end
end
describe "get_part/1" do
test "gets a single part" do
conversation_part = insert(:conversation_part)
result = Messages.get_part(conversation_part.id)
assert result.id == conversation_part.id
end
end
describe "add_part/1" do
test "creates a conversation part" do
conversation = insert(:conversation, updated_at: Timex.now |> Timex.shift(minutes: -5))
user = insert(:user)
attrs = %{
author_id: user.id,
body: "Test <PASSWORD>",
conversation_id: conversation.id
}
{:ok, %ConversationPart{} = conversation_part} = Messages.add_part(attrs |> json_map())
conversation_part =
conversation_part
|> Repo.preload([:author, conversation: [message: [[project: :organization]]]])
assert conversation_part.author_id == user.id
assert conversation_part.body == "Test body"
assert conversation_part.conversation_id == conversation.id
assert conversation_part.updated_at == conversation_part.conversation.updated_at
end
test "broadcasts event on phoenix channel" do
conversation = insert(:conversation)
user = insert(:user)
attrs = %{
author_id: user.id,
body: "Test body",
conversation_id: conversation.id
}
CodeCorpsWeb.Endpoint.subscribe("conversation:#{conversation.id}")
{:ok, %ConversationPart{id: id}} = Messages.add_part(attrs |> json_map())
assert_broadcast("new:conversation-part", %{id: ^id})
CodeCorpsWeb.Endpoint.unsubscribe("conversation:#{conversation.id}")
end
test "when replied by project admin, sends appropriate email to other participants" do
part_author = insert(:user)
%{author: message_author} = message = insert(:message)
%{user: target_user} = conversation = insert(:conversation, message: message)
%{author: other_participant} = insert(:conversation_part, conversation: conversation)
attrs = %{
author_id: part_author.id,
body: "Test body",
conversation_id: conversation.id
}
{:ok, %ConversationPart{} = part} = Messages.add_part(attrs |> json_map())
part = part |> Repo.preload([:author, conversation: [message: [[project: :organization]]]])
refute_delivered_email Emails.ReplyToConversationEmail.create(part, part_author)
assert_delivered_email Emails.ReplyToConversationEmail.create(part, target_user)
assert_delivered_email Emails.ReplyToConversationEmail.create(part, message_author)
assert_delivered_email Emails.ReplyToConversationEmail.create(part, other_participant)
end
test "when replied by conversation user, sends appropriate email to other participants" do
part_author = insert(:user)
%{author: message_author} = message = insert(:message)
%{user: target_user} = conversation = insert(:conversation, message: message)
%{author: other_participant} = insert(:conversation_part, conversation: conversation)
attrs = %{
author_id: part_author.id,
body: "Test body",
conversation_id: conversation.id
}
{:ok, %ConversationPart{} = part} = Messages.add_part(attrs |> json_map())
part = part |> Repo.preload([:author, conversation: [message: [[project: :organization]]]])
refute_delivered_email Emails.ReplyToConversationEmail.create(part, part_author)
assert_delivered_email Emails.ReplyToConversationEmail.create(part, target_user)
assert_delivered_email Emails.ReplyToConversationEmail.create(part, message_author)
assert_delivered_email Emails.ReplyToConversationEmail.create(part, other_participant)
end
end
describe "create/1" do
test "creates a message" do
%{project: project, user: user} = insert(:project_user, role: "admin")
params = %{
author_id: user.id,
body: "Foo",
initiated_by: "admin",
project_id: project.id,
subject: "Bar"
}
{:ok, %Message{} = message} = params |> Messages.create
assert message |> Map.take(params |> Map.keys) == params
end
test "creates a conversation if attributes are provided" do
%{project: project, user: user} = insert(:project_user, role: "admin")
recipient = insert(:user)
params = %{
author_id: user.id,
body: "Foo",
conversations: [%{user_id: recipient.id}],
initiated_by: "admin",
project_id: project.id,
subject: "Bar"
}
{:ok, %Message{} = message} = params |> Messages.create
assert message |> Map.take(params |> Map.delete(:conversations) |> Map.keys) == params |> Map.delete(:conversations)
assert Conversation |> Repo.get_by(message_id: message.id, status: "open", user_id: recipient.id)
end
test "requires author_id, body, initiated_by, project_id" do
{:error, %Changeset{} = changeset} = %{} |> Messages.create
assert changeset.errors[:author_id]
assert changeset.errors[:body]
assert changeset.errors[:initiated_by]
assert changeset.errors[:project_id]
end
test "requires subject if initiated by admin" do
{:error, %Changeset{} = changeset} =
%{initiated_by: "admin"} |> Messages.create
assert changeset.errors[:subject]
end
test "allows blank subject if initiated by user" do
{:error, %Changeset{} = changeset} =
%{initiated_by: "user"} |> Messages.create
refute changeset.errors[:subject]
end
test "fails on project validation if id invalid" do
user = insert(:user)
params = %{
author_id: user.id,
body: "Foo",
initiated_by: "admin",
project_id: 1,
subject: "Bar"
}
{:error, %Changeset{} = changeset} = params |> Messages.create
assert changeset.errors[:project]
end
test "fails on user validation if id invalid" do
project = insert(:project)
params = %{
author_id: -1,
body: "Foo",
initiated_by: "admin",
project_id: project.id,
subject: "Bar"
}
{:error, %Changeset{} = changeset} = params |> Messages.create
assert changeset.errors[:author]
end
test "requires conversation user_id" do
params = %{conversations: [%{}]}
{:error, %Changeset{} = changeset} = params |> Messages.create
conversation_changeset = changeset.changes.conversations |> List.first
assert conversation_changeset.errors[:user_id]
end
test "fails on conversation user validation if id invalid" do
%{project: project, user: user} = insert(:project_user, role: "admin")
params = %{
author_id: user.id,
body: "Foo",
conversations: [%{user_id: -1}],
initiated_by: "admin",
project_id: project.id,
subject: "Bar"
}
{:error, %Changeset{} = changeset} = params |> Messages.create
conversation_changeset = changeset.changes.conversations |> List.first
assert conversation_changeset.errors[:user]
end
test "when initiated by admin, sends email to each conversation user" do
%{project: project, user: user} = insert(:project_user, role: "admin")
[recipient_1, recipient_2] = insert_pair(:user)
params = %{
author_id: user.id,
body: "Foo",
conversations: [%{user_id: recipient_1.id}, %{user_id: recipient_2.id}],
initiated_by: "admin",
project_id: project.id,
subject: "Bar"
}
{:ok, %Message{} = message} = params |> Messages.create
%{conversations: [conversation_1, conversation_2]} = message =
message |> Repo.preload([:project, [conversations: :user]])
assert_delivered_email Emails.MessageInitiatedByProjectEmail.create(message, conversation_1)
assert_delivered_email Emails.MessageInitiatedByProjectEmail.create(message, conversation_2)
end
end
end
<|start_filename|>test/lib/code_corps_web/views/category_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.CategoryViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
category = insert(:category)
project_category = insert(:project_category, category: category)
category = CodeCorpsWeb.CategoryController.preload(category)
rendered_json = render(CodeCorpsWeb.CategoryView, "show.json-api", data: category)
expected_json = %{
"data" => %{
"attributes" => %{
"description" => category.description,
"name" => category.name,
"slug" => category.slug
},
"id" => category.id |> Integer.to_string,
"relationships" => %{
"project-categories" => %{
"data" => [
%{"id" => project_category.id |> Integer.to_string, "type" => "project-category"}
]
}
},
"type" => "category",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps/github/event/handler.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.Handler do
@moduledoc ~S"""
Default behavior for all GitHub webhook event handlers.
"""
@doc ~S"""
The only entry point a GitHub webhook event handler function should contain.
Receives the GitHub payload, returns an `:ok` tuple if the process was
successful, or an `:error` tuple, where the second element is an atom, if it
failed.
"""
@callback handle(map) :: {:ok, any} | {:error, atom} | {:error, atom, any}
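# A minimal sketch (not part of the original source) of a module implementing
# this behaviour; the module name and the do_work/1 helper are hypothetical.
#
# defmodule CodeCorps.GitHub.Event.ExampleHandler do
#   @behaviour CodeCorps.GitHub.Event.Handler
#
#   @impl true
#   def handle(%{} = payload) do
#     case payload |> do_work() do
#       {:ok, result} -> {:ok, result}
#       {:error, reason} -> {:error, :handling_failed, reason}
#     end
#   end
# end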
end
<|start_filename|>test/lib/code_corps/policy/organization_github_app_installation_test.exs<|end_filename|>
defmodule CodeCorps.Policy.OrganizationGithubAppInstallationTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.OrganizationGithubAppInstallation, only: [create?: 2, delete?: 2]
describe "create?/2" do
test "returns true when user is creating installation for organization where they're an owner" do
user = insert(:user)
organization = insert(:organization, owner: user)
github_app_installation = insert(:github_app_installation)
assert create?(user, %{github_app_installation_id: github_app_installation.id, organization_id: organization.id})
end
test "returns false for normal user" do
user = insert(:user)
organization = insert(:organization)
github_app_installation = insert(:github_app_installation)
refute create?(user, %{github_app_installation_id: github_app_installation.id, organization_id: organization.id})
end
end
describe "delete?/2" do
test "returns true when user is owner of the organization" do
user = insert(:user)
organization = insert(:organization, owner: user)
github_app_installation = insert(:github_app_installation)
organization_github_app_installation = insert(:organization_github_app_installation, github_app_installation: github_app_installation, organization: organization)
assert delete?(user, organization_github_app_installation)
end
test "returns false for normal user" do
user = insert(:user)
organization = insert(:organization)
github_app_installation = insert(:github_app_installation)
organization_github_app_installation = insert(:organization_github_app_installation, github_app_installation: github_app_installation, organization: organization)
refute delete?(user, organization_github_app_installation)
end
end
end
<|start_filename|>lib/code_corps_web/controllers/project_skill_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.ProjectSkillController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Analytics.SegmentTracker, ProjectSkill, User, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with project_skills <- ProjectSkill |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: project_skills)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %ProjectSkill{} = project_skill <- ProjectSkill |> Repo.get(id) do
conn |> render("show.json-api", data: project_skill)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %ProjectSkill{}, params),
{:ok, %ProjectSkill{} = project_skill} <- %ProjectSkill{} |> ProjectSkill.create_changeset(params) |> Repo.insert do
current_user.id |> SegmentTracker.track("Added Project Skill", project_skill)
conn |> put_status(:created) |> render("show.json-api", data: project_skill)
end
end
@spec delete(Conn.t, map) :: Conn.t
def delete(%Conn{} = conn, %{"id" => id} = _params) do
with %ProjectSkill{} = project_skill <- ProjectSkill |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:delete, project_skill),
{:ok, %ProjectSkill{} = project_skill} <- project_skill |> Repo.delete
do
current_user.id |> SegmentTracker.track("Removed Project Skill", project_skill)
conn |> Conn.assign(:project_skill, project_skill) |> send_resp(:no_content, "")
end
end
end
<|start_filename|>test/lib/code_corps/policy/user_skill_test.exs<|end_filename|>
defmodule CodeCorps.Policy.UserSkillTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.UserSkill, only: [create?: 2, delete?: 2]
describe "create?" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
assert create?(user, %{"user_id" => user.id})
end
test "returns true if user is creating their own record" do
user = insert(:user)
assert create?(user, %{"user_id" => user.id})
end
test "returns false if user is creating someone else's record" do
user = build(:user)
refute create?(user, %{"user_id" => "someone-else"})
end
end
describe "delete?" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
user_skill = insert(:user_skill)
assert delete?(user, user_skill)
end
test "returns true if user is creating their own record" do
user = insert(:user)
user_skill = insert(:user_skill, user: user)
assert delete?(user, user_skill)
end
test "returns false if user is creating someone else's record" do
user = build(:user)
user_skill = insert(:user_skill)
refute delete?(user, user_skill)
end
end
end
<|start_filename|>test/lib/code_corps_web/views/user_skill_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.UserSkillViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
user_skill = insert(:user_skill)
rendered_json = render(CodeCorpsWeb.UserSkillView, "show.json-api", data: user_skill)
expected_json = %{
"data" => %{
"id" => user_skill.id |> Integer.to_string,
"type" => "user-skill",
"attributes" => %{},
"relationships" => %{
"skill" => %{
"data" => %{"id" => user_skill.skill_id |> Integer.to_string, "type" => "skill"}
},
"user" => %{
"data" => %{"id" => user_skill.user_id |> Integer.to_string, "type" => "user"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps/model/category.ex<|end_filename|>
defmodule CodeCorps.Category do
@moduledoc """
Represents a category on Code Corps, e.g. "Society" and "Technology".
"""
use CodeCorps.Model
import CodeCorps.Helpers.Slug
@type t :: %__MODULE__{}
schema "categories" do
field :name, :string
field :slug, :string
field :description, :string
has_many :project_categories, CodeCorps.ProjectCategory
has_many :projects, through: [:project_categories, :project]
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:name, :description])
|> validate_required([:name])
end
@doc """
Builds a changeset for creating a category.
"""
def create_changeset(struct, params) do
struct
|> changeset(params)
|> generate_slug(:name, :slug)
|> validate_required([:slug])
|> unique_constraint(:slug, name: :index_categories_on_slug)
end
end
<|start_filename|>test/lib/code_corps_web/views/organization_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.OrganizationViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
user = insert(:user)
organization = insert(:organization, owner: user, default_color: "blue")
github_app_installation = insert(:github_app_installation)
organization_github_app_installation = insert(:organization_github_app_installation, github_app_installation: github_app_installation, organization: organization)
project = insert(:project, organization: organization)
slugged_route = insert(:slugged_route, organization: organization)
stripe_connect_account = insert(:stripe_connect_account, organization: organization)
host = Application.get_env(:code_corps, :asset_host)
organization = CodeCorpsWeb.OrganizationController.preload(organization)
rendered_json = render(CodeCorpsWeb.OrganizationView, "show.json-api", data: organization)
expected_json = %{
"data" => %{
"attributes" => %{
"approved" => organization.approved,
"cloudinary-public-id" => nil,
"description" => organization.description,
"icon-large-url" => "#{host}/icons/organization_default_large_blue.png",
"icon-thumb-url" => "#{host}/icons/organization_default_thumb_blue.png",
"inserted-at" => organization.inserted_at,
"name" => organization.name,
"slug" => organization.slug,
"updated-at" => organization.updated_at,
},
"id" => organization.id |> Integer.to_string,
"relationships" => %{
"organization-github-app-installations" => %{
"data" => [
%{"id" => organization_github_app_installation.id |> Integer.to_string, "type" => "organization-github-app-installation"}
]
},
"owner" => %{
"data" => %{"id" => user.id |> Integer.to_string, "type" => "user"}
},
"projects" => %{
"data" => [
%{"id" => project.id |> Integer.to_string, "type" => "project"}
]
},
"slugged-route" => %{
"data" => %{"id" => slugged_route.id |> Integer.to_string, "type" => "slugged-route"}
},
"stripe-connect-account" => %{
"data" => %{"id" => stripe_connect_account.id |> Integer.to_string, "type" => "stripe-connect-account"}
},
},
"type" => "organization",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/lib/code_corps_web/controllers/stripe_connect_plan_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectPlanControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :stripe_connect_plan
describe "show" do
@tag :authenticated
test "shows resource when authenticated and authorized", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
stripe_connect_plan = insert(:stripe_connect_plan, project: project)
conn
|> request_show(stripe_connect_plan)
|> json_response(200)
|> assert_id_from_response(stripe_connect_plan.id)
end
test "renders 401 when unauthenticated", %{conn: conn} do
stripe_connect_plan = insert(:stripe_connect_plan)
assert conn |> request_show(stripe_connect_plan) |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
stripe_connect_plan = insert(:stripe_connect_plan)
assert conn |> request_show(stripe_connect_plan) |> json_response(403)
end
@tag :authenticated
test "renders 404 when record not found", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when user is authenticated and authorized", %{conn: conn, current_user: current_user} do
organization = insert(:organization)
insert(:stripe_connect_account, organization: organization, charges_enabled: true, payouts_enabled: true)
project = insert(:project, organization: organization)
insert(:project_user, project: project, user: current_user, role: "owner")
insert(:donation_goal, project: project)
assert conn |> request_create(%{project: project}) |> json_response(201)
user_id = current_user.id
assert_received {:track, ^user_id, "Created Stripe Connect Plan", %{}}
end
test "does not create resource and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
organization = insert(:organization)
project = insert(:project, organization: organization)
assert conn |> request_create(%{project: project}) |> json_response(403)
end
@tag :authenticated
test "does not create resource and renders 422 when no donation goals exist and payouts not enabled", %{conn: conn, current_user: current_user} do
organization = insert(:organization)
insert(:stripe_connect_account, organization: organization, payouts_enabled: false)
project = insert(:project, organization: organization)
insert(:project_user, project: project, user: current_user, role: "owner")
assert conn |> request_create(%{project: project}) |> json_response(422)
end
end
end
<|start_filename|>lib/code_corps/policy/project_user.ex<|end_filename|>
defmodule CodeCorps.Policy.ProjectUser do
@moduledoc """
Handles `User` authorization of actions on `ProjectUser` records
"""
import CodeCorps.Policy.Helpers, only: [get_membership: 2, get_project: 1, get_role: 1]
alias CodeCorps.{ProjectUser, Repo, User}
@spec create?(User.t(), map) :: boolean
def create?(%User{id: user_id}, %{"user_id" => author_id, "role" => "pending"})
when user_id == author_id do
# Any user may create a "pending" membership, but only for themselves
true
end
def create?(%User{} = user, %{"user_id" => _, "project_id" => _} = params) do
user_role =
params
|> get_project()
|> get_membership(user)
|> get_role()
new_role = Map.get(params, "role")
do_create?(user_role, new_role)
end
def create?(_, _), do: false
defp do_create?("pending", _), do: false
defp do_create?("contributor", _), do: false
defp do_create?("admin", "pending"), do: true
defp do_create?("admin", "contributor"), do: true
defp do_create?("admin", _), do: false
defp do_create?("owner", _), do: true
defp do_create?(_, _), do: false
@spec update?(User.t(), ProjectUser.t(), map) :: boolean
def update?(%User{} = user, %ProjectUser{} = existing_record, params) do
user_role =
existing_record
|> get_project_membership(user)
|> get_role()
old_role = existing_record |> get_role()
new_role = Map.get(params, "role")
do_update?(user_role, old_role, new_role)
end
defp do_update?(nil, _, _), do: false
defp do_update?("pending", _, _), do: false
defp do_update?("contributor", _, _), do: false
defp do_update?("admin", "pending", "contributor"), do: true
defp do_update?("admin", _, _), do: false
defp do_update?("owner", "owner", _), do: false
defp do_update?("owner", _, _), do: true
defp do_update?(_, _, _), do: false
@spec delete?(User.t(), ProjectUser.t()) :: boolean
def delete?(%User{} = user, %ProjectUser{} = record) do
record |> get_project_membership(user) |> do_delete?(record)
end
defp do_delete?(%ProjectUser{} = user_m, %ProjectUser{} = current_m) when user_m == current_m,
do: true
defp do_delete?(%ProjectUser{role: "owner"}, %ProjectUser{}), do: true
defp do_delete?(%ProjectUser{role: "admin"}, %ProjectUser{role: role})
when role in ~w(pending contributor),
do: true
defp do_delete?(_, _), do: false
defp get_project_membership(%ProjectUser{user_id: nil}, %User{id: nil}), do: nil
defp get_project_membership(%ProjectUser{user_id: m_id} = membership, %User{id: u_id})
when m_id == u_id,
do: membership
defp get_project_membership(%ProjectUser{project_id: project_id}, %User{id: user_id}) do
ProjectUser |> Repo.get_by(project_id: project_id, user_id: user_id)
end
end
<|start_filename|>test/lib/code_corps/policy/stripe_platform_customer_test.exs<|end_filename|>
defmodule CodeCorps.Policy.StripePlatformCustomerTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.StripePlatformCustomer, only: [show?: 2]
describe "show?" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
stripe_platform_customer = build(:stripe_platform_customer)
assert show?(user, stripe_platform_customer)
end
test "returns true when user is viewing their own information" do
user = insert(:user)
stripe_platform_customer = insert(:stripe_platform_customer, user: user)
assert show?(user, stripe_platform_customer)
end
test "returns false when user id is not the StripePlatformCustomer's user_id" do
[user, another_user] = insert_pair(:user)
stripe_platform_customer = insert(:stripe_platform_customer, user: user)
refute show?(another_user, stripe_platform_customer)
end
end
end
<|start_filename|>lib/code_corps/admin/github_event_query.ex<|end_filename|>
defmodule CodeCorps.Admin.GithubEventQuery do
@moduledoc ~S"""
Holds helpers to query `CodeCorps.GithubEvent` records using a map of params.
"""
import Ecto.Query
alias Ecto.Queryable
@doc ~S"""
Filters a `CodeCorps.GithubEvent` query by `action`, if specified in params
"""
@spec action_filter(Queryable.t, map) :: Queryable.t
def action_filter(queryable, %{"action" => action}) do
queryable
|> where([c], c.action == ^action)
end
def action_filter(queryable, %{}), do: queryable
@doc ~S"""
Filters a `CodeCorps.GithubEvent` query by `status`, if specified in params
"""
@spec status_filter(Queryable.t, map) :: Queryable.t
def status_filter(queryable, %{"status" => status}) do
queryable
|> where([c], c.status == ^status)
end
def status_filter(queryable, %{}), do: queryable
@doc ~S"""
Filters a `CodeCorps.GithubEvent` query by `type`, if specified in params
"""
@spec type_filter(Queryable.t, map) :: Queryable.t
def type_filter(queryable, %{"type" => type}) do
queryable
|> where([c], c.type == ^type)
end
def type_filter(queryable, %{}), do: queryable
end
<|start_filename|>priv/repo/migrations/20171025184225_add_missing_indexes.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddMissingIndexes do
use Ecto.Migration
def change do
create index(:comments, [:github_comment_id])
create index(:github_comments, [:github_issue_id])
create index(:github_issues, [:github_repo_id])
create index(:github_issues, [:github_pull_request_id])
create index(:github_pull_requests, [:github_repo_id])
create index(:organizations, [:owner_id])
create index(:previews, [:user_id])
create index(:project_categories, [:category_id])
create index(:project_categories, [:project_id])
create index(:project_github_repos, [:github_repo_id])
create index(:project_github_repos, [:project_id])
create index(:project_users, [:project_id])
create index(:project_users, [:user_id])
create index(:projects, [:organization_id])
create index(:role_skills, [:role_id])
create index(:role_skills, [:skill_id])
create index(:slugged_routes, [:organization_id])
create index(:slugged_routes, [:user_id])
create index(:stripe_connect_cards, [:stripe_connect_account_id])
create index(:stripe_connect_charges, [:stripe_connect_account_id])
create index(:stripe_connect_charges, [:stripe_connect_customer_id])
create index(:stripe_connect_charges, [:user_id])
create index(:stripe_connect_customers, [:stripe_platform_customer_id])
create index(:stripe_connect_customers, [:user_id])
create index(:stripe_external_accounts, [:stripe_connect_account_id])
create index(:stripe_file_upload, [:stripe_connect_account_id])
create index(:stripe_invoices, [:stripe_connect_subscription_id])
create index(:stripe_invoices, [:user_id])
create index(:task_skills, [:task_id])
create index(:task_skills, [:skill_id])
create index(:tasks, [:github_issue_id])
create index(:tasks, [:github_repo_id])
create index(:tasks, [:task_list_id])
create index(:user_categories, [:category_id])
create index(:user_categories, [:user_id])
create index(:user_roles, [:role_id])
create index(:user_roles, [:user_id])
create index(:user_skills, [:skill_id])
create index(:user_skills, [:user_id])
create index(:user_tasks, [:task_id])
create index(:user_tasks, [:user_id])
end
end
<|start_filename|>lib/code_corps/project/query.ex<|end_filename|>
defmodule CodeCorps.Project.Query do
@moduledoc ~S"""
Contains queries for retrieving projects
"""
alias CodeCorps.{
Helpers.Query,
Project,
SluggedRoute,
Repo
}
@doc ~S"""
Returns a list of `Project` records based on the provided filter.
If the filter contains a `slug` key, returns all projects for the specified
`Organization`.
If the filter does not contain a `slug` key, filters by optional params.
"""
@spec list(map) :: list(Project.t)
def list(%{"slug" => slug}) do
SluggedRoute
|> Repo.get_by(slug: slug |> String.downcase)
|> Repo.preload([organization: :projects])
|> Map.get(:organization)
|> Map.get(:projects)
end
def list(%{} = params) do
Project
|> Query.optional_filters(params, ~w(approved)a)
|> Repo.all()
end
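# Illustrative calls (values are made up):
#
# Project.Query.list(%{"slug" => "code-corps"})
# # => every project belonging to the organization behind that slugged route
#
# Project.Query.list(%{"approved" => true})
# # => only approved projects, via the optional `approved` filter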
@doc ~S"""
Finds and returns a single `Project` record based on a map of parameters.
If the map contains a `project_slug` key, retrieves record by `slug`.
If the map contains an `id`, retrieves by id.
"""
@spec find(map) :: Project.t | nil
def find(%{"project_slug" => slug}) do
Project |> Repo.get_by(slug: slug |> String.downcase)
end
def find(%{"id" => id}) do
Project |> Repo.get(id)
end
end
<|start_filename|>priv/repo/migrations/20171026010933_add_more_missing_indexes.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddMoreMissingIndexes do
use Ecto.Migration
def change do
create index(:tasks, [:archived])
create index(:tasks, [:status])
end
end
<|start_filename|>lib/code_corps/stripe_testing/account.ex<|end_filename|>
defmodule CodeCorps.StripeTesting.Account do
alias CodeCorps.StripeTesting.Helpers
@extra_keys ~w(business_logo business_primary_color support_url transfer_schedule transfer_statement_descriptor)
def create(attributes) do
{:ok, create_stripe_record(attributes)}
end
def retrieve("account_with_multiple_external_accounts") do
{:ok, load_fixture("account_with_multiple_external_accounts")}
end
def retrieve(id) do
{:ok, create_stripe_record(%{"id" => id})}
end
def update(id, attributes) do
{:ok, create_stripe_record(attributes |> Map.merge(%{id: id}))}
end
def load_fixture(id) do
id
|> Helpers.load_raw_fixture
|> Map.drop(@extra_keys)
|> Stripe.Converter.convert_result
end
defp create_stripe_record(attributes) do
transformed_attributes =
attributes
|> CodeCorps.MapUtils.keys_to_string
|> Map.merge("account" |> Helpers.load_raw_fixture)
|> add_external_account
|> Map.drop(@extra_keys)
Stripe.Converter.convert_result(transformed_attributes)
end
defp add_external_account(%{"id" => account_id, "external_account" => external_account_id} = map) do
external_accounts_map = %{
"object" => "list",
"data" => [%{"id" => external_account_id, "object" => "bank_account"}],
"has_more" => false,
"total_count" => 1,
"url" => "/v1/accounts/#{account_id}/external_accounts"
}
map
|> Map.put("external_accounts", external_accounts_map)
|> Map.drop(["external_account"])
end
defp add_external_account(%{"id" => account_id} = map) do
external_accounts_map = %{
"object" => "list",
"data" => [],
"has_more" => false,
"total_count" => 1,
"url" => "/v1/accounts/#{account_id}/external_accounts"
}
Map.put(map, "external_accounts", external_accounts_map)
end
end
<|start_filename|>lib/code_corps/github/event/validator.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.Validator do
@moduledoc ~S"""
Default behavior for all GitHub webhook event payload validators.
"""
@doc ~S"""
The only entry point a GitHub webhook event validator function should contain.
Receives the GitHub payload, returns `true` if the payload is in the expected
format, `false` otherwise.
"""
@callback valid?(map) :: boolean
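# A minimal sketch (not part of the original source) of a validator using this
# behaviour; the module name and the expected payload keys are assumptions.
#
# defmodule CodeCorps.GitHub.Event.ExampleValidator do
#   @behaviour CodeCorps.GitHub.Event.Validator
#
#   @impl true
#   def valid?(%{"action" => _, "issue" => %{}}), do: true
#   def valid?(_), do: false
# end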
end
<|start_filename|>lib/code_corps_web/plugs/data_to_attributes.ex<|end_filename|>
defmodule CodeCorpsWeb.Plug.DataToAttributes do
@moduledoc ~S"""
Converts params in the JSON API format into flat params convenient for
changeset casting.
For base parameters, this is done using `JaSerializer.Params.to_attributes/1`.
For included records, this is done using custom code.
"""
alias Plug.Conn
@spec init(Keyword.t) :: Keyword.t
def init(opts), do: opts
@spec call(Conn.t, Keyword.t) :: Plug.Conn.t
def call(%Conn{params: %{} = params} = conn, opts \\ []) do
attributes =
params
|> Map.delete("data")
|> Map.delete("included")
|> Map.merge(params |> parse_data())
|> Map.merge(params |> parse_included(opts))
conn |> Map.put(:params, attributes)
end
@spec parse_data(map) :: map
defp parse_data(%{"data" => data}), do: data |> JaSerializer.Params.to_attributes
defp parse_data(%{}), do: %{}
@spec parse_included(map, Keyword.t) :: map
defp parse_included(%{"included" => included}, opts) do
included |> Enum.reduce(%{}, fn (%{"data" => %{"type" => type}} = params, parsed) ->
attributes = params |> parse_data()
if opts |> Keyword.get(:includes_many, []) |> Enum.member?(type) do
# this is an explicitly specified has_many,
# update existing data by adding new record
pluralized_type = type |> Inflex.pluralize
parsed |> Map.update(pluralized_type, [attributes], fn data ->
data ++ [attributes]
end)
else
# this is a belongs_to; put a new submap into the payload
parsed |> Map.put(type, attributes)
end
end)
end
defp parse_included(%{}, _opts), do: %{}
end
<|start_filename|>lib/code_corps/helpers/query.ex<|end_filename|>
defmodule CodeCorps.Helpers.Query do
import CodeCorps.Helpers.String, only: [coalesce_id_string: 1]
import Ecto.Query, only: [where: 3, limit: 2, order_by: 2]
@spec id_filter(Ecto.Queryable.t, map | String.t) :: Ecto.Queryable.t
def id_filter(query, %{"filter" => %{"id" => id_csv}}) do
query |> id_filter(id_csv)
end
def id_filter(query, %{}), do: query
def id_filter(query, id_list) when is_binary(id_list) do
ids = id_list |> coalesce_id_string
query |> where([object], object.id in ^ids)
end
# skill queries
def limit_filter(query, %{"limit" => count}), do: query |> add_limit(count |> Integer.parse)
def limit_filter(query, _), do: query
defp add_limit(query, {count, _rem}), do: query |> limit(^count)
defp add_limit(query, _other), do: query
def title_filter(query, %{"query" => title}) do
query |> where([object], ilike(object.title, ^"%#{title}%"))
end
def title_filter(query, _), do: query
# end skill queries
# task queries
def project_filter(query, %{"project_id" => project_id}) do
query |> where([object], object.project_id == ^project_id)
end
def project_filter(query, _), do: query
# end task queries
# user queries
def user_filter(query, %{"query" => query_string}) do
query
|> where(
[object],
ilike(object.first_name, ^"%#{query_string}%") or
ilike(object.last_name, ^"%#{query_string}%") or
ilike(object.username, ^"%#{query_string}%")
)
end
def user_filter(query, _), do: query
# end user queries
# sorting
def sort_by_order(query), do: query |> order_by([asc: :order])
# end sorting
# finders
def slug_finder(query, slug) do
query |> CodeCorps.Repo.get_by(slug: slug |> String.downcase)
end
# end finders
@doc ~S"""
Applies optional filters by key-value pairs to a query dynamically.
Used by piping a queryable into this function, along with a map of parameters
and a list of keys to filter by.
For each key in the list, if the params map has a value for that key, the
query condition for that `{key, value}` pair is applied to the queryable.
"""
@spec optional_filters(Ecto.Queryable.t, map, list) :: Ecto.Queryable.t
def optional_filters(query, %{} = params, [key | other_keys]) do
case params |> Map.get(key |> Atom.to_string) do
nil -> query |> optional_filters(params, other_keys)
value -> query |> where([o], field(o, ^key) == ^value)
end
end
def optional_filters(query, %{} = _params, []), do: query
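# Illustrative example (`Project` stands in for any schema with an `approved`
# field): only keys present in the params map end up in the query.
#
# Project
# |> optional_filters(%{"approved" => true}, [:approved])
# # roughly equivalent to: from p in Project, where: p.approved == true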
end
<|start_filename|>lib/code_corps/tasks/query.ex<|end_filename|>
defmodule CodeCorps.Tasks.Query do
@moduledoc ~S"""
Holds queries used to retrieve a list of, or a single `Task` record from the
database, using a provided map of parameters/filters.
"""
import Ecto.Query
import ScoutApm.Tracing
alias CodeCorps.{Helpers, Project, Task, Repo}
alias Ecto.Queryable
@doc ~S"""
Returns a list of `Task` records, filtered by a map of parameters.
Accepted parameters are a `project_id`, or a list of comma separated
`task_list_ids`, combined with a `status`.
The records are returned ordered by the `:order` field, ascending.
"""
@spec list(map) :: list(Project.t)
def list(%{} = params) do
timing("Tasks.Query", "list") do
Task
|> Helpers.Query.id_filter(params)
|> apply_archived_status(params)
|> apply_status(params)
|> apply_optional_filters(params)
|> order_by([asc: :order])
|> Repo.all()
end
end
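# Example call (parameter values are made up; the accepted filters are the
# ones documented above):
#
# CodeCorps.Tasks.Query.list(%{
#   "task_list_ids" => "1,2,3",
#   "status" => "open",
#   "archived" => false
# })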
@spec apply_optional_filters(Queryable.t, map) :: Queryable.t
defp apply_optional_filters(query, %{"filter" => %{} = params}) do
query |> apply_optional_filters(params)
end
defp apply_optional_filters(query, %{"project_id" => project_id} = params) do
query
|> where(project_id: ^project_id)
|> apply_optional_filters(params |> Map.delete("project_id"))
end
defp apply_optional_filters(query, %{"task_list_ids" => task_list_ids} = params) do
task_list_ids = task_list_ids |> Helpers.String.coalesce_id_string
query
|> where([r], r.task_list_id in ^task_list_ids)
|> apply_optional_filters(params |> Map.delete("task_list_ids"))
end
defp apply_optional_filters(query, %{}), do: query
@spec apply_archived_status(Queryable.t, map) :: Queryable.t
defp apply_archived_status(query, %{"archived" => archived}) do
query
|> where(archived: ^archived)
end
defp apply_archived_status(query, %{}) do
query
|> where(archived: false)
end
@spec apply_status(Queryable.t, map) :: Queryable.t
defp apply_status(query, %{"status" => status}) do
query
|> where(status: ^status)
end
defp apply_status(query, %{}), do: query
@doc ~S"""
Returns a `Task` record retrieved using a set of parameters.
This set can be:
- a combination of `project_id` and `number`
- a combination of `task_list_id` and `number`
- an `id`
"""
@spec find(map) :: Queryable.t
def find(%{"project_id" => project_id, "number" => number}) do
Task |> Repo.get_by(project_id: project_id, number: number)
end
def find(%{"task_list_id" => task_list_id, "number" => number}) do
Task |> Repo.get_by(task_list_id: task_list_id, number: number)
end
def find(%{"id" => id}) do
Task |> Repo.get(id)
end
end
<|start_filename|>test/lib/code_corps/model/comment_test.exs<|end_filename|>
defmodule CodeCorps.CommentTest do
@moduledoc false
use CodeCorps.ModelCase
alias CodeCorps.Comment
alias Ecto.Changeset
@valid_attrs %{markdown: "I love elixir!", state: "published"}
@invalid_attrs %{}
describe "changeset/2" do
test "with valid attributes" do
changeset = Comment.changeset(%Comment{}, @valid_attrs)
assert changeset.valid?
end
end
describe "create_changeset/2" do
test "with valid attributes" do
attrs =
@valid_attrs
|> Map.put(:task_id, 1)
|> Map.put(:user_id, 1)
changeset = Comment.create_changeset(%Comment{}, attrs)
assert changeset.valid?
end
test "with invalid attributes" do
changeset = Comment.create_changeset(%Comment{}, @invalid_attrs)
refute changeset.valid?
end
test "sets created_at and modified_at to the same time" do
task = insert(:task)
user = insert(:user)
changes = Map.merge(@valid_attrs, %{
task_id: task.id,
user_id: user.id
})
changeset = Comment.create_changeset(%Comment{}, changes)
assert changeset.valid?
{:ok, %Comment{created_at: created_at, modified_at: modified_at}} = Repo.insert(changeset)
assert created_at == modified_at
end
test "sets modified_from to 'code_corps'" do
assert(
%Comment{}
|> Comment.create_changeset(%{})
|> Changeset.get_field(:modified_from) == "code_corps"
)
end
end
describe "update_changeset/2" do
test "sets modified_at to the new time" do
comment = insert(:comment)
changeset = Comment.update_changeset(comment, %{})
assert comment.modified_at < changeset.changes[:modified_at]
end
test "sets modified_from to 'code_corps'" do
assert(
:comment
|> insert(modified_from: "github")
|> Comment.update_changeset(%{})
|> Changeset.get_field(:modified_from) == "code_corps"
)
end
end
end
<|start_filename|>lib/code_corps/model/github_user.ex<|end_filename|>
defmodule CodeCorps.GithubUser do
use Ecto.Schema
@type t :: %__MODULE__{}
schema "github_users" do
field :avatar_url, :string
field :email, :string
field :github_id, :integer
field :type, :string
field :username, :string
has_one :user, CodeCorps.User
timestamps()
end
end
<|start_filename|>lib/code_corps_web/views/stripe_platform_customer_view.ex<|end_filename|>
defmodule CodeCorpsWeb.StripePlatformCustomerView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:created, :currency, :delinquent, :email, :id_from_stripe, :inserted_at, :updated_at]
has_one :user, type: "user", field: :user_id
@doc """
Returns the email or an empty string, depending on whether the stripe_platform_customer
record being rendered belongs to the authenticated user or to some other user.
Users can only see their own emails. Everyone else's are private.
"""
def email(stripe_platform_customer, %Plug.Conn{assigns: %{current_user: current_user}}) do
if stripe_platform_customer.user == current_user, do: stripe_platform_customer.email, else: ""
end
def email(_stripe_platform_customer, _conn), do: ""
@doc """
Returns the id_from_stripe or an empty string, depending on whether the stripe_platform_customer
record being rendered belongs to the authenticated user or to some other user.
Users can only see their own stripe ids. Everyone else's are private.
"""
def id_from_stripe(stripe_platform_customer, %Plug.Conn{assigns: %{current_user: current_user}}) do
if stripe_platform_customer.user_id == current_user.id, do: stripe_platform_customer.id_from_stripe, else: ""
end
def id_from_stripe(_stripe_platform_customer, _conn), do: ""
end
<|start_filename|>test/lib/code_corps/github/sync/github_repo/github_repo_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubRepoTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GithubAppInstallation,
GithubRepo,
GitHub.Sync,
Repo
}
# from fixture
@installation_repositories load_endpoint_fixture("installation_repositories")
@app_github_id 2
describe "sync_installation/1" do
test "syncs repos by performing a diff using payload as master list" do
installation = insert(:github_app_installation, github_id: @app_github_id, state: "initiated_on_code_corps")
%{
"repositories" => [
%{"id" => matched_repo_github_id},
%{"id" => new_repo_github_id}
]
} = @installation_repositories
unmatched_repo = insert(:github_repo, github_app_installation: installation)
_matched_repo = insert(:github_repo, github_app_installation: installation, github_id: matched_repo_github_id)
{:ok, {synced_repos, deleted_repos}} =
installation |> Sync.GithubRepo.sync_installation()
assert synced_repos |> Enum.count == 2
assert deleted_repos |> Enum.count == 1
%GithubAppInstallation{state: end_state} =
Repo.get(GithubAppInstallation, installation.id)
assert end_state == "processed"
# unmatched repo was on record, but not in the payload, so it got deleted
refute Repo.get(GithubRepo, unmatched_repo.id)
# matched repo was both on record and in the payload, so it got updated
assert Repo.get_by(GithubRepo, github_id: matched_repo_github_id)
# new_repo was not on record, but was in the payload, so it got created
assert Repo.get_by(GithubRepo, github_id: new_repo_github_id)
# ensure no other repos have been created
assert GithubRepo |> Repo.aggregate(:count, :id) == 2
end
defmodule InvalidRepoRequest do
def request(:get, "https://api.github.com/installation/repositories", _, _, _) do
good_payload = "installation_repositories" |> load_endpoint_fixture()
%{"repositories" => [repo_1, repo_2]} = good_payload
bad_repo_1 = repo_1 |> Map.put("name", nil)
bad_payload =
good_payload |> Map.put("repositories", [bad_repo_1, repo_2])
{:ok, body} = bad_payload |> Poison.encode
{:ok, %HTTPoison.Response{status_code: 200, body: body}}
end
def request(method, endpoint, body, headers, options) do
CodeCorps.GitHub.SuccessAPI.request(method, endpoint, body, headers, options)
end
end
test "fails if there are repo validation erorrs" do
installation = insert(:github_app_installation, github_id: @app_github_id, state: "initiated_on_code_corps")
with_mock_api(InvalidRepoRequest) do
{:error, {:sync, {repos, changesets}}} =
installation |> Sync.GithubRepo.sync_installation()
assert repos |> Enum.count == 1
assert changesets |> Enum.count == 1
end
end
end
end
<|start_filename|>lib/code_corps/policy/project_skill.ex<|end_filename|>
defmodule CodeCorps.Policy.ProjectSkill do
import CodeCorps.Policy.Helpers, only: [get_project: 1, administered_by?: 2]
alias CodeCorps.{ProjectSkill, User}
@spec create?(User.t, map) :: boolean
def create?(%User{} = user, %{} = params) do
params |> get_project |> administered_by?(user)
end
@spec delete?(User.t, ProjectSkill.t) :: boolean
def delete?(%User{} = user, %ProjectSkill{} = project_skill) do
project_skill |> get_project |> administered_by?(user)
end
end
<|start_filename|>lib/code_corps_web/controllers/slugged_route_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.SluggedRouteController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{SluggedRoute, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"slug" => slug}) do
with %SluggedRoute{} = slugged_route <- SluggedRoute |> Query.slug_finder(slug) do
conn |> render("show.json-api", data: slugged_route)
end
end
end
<|start_filename|>lib/code_corps/sentry/sentry.ex<|end_filename|>
defmodule CodeCorps.Sentry do
@sentry Application.get_env(:code_corps, :sentry)
def capture_exception(exception, opts \\ []) do
@sentry.capture_exception(exception, opts)
end
end
<|start_filename|>lib/code_corps/messages/conversation_parts.ex<|end_filename|>
defmodule CodeCorps.Messages.ConversationParts do
@moduledoc ~S"""
An individual part of a conversation in a `CodeCorps.Conversation` thread,
i.e. a reply to the `CodeCorps.Conversation` by any participant.
"""
import Ecto.Changeset, only: [
assoc_constraint: 2,
cast: 3,
validate_required: 2,
validate_inclusion: 3
]
alias CodeCorps.{
Conversation,
ConversationPart,
Messages,
Repo
}
alias CodeCorpsWeb.ConversationChannel
alias Ecto.{
Changeset,
Multi
}
@spec create(map) :: {:ok, ConversationPart.t} | {:error, Changeset.t}
def create(%{"conversation_id" => id} = attrs) do
with %Conversation{} = conversation <- Repo.get(Conversation, id),
{:ok, %ConversationPart{} = conversation_part} <- do_create(attrs, conversation) do
ConversationChannel.broadcast_new_conversation_part(conversation_part)
{:ok, conversation_part}
end
end
@spec do_create(map, Conversation.t) :: {:ok, Conversation.t} | {:error, Changeset.t}
defp do_create(attrs, conversation) do
Multi.new
|> Multi.insert(:conversation_part, create_changeset(%ConversationPart{}, attrs))
|> Multi.update(:conversation, Messages.Conversations.part_added_changeset(conversation))
|> Repo.transaction()
|> marshall_result()
end
@spec marshall_result(tuple) :: {:ok, ConversationPart.t} | {:error, Changeset.t}
defp marshall_result({:ok, %{conversation_part: %ConversationPart{} = conversation_part}}), do: {:ok, conversation_part}
defp marshall_result({:error, :conversation_part, %Changeset{} = changeset, _steps}), do: {:error, changeset}
defp marshall_result({:error, :conversation, %Changeset{} = changeset, _steps}), do: {:error, changeset}
@doc false
@spec create_changeset(ConversationPart.t, map) :: Ecto.Changeset.t
def create_changeset(%ConversationPart{} = conversation_part, attrs) do
conversation_part
|> cast(attrs, [:author_id, :body, :conversation_id, :part_type])
|> validate_required([:author_id, :body, :conversation_id])
|> validate_inclusion(:part_type, part_types())
|> assoc_constraint(:author)
|> assoc_constraint(:conversation)
end
defp part_types do
~w{ closed comment note reopened }
end
end
<|start_filename|>test/lib/code_corps/stripe_service/adapters/stripe_connect_account_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripeConnectAccountTest do
use ExUnit.Case, async: true
alias CodeCorps.StripeService.Adapters.StripeConnectAccountAdapter
defp test_account() do
# If a `Stripe.Account` has multiple `Stripe.ExternalAccount` records, we
# want the adapter to deal with that by only taking one, so we load the
# appropriate fixture
"account_with_multiple_external_accounts"
|> CodeCorps.StripeTesting.Account.load_fixture
end
@local_map %{
"id_from_stripe" => "account_with_multiple_external_accounts",
"business_name" => "Some Company Inc.",
"business_url" => "somecompany.org",
"charges_enabled" => false,
"country" => "US",
"default_currency" => "usd",
"details_submitted" => false,
"display_name" => "<NAME>",
"email" => "<EMAIL>",
"external_account" => "ba_222222222222222222222222",
"legal_entity_address_city" => nil,
"legal_entity_address_country" => "US",
"legal_entity_address_line1" => nil,
"legal_entity_address_line2" => nil,
"legal_entity_address_postal_code" => nil,
"legal_entity_address_state" => nil,
"legal_entity_business_name" => "Some Company Inc.",
"legal_entity_business_tax_id" => nil,
"legal_entity_business_tax_id_provided" => false,
"legal_entity_business_vat_id" => nil,
"legal_entity_business_vat_id_provided" => false,
"legal_entity_dob_day" => nil,
"legal_entity_dob_month" => nil,
"legal_entity_dob_year" => nil,
"legal_entity_first_name" => "John",
"legal_entity_gender" => nil,
"legal_entity_last_name" => "Doe",
"legal_entity_maiden_name" => nil,
"legal_entity_personal_address_city" => nil,
"legal_entity_personal_address_country" => "US",
"legal_entity_personal_address_line2" => nil,
"legal_entity_personal_address_line1" => nil,
"legal_entity_personal_address_postal_code" => nil,
"legal_entity_personal_address_state" => nil,
"legal_entity_personal_id_number" => nil,
"legal_entity_personal_id_number_provided" => false,
"legal_entity_phone_number" => nil,
"legal_entity_ssn_last_4" => nil,
"legal_entity_ssn_last_4_provided" => false,
"legal_entity_type" => "sole_prop",
"legal_entity_verification_details" => nil,
"legal_entity_verification_details_code" => "failed_other",
"legal_entity_verification_document" => "fil_12345",
"legal_entity_verification_status" => "unverified",
"payouts_enabled" => false,
"support_email" => nil,
"support_phone" => "1234567890",
"support_url" => nil,
"tos_acceptance_date" => nil,
"tos_acceptance_ip" => nil,
"tos_acceptance_user_agent" => nil,
"type" => "custom",
"verification_disabled_reason" => "fields_needed",
"verification_due_by" => nil,
"verification_fields_needed" => [
"business_url",
"external_account",
"tos_acceptance.date",
"tos_acceptance.ip"
]
}
describe "to_params/2" do
test "converts from stripe map to local properly" do
test_attributes = %{"organization_id" => 123, "foo" => "bar"}
expected_attributes = %{"organization_id" => 123}
{:ok, result} = StripeConnectAccountAdapter.to_params(test_account(), test_attributes)
expected_map = Map.merge(@local_map, expected_attributes)
assert result == expected_map
end
end
describe "from_params/1" do
test "converts from local to stripe map properly" do
# add some junk data to ensure that gets removed
test_input = Map.merge(@local_map, %{"organization_id" => 123, "foo" => "bar"})
{:ok, result} = StripeConnectAccountAdapter.from_params(test_input)
assert result == %{
business_name: "Some Company Inc.",
business_url: "somecompany.org",
charges_enabled: false,
country: "US",
default_currency: "usd",
details_submitted: false,
display_name: "<NAME>",
email: "<EMAIL>",
id: "account_with_multiple_external_accounts",
support_phone: "1234567890",
payouts_enabled: false,
type: "custom",
legal_entity: %{
business_name: "Some Company Inc.",
business_tax_id_provided: false,
business_vat_id_provided: false,
first_name: "John",
last_name: "Doe",
personal_id_number_provided: false,
ssn_last_4_provided: false,
type: "sole_prop",
address: %{country: "US"},
personal_address: %{country: "US"},
verification: %{details_code: "failed_other", document: "fil_12345", status: "unverified"}
},
verification: %{
disabled_reason: "fields_needed",
fields_needed: ["business_url", "external_account", "tos_acceptance.date", "tos_acceptance.ip"]
},
external_account: "ba_222222222222222222222222"
}
end
end
end
<|start_filename|>lib/code_corps/model/github_app_installation.ex<|end_filename|>
defmodule CodeCorps.GithubAppInstallation do
@moduledoc ~S"""
Represents an installation of the CodeCorps GitHub App on a user or organization account on GitHub.
"""
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "github_app_installations" do
field :access_token, :string
field :access_token_expires_at, :utc_datetime
field :github_account_avatar_url, :string
field :github_account_id, :integer
field :github_account_login, :string
field :github_account_type, :string
field :github_id, :integer
field :installed, :boolean
field :origin, :string, default: "codecorps" # "codecorps" or "github"
field :sender_github_id, :integer
# "unprocessed", "processing", "processed" or "errored"
field :state, :string, default: "unprocessed"
belongs_to :project, CodeCorps.Project # The originating project
belongs_to :user, CodeCorps.User
has_many :github_repos, CodeCorps.GithubRepo
has_many :organization_github_app_installations, CodeCorps.OrganizationGithubAppInstallation
timestamps()
end
@doc ~S"""
Changeset used to create a GithubAppInstallation record from CodeCorps
"""
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:project_id, :user_id])
|> validate_required([:project_id, :user_id])
|> assoc_constraint(:project)
|> assoc_constraint(:user)
|> put_change(:origin, "codecorps")
|> put_change(:state, "unprocessed")
end
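# A minimal usage sketch (hypothetical ids), showing that origin and state are
# always forced to "codecorps" and "unprocessed" regardless of params:
#
#   changeset =
#     %CodeCorps.GithubAppInstallation{}
#     |> CodeCorps.GithubAppInstallation.create_changeset(%{project_id: 1, user_id: 2})
#
#   Ecto.Changeset.get_field(changeset, :origin) #=> "codecorps"
#   Ecto.Changeset.get_field(changeset, :state)  #=> "unprocessed"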
@doc ~S"""
Changeset used to refresh an access token for a GithubAppInstallation
"""
def access_token_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:access_token, :access_token_expires_at])
|> validate_required([:access_token, :access_token_expires_at])
end
end
<|start_filename|>lib/code_corps_web/views/role_view.ex<|end_filename|>
defmodule CodeCorpsWeb.RoleView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:name, :ability, :kind, :inserted_at, :updated_at]
has_many :role_skills, serializer: CodeCorpsWeb.RoleSkillView, identifiers: :always
end
<|start_filename|>lib/code_corps/analytics/segment_traits_builder.ex<|end_filename|>
defmodule CodeCorps.Analytics.SegmentTraitsBuilder do
@moduledoc """
Builds Segment traits from provided data
"""
alias CodeCorps.Repo
@spec build(struct | map) :: map
def build(record), do: traits(record)
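# A minimal usage sketch, assuming a persisted CodeCorps.Task; build/1 simply
# dispatches to the private traits/1 clause matching the struct it is given:
#
#   task |> CodeCorps.Analytics.SegmentTraitsBuilder.build()
#   #=> %{order: ..., task: ..., task_id: ..., task_list_id: ..., project_id: ...}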
@spec traits(struct | map) :: map
defp traits(%CodeCorps.Comment{} = comment) do
comment = comment |> Repo.preload(:task)
%{
comment_id: comment.id,
task: comment.task.title,
task_id: comment.task.id,
project_id: comment.task.project_id
}
end
defp traits(%CodeCorps.DonationGoal{} = donation_goal) do
%{
amount: donation_goal.amount,
current: donation_goal.current,
project_id: donation_goal.project_id
}
end
defp traits(%CodeCorps.GithubAppInstallation{} = installation) do
%{
access_token_expires_at: installation.access_token_expires_at,
github_account_login: installation.github_account_login,
github_account_type: installation.github_account_type,
github_id: installation.github_id,
origin: installation.origin,
state: installation.state,
project_id: installation.project_id,
user_id: installation.user_id
}
end
defp traits(%CodeCorps.GithubRepo{} = record) do
project_title =
record
|> Repo.preload([:project])
|> Map.get(:project)
|> (&(&1 || %{})).()
|> Map.get(:title, "")
%{
id: record.id,
github_account_login: record.github_account_login,
github_account_type: record.github_account_type,
github_id: record.github_id,
github_repo_name: record.name,
project: project_title,
project_id: record.project_id
}
end
defp traits(%CodeCorps.Project{} = record) do
record = record |> Repo.preload([:organization])
%{
id: record.id,
approval_requested: record.approval_requested,
approved: record.approved,
description: record.description,
slug: record.slug,
title: record.title,
total_monthly_donated: record.total_monthly_donated,
website: record.website
}
end
defp traits(%CodeCorps.ProjectSkill{} = record) do
record = record |> Repo.preload([:project, :skill])
%{
skill: record.skill.title,
skill_id: record.skill_id,
project: record.project.title,
project_id: record.project_id
}
end
defp traits(%CodeCorps.ProjectUser{} = record) do
record = record |> Repo.preload([:project, :user])
%{
project: record.project.title,
project_id: record.project_id,
member: record.user.username,
member_id: record.user.id
}
end
defp traits(%CodeCorps.StripeConnectAccount{} = account) do
%{
id: account.id,
business_name: account.business_name,
display_name: account.display_name,
email: account.email,
id_from_stripe: account.id_from_stripe,
organization_id: account.organization_id,
}
end
defp traits(%CodeCorps.StripeConnectCharge{} = charge) do
# NOTE: dividing by 100 only converts correctly for two-decimal currencies
# (e.g. USD); zero-decimal currencies such as JPY are not handled here
revenue = charge.amount / 100
currency = String.capitalize(charge.currency) # ISO 4217 currency code
%{
charge_id: charge.id,
currency: currency,
revenue: revenue,
user_id: charge.user_id
}
end
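# Worked example of the conversion above: a charge with amount: 2500 in
# currency: "usd" yields revenue: 25.0 and currency: "Usd", while a
# zero-decimal currency like JPY would come out off by a factor of 100.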
defp traits(%CodeCorps.StripeConnectPlan{} = plan) do
%{
id: plan.id,
amount: plan.amount,
created: plan.created,
id_from_stripe: plan.id_from_stripe,
name: plan.name,
project_id: plan.project_id
}
end
defp traits(%CodeCorps.StripeConnectSubscription{} = subscription) do
subscription = subscription |> Repo.preload(:stripe_connect_plan)
%{
id: subscription.id,
created: subscription.created,
cancelled_at: subscription.cancelled_at,
current_period_start: subscription.current_period_start,
current_period_end: subscription.current_period_end,
ended_at: subscription.ended_at,
id_from_stripe: subscription.id_from_stripe,
quantity: subscription.quantity,
status: subscription.status,
start: subscription.start,
plan_id: subscription.stripe_connect_plan_id,
user_id: subscription.user_id,
project_id: subscription.stripe_connect_plan.project_id
}
end
defp traits(%CodeCorps.StripePlatformCard{} = card) do
%{
id: card.id,
brand: card.brand,
exp_month: card.exp_month,
exp_year: card.exp_year,
id_from_stripe: card.id_from_stripe,
last4: card.last4,
name: card.name,
user_id: card.user_id
}
end
defp traits(%CodeCorps.StripePlatformCustomer{} = customer) do
%{
id: customer.id,
created: customer.created,
currency: customer.currency,
delinquent: customer.delinquent,
email: customer.email,
id_from_stripe: customer.id_from_stripe,
user_id: customer.user_id
}
end
defp traits(%CodeCorps.Task{} = task) do
%{
order: task.order,
task: task.title,
task_id: task.id,
task_list_id: task.task_list_id,
project_id: task.project_id
}
end
defp traits(%CodeCorps.TaskSkill{} = task_skill) do
task_skill = task_skill |> Repo.preload([:skill, :task])
%{
skill: task_skill.skill.title,
skill_id: task_skill.skill.id,
task: task_skill.task.title
}
end
defp traits(%CodeCorps.User{} = user) do
%{
admin: user.admin,
biography: user.biography,
created_at: user.inserted_at,
email: user.email,
first_name: user.first_name,
github_id: user.github_id,
github_username: user.github_username,
last_name: user.last_name,
sign_up_context: user.sign_up_context,
state: user.state,
twitter: user.twitter,
type: user.type,
username: user.username,
website: user.website
}
end
defp traits(%CodeCorps.UserCategory{} = user_category) do
user_category = user_category |> Repo.preload(:category)
%{
category: user_category.category.name,
category_id: user_category.category.id
}
end
defp traits(%CodeCorps.UserRole{} = user_role) do
user_role = user_role |> Repo.preload(:role)
%{
role: user_role.role.name,
role_id: user_role.role.id
}
end
defp traits(%CodeCorps.UserSkill{} = user_skill) do
user_skill = user_skill |> Repo.preload(:skill)
%{
skill: user_skill.skill.title,
skill_id: user_skill.skill.id
}
end
defp traits(%CodeCorps.UserTask{} = user_task) do
user_task = user_task |> Repo.preload(:task)
%{
task: user_task.task.title,
task_id: user_task.task_id
}
end
defp traits(%{token: _, user_id: _}), do: %{}
defp traits(%{acceptor: user, project_user: project_user}) do
project_user
|> traits()
|> Map.merge(%{acceptor_id: user.id, acceptor: user.username})
end
end
<|start_filename|>test/lib/code_corps/tasks/tasks_test.exs<|end_filename|>
defmodule CodeCorps.TasksTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{GithubIssue, Repo, Task, Tasks}
@base_attrs %{
"title" => "Test task",
"markdown" => "A test task",
"status" => "open"
}
@issue_payload load_endpoint_fixture("issue")
defp valid_attrs() do
project = insert(:project)
task_list = insert(:task_list, project: project, inbox: true)
user = insert(:user)
@base_attrs
|> Map.put("project_id", project.id)
|> Map.put("task_list_id", task_list.id)
|> Map.put("user_id", user.id)
end
describe "create_task/2" do
test "creates task" do
{:ok, task} = valid_attrs() |> Tasks.create_task
assert task.title == @base_attrs["title"]
assert task.markdown == @base_attrs["markdown"]
assert task.body
assert task.status == "open"
refute task.github_issue_id
refute task.github_repo_id
refute_received({:post, "https://api.github.com/" <> _rest, _body, _headers, _options})
end
test "sets modified_from to 'code_corps'" do
{:ok, task} = valid_attrs() |> Tasks.create_task
assert task.modified_from == "code_corps"
end
test "returns errored changeset if attributes are invalid" do
{:error, changeset} = Tasks.create_task(@base_attrs)
refute changeset.valid?
refute Repo.one(Task)
refute_received({:post, "https://api.github.com/" <> _rest, _body, _headers, _options})
end
test "if task is assigned a github repo, creates github issue on assigned repo" do
attrs = valid_attrs()
project = Repo.one(CodeCorps.Project)
github_repo =
:github_repo
|> insert(github_account_login: "foo", name: "bar")
insert(:github_repo, project: project)
{:ok, task} =
attrs
|> Map.put("github_repo_id", github_repo.id)
|> Tasks.create_task
assert task.title == @base_attrs["title"]
assert task.markdown == @base_attrs["markdown"]
assert task.body
assert task.status == "open"
assert task.github_issue_id
assert task.github_repo_id == github_repo.id
assert_received({:post, "https://api.github.com/repos/foo/bar/issues", _body, _headers, _options})
end
test "if github process fails, returns {:error, :github}" do
attrs = valid_attrs()
project = Repo.one(CodeCorps.Project)
github_repo =
:github_repo
|> insert(github_account_login: "foo", name: "bar")
insert(:github_repo, project: project)
with_mock_api(CodeCorps.GitHub.FailureAPI) do
assert {:error, :github} ==
attrs
|> Map.put("github_repo_id", github_repo.id)
|> Tasks.create_task
end
refute Repo.one(Task)
assert_received({:post, "https://api.github.com/repos/foo/bar/issues", _body, _headers, _options})
end
end
describe "update_task/2" do
@update_attrs %{"title" => "foo", "markdown" => "bar", "status" => "closed"}
test "updates task" do
task = insert(:task)
{:ok, updated_task} = task |> Tasks.update_task(@update_attrs)
assert updated_task.id == task.id
assert updated_task.title == @update_attrs["title"]
assert updated_task.markdown == @update_attrs["markdown"]
refute updated_task.body == task.body
refute task.github_issue_id
refute task.github_repo_id
refute_received({:patch, "https://api.github.com/" <> _rest, _body, _headers, _options})
end
test "sets modified_from to 'code_corps'" do
task = insert(:task, modified_from: "github")
{:ok, updated_task} = task |> Tasks.update_task(@update_attrs)
assert updated_task.modified_from == "code_corps"
end
test "returns {:error, changeset} if there are validation errors" do
task = insert(:task)
{:error, changeset} = task |> Tasks.update_task(%{"title" => nil})
refute changeset.valid?
refute_received({:patch, "https://api.github.com/" <> _rest, _body, _headers, _options})
end
test "creates a github issue if task is just now connected to a repo" do
github_repo =
:github_repo
|> insert(github_account_login: "foo", name: "bar")
task = insert(:task)
attrs = @update_attrs |> Map.put("github_repo_id", github_repo.id)
{:ok, updated_task} = task |> Tasks.update_task(attrs)
assert updated_task.github_issue_id
assert updated_task.github_repo_id == github_repo.id
assert_received({:post, "https://api.github.com/repos/foo/bar/issues", _body, _headers, _options})
end
test "propagates changes to github if task is synced to github issue" do
github_repo =
:github_repo
|> insert(github_account_login: "foo", name: "bar")
github_issue = insert(:github_issue, number: 5)
task = insert(:task, github_repo: github_repo, github_issue: github_issue)
{:ok, updated_task} = task |> Tasks.update_task(@update_attrs)
assert updated_task.id == task.id
assert updated_task.title == @update_attrs["title"]
assert updated_task.markdown == @update_attrs["markdown"]
refute updated_task.body == task.body
assert updated_task.github_issue_id
assert updated_task.github_repo_id
assert_received({:patch, "https://api.github.com/repos/foo/bar/issues/5", _body, _headers, _options})
end
test "propagates changes to github if task is synced to github pull request" do
%{
"id" => issue_github_id,
"number" => number
} = @issue_payload
github_repo = insert(:github_repo, github_account_login: "octocat", name: "Hello-World")
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_id: issue_github_id, number: number, github_pull_request: github_pull_request, github_repo: github_repo)
task = insert(:task, github_repo: github_repo, github_issue: github_issue)
{:ok, updated_task} = task |> Tasks.update_task(@update_attrs)
assert_received({:patch, "https://api.github.com/repos/octocat/Hello-World/issues/1347", _body, _headers, _options})
assert updated_task.id == task.id
assert updated_task.title == @update_attrs["title"]
assert updated_task.markdown == @update_attrs["markdown"]
refute updated_task.body == task.body
assert updated_task.github_issue_id == github_issue.id
assert updated_task.github_repo_id == github_repo.id
updated_github_issue = Repo.one(GithubIssue)
assert updated_github_issue.github_pull_request_id == github_pull_request.id
end
test "reports {:error, :github}, makes no changes at all if there is a github api error" do
github_repo =
:github_repo
|> insert(github_account_login: "foo", name: "bar")
github_issue = insert(:github_issue, number: 5)
task = insert(:task, github_repo: github_repo, github_issue: github_issue)
with_mock_api(CodeCorps.GitHub.FailureAPI) do
assert {:error, :github} == task |> Tasks.update_task(@update_attrs)
end
updated_task = Repo.one(Task)
assert updated_task.id == task.id
assert updated_task.title == task.title
assert updated_task.markdown == task.markdown
assert updated_task.body == task.body
assert updated_task.github_issue_id == task.github_issue_id
assert updated_task.github_repo_id == task.github_repo_id
assert_received({:patch, "https://api.github.com/repos/foo/bar/issues/5", _body, _headers, _options})
end
end
end
<|start_filename|>lib/code_corps_web/views/stripe_connect_account_view.ex<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectAccountView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
alias CodeCorps.StripeConnectAccount
def attributes(record, _conn), do: %{
bank_account_bank_name: record |> bank_account_bank_name,
bank_account_last4: record |> bank_account_last4,
bank_account_routing_number: record |> bank_account_routing_number,
bank_account_status: record |> bank_account_status,
business_name: record.business_name,
business_url: record.business_url,
can_accept_donations: record |> can_accept_donations,
charges_enabled: record.charges_enabled,
country: record.country,
default_currency: record.default_currency,
details_submitted: record.details_submitted,
display_name: record.display_name,
email: record.email,
id_from_stripe: record.id_from_stripe,
inserted_at: record.inserted_at,
legal_entity_address_city: record.legal_entity_address_city,
legal_entity_address_country: record.legal_entity_address_country,
legal_entity_address_line1: record.legal_entity_address_line1,
legal_entity_address_line2: record.legal_entity_address_line2,
legal_entity_address_postal_code: record.legal_entity_address_postal_code,
legal_entity_address_state: record.legal_entity_address_state,
legal_entity_business_name: record.legal_entity_business_name,
legal_entity_business_tax_id: record.legal_entity_business_tax_id,
legal_entity_business_tax_id_provided: record.legal_entity_business_tax_id_provided,
legal_entity_business_vat_id: record.legal_entity_business_vat_id,
legal_entity_business_vat_id_provided: record.legal_entity_business_vat_id_provided,
legal_entity_dob_day: record.legal_entity_dob_day,
legal_entity_dob_month: record.legal_entity_dob_month,
legal_entity_dob_year: record.legal_entity_dob_year,
legal_entity_first_name: record.legal_entity_first_name,
legal_entity_last_name: record.legal_entity_last_name,
legal_entity_gender: record.legal_entity_gender,
legal_entity_maiden_name: record.legal_entity_maiden_name,
legal_entity_personal_address_city: record.legal_entity_personal_address_city,
legal_entity_personal_address_country: record.legal_entity_personal_address_country,
legal_entity_personal_address_line1: record.legal_entity_personal_address_line1,
legal_entity_personal_address_line2: record.legal_entity_personal_address_line2,
legal_entity_personal_address_postal_code: record.legal_entity_personal_address_postal_code,
legal_entity_personal_address_state: record.legal_entity_personal_address_state,
legal_entity_phone_number: record.legal_entity_phone_number,
legal_entity_personal_id_number: record.legal_entity_personal_id_number,
legal_entity_personal_id_number_provided: record.legal_entity_personal_id_number_provided,
legal_entity_ssn_last_4: record.legal_entity_ssn_last_4,
legal_entity_ssn_last_4_provided: record.legal_entity_ssn_last_4_provided,
legal_entity_type: record.legal_entity_type,
legal_entity_verification_details: record.legal_entity_verification_details,
legal_entity_verification_details_code: record.legal_entity_verification_details_code,
legal_entity_verification_document: record.legal_entity_verification_document,
legal_entity_verification_status: record.legal_entity_verification_status,
payouts_enabled: record.payouts_enabled,
personal_id_number_status: record |> personal_id_number_status,
recipient_status: record |> recipient_status,
support_email: record.support_email,
support_phone: record.support_phone,
support_url: record.support_url,
type: record.type,
updated_at: record.updated_at,
verification_disabled_reason: record.verification_disabled_reason,
verification_due_by: record.verification_due_by,
verification_document_status: record |> verification_document_status,
verification_fields_needed: record.verification_fields_needed
}
has_one :organization, type: "organization", field: :organization_id
def can_accept_donations(stripe_connect_account) do
case Application.get_env(:code_corps, :stripe_env) do
:prod -> stripe_connect_account.charges_enabled
_ -> true
end
end
def bank_account_bank_name(%{stripe_external_account: nil}), do: nil
def bank_account_bank_name(%{stripe_external_account: %{bank_name: bank_name}}), do: bank_name
def bank_account_last4(%{stripe_external_account: nil}), do: nil
def bank_account_last4(%{stripe_external_account: %{last4: last4}}), do: last4
def bank_account_routing_number(%{stripe_external_account: nil}), do: nil
def bank_account_routing_number(%{stripe_external_account: %{routing_number: routing_number}}), do: routing_number
# recipient_status mapping
@doc ~S"""
Returns an inferred recipient verification status for the account, based on
the legal entity verification status and required fields for verification.
The default assumed status is "required".
If the verification status is "pending" and "legal_entity" fields are needed,
the returned status is "required".
If the verification status is "pending" and no "legal_entity" fields are
needed, or the status is already "verified", the returned status is "verified".
"""
@spec recipient_status(StripeConnectAccount.t) :: String.t
def recipient_status(%StripeConnectAccount{
legal_entity_verification_status: "pending",
verification_fields_needed: needed_fields}) do
case needed_fields |> includes_field_from?("legal_entity") do
true -> "required"
false -> "verified"
end
end
def recipient_status(%StripeConnectAccount{legal_entity_verification_status: "verified"}), do: "verified"
def recipient_status(_), do: "required"
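# A minimal sketch of the mapping above (hypothetical field lists):
#
#   recipient_status(%StripeConnectAccount{
#     legal_entity_verification_status: "pending",
#     verification_fields_needed: ["legal_entity.first_name"]
#   }) #=> "required"
#
#   recipient_status(%StripeConnectAccount{
#     legal_entity_verification_status: "pending",
#     verification_fields_needed: ["external_account"]
#   }) #=> "verified"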
# https://stripe.com/docs/api#account_object-verification-fields_needed
# Check if the list of required fields includes any fields from the specified
# group.
# Required fields are listed as an array, nested in groups using `.`, example:
# `group_a.field_a`, `group_a.field_b`, `group_b.field_a`, etc.
@spec includes_field_from?(list, String.t) :: boolean
def includes_field_from?(fields, field_group) do
fields
|> Enum.map(&String.split(&1, "."))
|> Enum.map(&List.first/1)
|> Enum.member?(field_group)
end
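# For example, ["legal_entity.first_name", "external_account"] includes a field
# from the "legal_entity" group but none from the "tos_acceptance" group:
#
#   includes_field_from?(["legal_entity.first_name", "external_account"], "legal_entity")
#   #=> true
#   includes_field_from?(["legal_entity.first_name", "external_account"], "tos_acceptance")
#   #=> false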
@doc ~S"""
Returns the inferred verification document status, based on verification
fields needed, the verification status, and the document field itself:
- If status is already verified, returns `verified`
- If there is no document and fields needed include the document,
returns `required`
- If there is no document and fields needed do not include the document,
returns `pending_requirement`
- If there is a document and verification status is pending,
returns `verifying`
- If there is a document and fields needed include the document,
returns `errored`
- If there is a document and fields needed do not include the document,
returns `verified`
"""
@spec verification_document_status(StripeConnectAccount.t) :: String.t
def verification_document_status(
%StripeConnectAccount{
legal_entity_verification_status: "verified"
}), do: "verified"
def verification_document_status(%StripeConnectAccount{
legal_entity_verification_document: nil,
verification_fields_needed: fields
}) when length(fields) > 0 do
case Enum.member?(fields, "legal_entity.verification.document") do
true -> "required"
false -> "pending_requirement"
end
end
def verification_document_status(%StripeConnectAccount{
legal_entity_verification_document: _,
legal_entity_verification_status: "pending"
}), do: "verifying"
def verification_document_status(%StripeConnectAccount{
legal_entity_verification_document: _,
verification_fields_needed: fields
}) when length(fields) > 0 do
case Enum.member?(fields, "legal_entity.verification.document") do
true -> "errored"
false -> "verified"
end
end
def verification_document_status(_), do: "pending_requirement"
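# A minimal sketch of the document status mapping (hypothetical values):
#
#   verification_document_status(%StripeConnectAccount{
#     legal_entity_verification_document: nil,
#     verification_fields_needed: ["legal_entity.verification.document"]
#   }) #=> "required"
#
#   verification_document_status(%StripeConnectAccount{
#     legal_entity_verification_document: "fil_12345",
#     legal_entity_verification_status: "pending"
#   }) #=> "verifying"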
# personal_id_number_status
def personal_id_number_status(%StripeConnectAccount{
legal_entity_personal_id_number_provided: false,
verification_fields_needed: fields
}) when length(fields) > 0 do
case Enum.member?(fields, "legal_entity.personal_id_number") do
true -> "required"
false -> "pending_requirement"
end
end
def personal_id_number_status(%StripeConnectAccount{
legal_entity_personal_id_number_provided: true,
legal_entity_verification_status: "pending"
}), do: "verifying"
def personal_id_number_status(%StripeConnectAccount{
legal_entity_personal_id_number_provided: true
}), do: "verified"
def personal_id_number_status(_), do: "pending_requirement"
# bank_account_status
def bank_account_status(%StripeConnectAccount{
verification_fields_needed: fields
}) when length(fields) > 0 do
case Enum.member?(fields, "external_account") do
true -> "required"
false -> "pending_requirement"
end
end
def bank_account_status(%StripeConnectAccount{
external_account: external_account
}) when not is_nil(external_account), do: "verified"
def bank_account_status(_), do: "pending_requirement"
end
<|start_filename|>lib/code_corps/model/stripe_external_account.ex<|end_filename|>
defmodule CodeCorps.StripeExternalAccount do
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "stripe_external_accounts" do
field :id_from_stripe, :string, null: false
field :account_id_from_stripe, :string, null: false
field :account_holder_name, :string
field :account_holder_type, :string
field :bank_name, :string
field :country, :string
field :currency, :string
field :default_for_currency, :boolean
field :fingerprint, :string
field :last4, :string
field :routing_number, :string
field :status, :string
belongs_to :stripe_connect_account, CodeCorps.StripeConnectAccount
timestamps()
end
@create_params [
:id_from_stripe, :account_id_from_stripe, :account_holder_name, :account_holder_type, :bank_name,
:country, :currency, :default_for_currency, :fingerprint, :last4, :routing_number, :status,
:stripe_connect_account_id
]
@required_create_params [:id_from_stripe, :account_id_from_stripe]
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, @create_params)
|> validate_required(@required_create_params)
|> assoc_constraint(:stripe_connect_account)
end
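# A minimal usage sketch (hypothetical Stripe ids); omitting either required id
# makes the changeset invalid:
#
#   %CodeCorps.StripeExternalAccount{}
#   |> CodeCorps.StripeExternalAccount.changeset(%{
#     id_from_stripe: "ba_123",
#     account_id_from_stripe: "acct_123",
#     last4: "6789"
#   })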
end
<|start_filename|>test/lib/code_corps/policy/project_user_test.exs<|end_filename|>
defmodule CodeCorps.Policy.ProjectUserTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.ProjectUser, only: [create?: 2, update?: 3, delete?: 2]
describe "create?/2" do
test "when user is creating their own pending membership" do
user = insert(:user)
project = insert(:project)
params = %{"project_id" => project.id, "user_id" => user.id, "role" => "pending"}
assert create?(user, params)
end
test "when user is creating any other membership" do
user = insert(:user)
project = insert(:project)
params = %{"project_id" => project.id, "user_id" => user.id, "role" => "contributor"}
refute create?(user, params)
end
test "when normal user is creating someone else's membership" do
user = insert(:user)
project = insert(:project)
params = %{"project_id" => project.id, "user_id" => "someone_else"}
refute create?(user, params)
end
test "when pending user is creating someone else's membership" do
pending = insert(:user)
project = insert(:project)
insert(:project_user, role: "pending", user: pending, project: project)
params = %{"project_id" => project.id, "user_id" => "someone_else"}
refute create?(pending, params)
end
test "when contributor is creating someone else's membership" do
contributor = insert(:user)
project = insert(:project)
insert(:project_user, role: "contributor", user: contributor, project: project)
params = %{"project_id" => project.id, "user_id" => "someone_else"}
refute create?(contributor, params)
end
test "when user is admin and role is contributor" do
admin = insert(:user)
project = insert(:project)
insert(:project_user, role: "admin", user: admin, project: project)
params = %{"project_id" => project.id, "user_id" => "someone_else", "role" => "contributor"}
assert create?(admin, params)
end
test "when user is admin and role is admin" do
admin = insert(:user)
project = insert(:project)
insert(:project_user, role: "admin", user: admin, project: project)
params = %{"project_id" => project.id, "user_id" => "someone_else", "role" => "admin"}
refute create?(admin, params)
end
test "when user is owner" do
owner = insert(:user)
project = insert(:project)
insert(:project_user, role: "owner", user: owner, project: project)
params = %{"project_id" => project.id, "user_id" => "someone_else", "role" => "owner"}
assert create?(owner, params)
end
end
describe "update?/2" do
test "returns false when user is non-member" do
user = insert(:user)
project_user = insert(:project_user)
refute update?(user, project_user, %{})
end
test "returns false when user is pending" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "pending", user: user, project: project)
project_user = insert(:project_user, project: project)
refute update?(user, project_user, %{})
end
test "returns false when user is contributor" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "contributor", user: user, project: project)
project_user = insert(:project_user, project: project)
refute update?(user, project_user, %{})
end
test "returns true when user is admin, approving a pending membership" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "admin", user: user, project: project)
project_user = insert(:project_user, project: project, role: "pending")
assert update?(user, project_user, %{"role" => "contributor"})
end
test "returns false when user is admin, doing something other than approving a pending membership" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "admin", user: user, project: project)
project_user = insert(:project_user, project: project, role: "contributor")
refute update?(user, project_user, %{})
end
test "returns true when user is owner and is changing a role other than owner" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "owner", user: user, project: project)
project_user = insert(:project_user, project: project, role: "admin")
assert update?(user, project_user, %{})
end
test "returns false when user is owner and is changing another owner" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "owner", user: user, project: project)
project_user = insert(:project_user, project: project, role: "owner")
refute update?(user, project_user, %{})
end
end
describe "delete?/2" do
test "returns true when contributor is deleting their own membership" do
user = insert(:user)
project = insert(:project)
project_user = insert(:project_user, project: project, user: user, role: "contributor")
assert delete?(user, project_user)
end
test "returns true when admin is deleting a pending membership" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "admin", user: user, project: project)
project_user = insert(:project_user, project: project, role: "pending")
assert delete?(user, project_user)
end
test "returns true when admin is deleting a contributor" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "admin", user: user, project: project)
project_user = insert(:project_user, project: project, role: "contributor")
assert delete?(user, project_user)
end
test "returns false when admin is deleting another admin" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "admin", user: user, project: project)
project_user = insert(:project_user, project: project, role: "admin")
refute delete?(user, project_user)
end
test "returns false when admin is deleting an owner" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "admin", user: user, project: project)
project_user = insert(:project_user, project: project, role: "owner")
refute delete?(user, project_user)
end
test "returns true when owner is deleting an admin" do
user = insert(:user)
project = insert(:project)
insert(:project_user, role: "owner", user: user, project: project)
project_user = insert(:project_user, project: project, role: "admin")
assert delete?(user, project_user)
end
end
end
<|start_filename|>lib/code_corps/github/api/headers.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.Headers do
alias CodeCorps.GitHub.API.JWT
@typep header :: {String.t, String.t}
@type t :: list(header)
@spec user_request(%{String.t => String.t} | %{}, list) :: t
def user_request(%{} = headers, options) do
headers
|> add_default_headers()
|> add_access_token_header(options)
|> Map.to_list()
end
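# A minimal sketch (hypothetical token); the tuple order may vary, and without
# an :access_token option the Authorization header is simply omitted:
#
#   CodeCorps.GitHub.API.Headers.user_request(%{}, access_token: "v1.abc123")
#   #=> [
#   #     {"Accept", "application/vnd.github.machine-man-preview+json"},
#   #     {"Authorization", "token v1.abc123"}
#   #   ]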
@spec integration_request(%{String.t => String.t} | %{}) :: t
def integration_request(%{} = headers) do
headers
|> add_default_headers()
|> add_jwt_header()
|> Map.to_list()
end
@spec access_token_request :: t
def access_token_request do
%{"Accept" => "application/json", "Content-Type" => "application/json"}
|> add_default_headers()
|> Map.to_list()
end
@spec add_default_headers(%{String.t => String.t}) :: %{String.t => String.t}
defp add_default_headers(%{} = headers) do
Map.merge(%{"Accept" => "application/vnd.github.machine-man-preview+json"}, headers)
end
@spec add_access_token_header(%{String.t => String.t}, list) :: %{String.t => String.t}
defp add_access_token_header(%{} = headers, opts) do
case opts[:access_token] do
nil -> headers
token -> headers |> Map.put("Authorization", "token #{token}")
end
end
@spec add_jwt_header(%{String.t => String.t}) :: %{String.t => String.t}
defp add_jwt_header(%{} = headers) do
Map.put(headers, "Authorization", "Bearer #{JWT.generate}")
end
end
<|start_filename|>test/lib/code_corps/policy/user_test.exs<|end_filename|>
defmodule CodeCorps.Policy.UserTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.User, only: [update?: 2]
describe "update?" do
test "returns true if user is updating their own record" do
user = insert(:user)
assert update?(user, user)
end
test "returns false if user is updating someone else's record" do
[user, another_user] = insert_pair(:user)
refute update?(user, another_user)
end
end
end
<|start_filename|>test/lib/code_corps/github/sync/github_user/changeset_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.GithubUser.ChangesetTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub.Adapters,
GitHub.Sync,
GithubUser
}
alias Ecto.Changeset
describe "changeset/2" do
test "assigns correct changes" do
attrs =
"issues_opened"
|> load_event_fixture()
|> Kernel.get_in(["issue", "user"])
|> Adapters.User.to_github_user()
changeset = %GithubUser{} |> Sync.GithubUser.Changeset.changeset(attrs)
assert changeset |> Changeset.get_change(:avatar_url) == attrs.avatar_url
assert changeset |> Changeset.get_change(:email) == attrs.email
assert changeset |> Changeset.get_change(:github_id) == attrs.github_id
assert changeset |> Changeset.get_change(:username) == attrs.username
assert changeset |> Changeset.get_change(:type) == attrs.type
assert changeset.valid?
end
test "validates correct required attributes" do
changeset = %GithubUser{} |> Sync.GithubUser.Changeset.changeset(%{})
refute changeset.valid?
assert changeset.errors[:avatar_url]
assert changeset.errors[:github_id]
assert changeset.errors[:username]
assert changeset.errors[:type]
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/slugged_route_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.SluggedRouteControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :slugged_route
test "shows chosen resource", %{conn: conn} do
slug = "test-slug"
slugged_route = insert(:slugged_route, slug: slug)
conn
|> get("/#{slug}")
|> json_response(200)
|> assert_id_from_response(slugged_route.id)
end
test "is case insensitive", %{conn: conn} do
slug = "test"
insert(:slugged_route, slug: slug)
assert conn |> get("/test") |> json_response(200)
assert conn |> get("/tEst") |> json_response(200)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
<|start_filename|>test/lib/code_corps/emails/base_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.BaseEmailTest do
use CodeCorps.ModelCase
use Bamboo.Test
alias CodeCorps.Emails.BaseEmail
describe "get_name/1" do
test "get_name returns there on nil name" do
user = %CodeCorps.User{}
assert BaseEmail.get_name(user) == "there"
end
test "get_name returns first_name of user" do
user = %CodeCorps.User{first_name: "Zacck"}
assert BaseEmail.get_name(user) == "Zacck"
end
end
end
<|start_filename|>lib/code_corps_web/controllers/preview_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.PreviewController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Preview, User}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %Preview{}, params),
{:ok, %Preview{} = preview} <- %Preview{} |> Preview.create_changeset(params) |> Repo.insert do
conn |> put_status(:created) |> render("show.json-api", data: preview)
end
end
end
<|start_filename|>test/lib/code_corps_web/views/page_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.PageViewTest do
use CodeCorpsWeb.ViewCase
end
<|start_filename|>lib/code_corps/accounts/accounts.ex<|end_filename|>
defmodule CodeCorps.Accounts do
@moduledoc ~S"""
Main entry-point for managing accounts.
All actions on accounts should go through this module.
"""
alias CodeCorps.{
Accounts.Changesets,
Comment,
GitHub.Adapters,
GithubAppInstallation,
GithubUser,
Processor,
Task,
User,
Repo
}
alias Ecto.{Changeset, Multi}
import Ecto.Query
@doc ~S"""
Creates a user record using attributes from a GitHub payload.
"""
@spec create_from_github(map) :: {:ok, User.t} | {:error, Changeset.t}
def create_from_github(%{} = attrs) do
with {:ok, user} <- do_create_from_github(attrs) do
user |> upload_github_photo_async
{:ok, user}
else
error -> error
end
end
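# A minimal usage sketch with a trimmed-down, hypothetical GitHub user payload
# (the exact keys expected by the changeset live in Accounts.Changesets); the
# avatar upload is kicked off asynchronously after a successful insert:
#
#   {:ok, %User{} = user} =
#     CodeCorps.Accounts.create_from_github(%{
#       "id" => 123,
#       "login" => "octocat",
#       "avatar_url" => "https://example.com/avatar.png",
#       "email" => "octocat@example.com"
#     })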
@spec do_create_from_github(map) :: {:ok, User.t} | {:error, Changeset.t}
defp do_create_from_github(%{} = attrs) do
%User{}
|> Changesets.create_from_github_changeset(attrs)
|> Repo.insert
end
@doc ~S"""
Creates a user record using attributes from an existing `GithubUser` record.
"""
@spec create_from_github_user(GithubUser.t) :: {:ok, User.t} | {:error, Changeset.t}
def create_from_github_user(%GithubUser{} = github_user) do
with {:ok, user} <- do_create_from_github_user(github_user) do
user |> upload_github_photo_async
{:ok, user}
else
error -> error
end
end
@spec do_create_from_github_user(GithubUser.t) :: {:ok, User.t} | {:error, Changeset.t}
defp do_create_from_github_user(%GithubUser{} = github_user) do
%User{}
|> Changesets.create_from_github_changeset(github_user |> Adapters.User.to_user_attrs())
|> Changeset.put_assoc(:github_user, github_user)
|> Repo.insert
end
@spec update_with_github_user(User.t, GithubUser.t) :: {:ok, User.t} | {:error, Changeset.t}
def update_with_github_user(%User{} = user, %GithubUser{} = github_user) do
with {:ok, user} <- do_update_with_github_user(user, github_user) do
user |> upload_github_photo_async
{:ok, user}
else
error -> error
end
end
@spec do_update_with_github_user(User.t, GithubUser.t) :: {:ok, User.t} | {:error, Changeset.t}
defp do_update_with_github_user(%User{} = user, %GithubUser{} = github_user) do
user
|> Changesets.update_with_github_user_changeset(github_user |> Adapters.User.to_user_attrs())
|> Changeset.put_assoc(:github_user, github_user)
|> Repo.update
end
@doc ~S"""
Updates a user record using attributes from a GitHub payload along with the
access token.
"""
@spec update_from_github_oauth(User.t, map, String.t) :: {:ok, User.t} | {:error, Changeset.t}
def update_from_github_oauth(%User{} = user, %{} = params, access_token) do
params =
params
|> Adapters.User.to_user()
|> Map.put(:github_auth_token, access_token)
changeset = user |> Changesets.update_from_github_oauth_changeset(params)
multi =
Multi.new
|> Multi.run(:existing_user, fn _ -> params |> update_existing_user_if_any() end)
|> Multi.update(:user, changeset)
|> Multi.run(:installations, fn %{user: %User{} = user} -> user |> associate_installations() end)
|> Multi.run(:tasks, fn %{user: %User{} = user} -> user |> associate_tasks() end)
|> Multi.run(:comments, fn %{user: %User{} = user} -> user |> associate_comments() end)
case Repo.transaction(multi) do
{:ok, %{user: %User{} = user, installations: installations}} ->
user |> upload_github_photo_async
{:ok, user |> Map.put(:github_app_installations, installations)}
{:error, :user, %Changeset{} = changeset, _actions_done} ->
{:error, changeset}
end
end
defp update_existing_user_if_any(%{github_id: github_id}) do
case Repo.get_by(User, github_id: github_id, sign_up_context: "github") do
%User{} = existing_user -> existing_user |> do_update_existing_user()
_ -> {:ok, nil}
end
end
defp do_update_existing_user(%User{github_id: github_id} = user) do
params = %{github_id: nil, github_id_was: github_id}
user
|> Changesets.dissociate_github_user_changeset(params)
|> Repo.update()
end
@spec upload_github_photo_async(User.t) :: User.t | Processor.result
defp upload_github_photo_async(%User{cloudinary_public_id: nil} = user) do
Processor.process(fn -> upload_github_photo(user) end)
end
defp upload_github_photo_async(%User{} = user), do: user
defp upload_github_photo(%User{github_avatar_url: github_avatar_url} = user) do
{:ok, %Cloudex.UploadedImage{public_id: cloudinary_public_id}} =
github_avatar_url
|> CodeCorps.Cloudex.Uploader.upload()
user
|> Changeset.change(%{cloudinary_public_id: cloudinary_public_id})
|> Repo.update!
end
@spec associate_installations(User.t) :: {:ok, list(GithubAppInstallation.t)}
defp associate_installations(%User{id: user_id, github_id: github_id}) do
updates = [set: [user_id: user_id]]
update_options = [returning: true]
GithubAppInstallation
|> where([i], i.sender_github_id == ^github_id)
|> where([i], is_nil(i.user_id))
|> Repo.update_all(updates, update_options)
|> (fn {_count, installations} -> {:ok, installations} end).()
end
@spec associate_tasks(User.t) :: {:ok, list(Task.t)}
defp associate_tasks(%User{id: user_id, github_id: github_id}) do
updates = [set: [user_id: user_id]]
update_options = [returning: true]
existing_user_ids =
User
|> where(github_id_was: ^github_id)
|> select([u], u.id)
|> Repo.all
Task
|> where([t], t.user_id in ^existing_user_ids)
|> Repo.update_all(updates, update_options)
|> (fn {_count, tasks} -> {:ok, tasks} end).()
end
@spec associate_comments(User.t) :: {:ok, list(Comment.t)}
defp associate_comments(%User{id: user_id, github_id: github_id}) do
updates = [set: [user_id: user_id]]
update_options = [returning: true]
existing_user_ids =
User
|> where(github_id_was: ^github_id)
|> select([u], u.id)
|> Repo.all
Comment
|> where([c], c.user_id in ^existing_user_ids)
|> Repo.update_all(updates, update_options)
|> (fn {_count, comments} -> {:ok, comments} end).()
end
end
<|start_filename|>lib/code_corps_web/views/organization_invite_view.ex<|end_filename|>
defmodule CodeCorpsWeb.OrganizationInviteView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [
:email, :inserted_at, :organization_name, :updated_at
]
has_one :organization, type: "organization", field: :organization_id
end
<|start_filename|>lib/code_corps/emails/message_initiated_by_project_email.ex<|end_filename|>
defmodule CodeCorps.Emails.MessageInitiatedByProjectEmail do
import Bamboo.Email, only: [to: 2]
import Bamboo.PostmarkHelper
alias CodeCorps.{
Conversation,
Emails.BaseEmail,
Message,
Project,
User,
WebClient
}
@spec create(Message.t, Conversation.t) :: Bamboo.Email.t
def create(
%Message{project: %Project{} = project},
%Conversation{user: %User{} = user} = conversation) do
BaseEmail.create
|> to(user.email)
|> template(template_id(), %{
conversation_url: conversation |> conversation_url(),
name: user.first_name,
project_title: project.title,
subject: "You have a new message from #{project.title}"
})
end
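# A minimal usage sketch, assuming the message is preloaded with its project,
# the conversation with its user, and that the app's Bamboo mailer is
# CodeCorps.Mailer (an assumption, not shown in this file):
#
#   message
#   |> CodeCorps.Emails.MessageInitiatedByProjectEmail.create(conversation)
#   |> CodeCorps.Mailer.deliver_now()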
@spec template_id :: String.t
defp template_id, do: Application.get_env(:code_corps, :postmark_message_initiated_by_project_template)
@spec conversation_url(Conversation.t) :: String.t
defp conversation_url(%Conversation{id: id}) do
WebClient.url()
|> URI.merge("conversations/#{id}")
|> URI.to_string
end
end
<|start_filename|>test/lib/code_corps/github/sync/github_comment/github_comment_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Comment.GithubCommentTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub.Adapters,
GitHub.Sync,
GithubComment,
Repo
}
@payload load_event_fixture("issue_comment_created")
describe "create_or_update_comment/1" do
test "creates comment if none exists" do
%{"comment" => attrs} = @payload
github_issue = insert(:github_issue)
{:ok, %GithubComment{} = created_comment} = Sync.GithubComment.create_or_update_comment(github_issue, attrs)
assert Repo.one(GithubComment)
created_attributes = attrs |> Adapters.Comment.to_github_comment
returned_comment = Repo.get_by(GithubComment, created_attributes)
assert returned_comment.id == created_comment.id
assert returned_comment.github_issue_id == github_issue.id
end
test "updates issue if it already exists" do
%{"comment" => %{"id" => comment_id} = attrs} = @payload
github_issue = insert(:github_issue)
github_comment = insert(:github_comment, github_id: comment_id, github_issue: github_issue)
{:ok, %GithubComment{} = updated_comment} = Sync.GithubComment.create_or_update_comment(github_issue, attrs)
assert updated_comment.id == github_comment.id
assert updated_comment.github_issue_id == github_issue.id
end
test "returns changeset if payload is somehow not as expected" do
bad_payload = @payload |> put_in(["comment", "body"], nil)
%{"comment" => attrs} = bad_payload
github_issue = insert(:github_issue)
{:error, changeset} = Sync.GithubComment.create_or_update_comment(github_issue, attrs)
refute changeset.valid?
end
end
describe "delete/1" do
test "deletes the GithubComment" do
github_comment = insert(:github_comment)
{:ok, deleted_github_comment} =
github_comment.github_id
|> Sync.GithubComment.delete()
assert Repo.aggregate(GithubComment, :count, :id) == 0
assert deleted_github_comment.id == github_comment.id
end
test "works when there are no GithubComment reocrds" do
assert Repo.aggregate(GithubComment, :count, :id) == 0
{:ok, %GithubComment{} = empty_github_comment} =
"123"
|> Sync.GithubComment.delete()
refute empty_github_comment.id
end
end
end
<|start_filename|>test/lib/code_corps_web/views/organization_invite_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.OrganizationInviteViewTest do
@moduledoc false
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
organization = insert(:organization)
organization_invite = insert(:organization_invite, organization: organization)
rendered_json = render(CodeCorpsWeb.OrganizationInviteView, "show.json-api", data: organization_invite)
expected_json = %{
"data" => %{
"id" => organization_invite.id |> Integer.to_string,
"type" => "organization-invite",
"attributes" => %{
"email" => organization_invite.email,
"inserted-at" => organization_invite.inserted_at,
"organization-name" => organization_invite.organization_name,
"updated-at" => organization_invite.updated_at
},
"relationships" => %{
"organization" => %{
"data" => %{
"id" => organization.id |> Integer.to_string,
"type" => "organization"
}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/lib/code_corps_web/controllers/task_skill_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.TaskSkillControllerTest do
@moduledoc false
use CodeCorpsWeb.ApiCase, resource_name: :task_skill
alias CodeCorps.{Repo, TaskSkill}
describe "index" do
test "lists all entries on index", %{conn: conn} do
[task_skill_1, task_skill_2] = insert_pair(:task_skill)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([task_skill_1.id, task_skill_2.id])
end
test "filters resources on index", %{conn: conn} do
[task_skill_1, task_skill_2 | _] = insert_list(3, :task_skill)
path = "task-skills/?filter[id]=#{task_skill_1.id},#{task_skill_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([task_skill_1.id, task_skill_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
skill = insert(:skill)
task = insert(:task)
task_skill = insert(:task_skill, task: task, skill: skill)
conn
|> request_show(task_skill)
|> json_response(200)
|> assert_id_from_response(task_skill.id)
end
test "renders 404 error when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
skill = insert(:skill)
attrs = %{task: task, skill: skill}
assert conn |> request_create(attrs) |> json_response(201)
end
@tag :authenticated
test "tracks event when data is valid", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
skill = insert(:skill)
user_id = current_user.id
attrs = %{task: task, skill: skill}
json = conn |> request_create(attrs) |> json_response(201)
assert json
expected_data =
TaskSkill
|> Repo.get(json["data"]["id"])
|> CodeCorps.Analytics.SegmentTraitsBuilder.build
assert_received {:track, ^user_id, "Added Task Skill", ^expected_data}
end
@tag :authenticated
test "renders 422 error when data is invalid", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
invalid_attrs = %{task: task, skill: nil}
assert conn |> request_create(invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
task = insert(:task)
skill = insert(:skill)
attrs = %{task: task, skill: skill}
assert conn |> request_create(attrs) |> json_response(403)
end
end
describe "delete" do
@tag :authenticated
test "deletes chosen resource", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
task_skill = insert(:task_skill, task: task)
assert conn |> request_delete(task_skill) |> response(204)
end
@tag :authenticated
test "tracks event", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
task_skill = insert(:task_skill, task: task)
user_id = current_user.id
expected_data =
TaskSkill
|> Repo.get(task_skill.id)
|> CodeCorps.Analytics.SegmentTraitsBuilder.build
assert conn |> request_delete(task_skill) |> response(204)
assert_received {:track, ^user_id, "Removed Task Skill", ^expected_data}
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent on delete", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>lib/code_corps_web/router.ex<|end_filename|>
defmodule CodeCorpsWeb.Router do
use CodeCorpsWeb, :router
use Plug.ErrorHandler
use Sentry.Plug
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json-api", "json"]
plug JaSerializer.Deserializer
end
pipeline :bearer_auth do
plug CodeCorps.Auth.BearerAuthPipeline
end
pipeline :ensure_auth do
plug CodeCorps.Auth.EnsureAuthPipeline
end
pipeline :current_user do
plug CodeCorpsWeb.Plug.CurrentUser
plug CodeCorpsWeb.Plug.SetTimberUserContext
plug CodeCorpsWeb.Plug.SetSentryUserContext
plug CodeCorpsWeb.Plug.AnalyticsIdentify
end
pipeline :stripe_webhooks do
plug :accepts, ["json"]
end
pipeline :github_webhooks do
plug :accepts, ["json"]
end
pipeline :tracking do
plug CodeCorpsWeb.Plug.Segment
end
scope "/", CodeCorpsWeb do
pipe_through [:browser] # Use the default browser stack
get "/", PageController, :index
end
if Mix.env == :dev do
forward "/sent_emails", Bamboo.EmailPreviewPlug
end
scope "/", CodeCorpsWeb, host: "api." do
pipe_through [:stripe_webhooks]
post "/webhooks/stripe/connect", StripeConnectEventsController, :create
post "/webhooks/stripe/platform", StripePlatformEventsController, :create
end
scope "/", CodeCorpsWeb, host: "api." do
pipe_through [:github_webhooks]
post "/webhooks/github", GithubEventController, :create, as: :github_events
end
scope "/", CodeCorpsWeb, host: "api." do
pipe_through [:api, :bearer_auth, :ensure_auth, :current_user, :tracking]
resources "/categories", CategoryController, only: [:create, :update]
resources "/comments", CommentController, only: [:create, :update]
resources "/conversations", ConversationController, only: [:index, :show, :update]
resources "/conversation-parts", ConversationPartController, only: [:index, :show, :create]
resources "/donation-goals", DonationGoalController, only: [:create, :update, :delete]
post "/oauth/github", UserController, :github_oauth
resources "/github-app-installations", GithubAppInstallationController, only: [:create]
resources "/github-events", GithubEventController, only: [:index, :show, :update]
resources "/github-repos", GithubRepoController, only: [:update]
resources "/messages", MessageController, only: [:index, :show, :create]
resources "/organization-github-app-installations", OrganizationGithubAppInstallationController, only: [:create, :delete]
resources "/organizations", OrganizationController, only: [:create, :update]
resources "/organization-invites", OrganizationInviteController, only: [:create, :update]
resources "/previews", PreviewController, only: [:create]
resources "/project-categories", ProjectCategoryController, only: [:create, :delete]
resources "/project-skills", ProjectSkillController, only: [:create, :delete]
resources "/project-users", ProjectUserController, only: [:create, :update, :delete]
resources "/projects", ProjectController, only: [:create, :update]
resources "/role-skills", RoleSkillController, only: [:create, :delete]
resources "/roles", RoleController, only: [:create]
resources "/skills", SkillController, only: [:create]
resources "/stripe-connect-accounts", StripeConnectAccountController, only: [:show, :create, :update]
resources "/stripe-connect-plans", StripeConnectPlanController, only: [:show, :create]
resources "/stripe-connect-subscriptions", StripeConnectSubscriptionController, only: [:show, :create]
resources "/stripe-platform-cards", StripePlatformCardController, only: [:show, :create]
resources "/stripe-platform-customers", StripePlatformCustomerController, only: [:show, :create]
resources "/task-skills", TaskSkillController, only: [:create, :delete]
resources "/tasks", TaskController, only: [:create, :update]
resources "/user-categories", UserCategoryController, only: [:create, :delete]
resources "/user-roles", UserRoleController, only: [:create, :delete]
resources "/user-skills", UserSkillController, only: [:create, :delete]
resources "/user-tasks", UserTaskController, only: [:create, :update, :delete]
resources "/users", UserController, only: [:update]
end
scope "/", CodeCorpsWeb, host: "api." do
pipe_through [:api, :bearer_auth, :current_user, :tracking]
post "/token", TokenController, :create
post "/token/refresh", TokenController, :refresh
post "/password/reset", PasswordResetController, :reset_password
resources "/categories", CategoryController, only: [:index, :show]
resources "/comments", CommentController, only: [:index, :show]
resources "/donation-goals", DonationGoalController, only: [:index, :show]
resources "/github-app-installations", GithubAppInstallationController, only: [:index, :show]
resources "/github-issues", GithubIssueController, only: [:index, :show]
resources "/github-pull-requests", GithubPullRequestController, only: [:index, :show]
resources "/github-repos", GithubRepoController, only: [:index, :show]
resources "/organization-github-app-installations", OrganizationGithubAppInstallationController, only: [:index, :show]
resources "/organizations", OrganizationController, only: [:index, :show]
resources "/organization-invites", OrganizationInviteController, only: [:index, :show]
post "/password/forgot", PasswordController, :forgot_password
resources "/project-categories", ProjectCategoryController, only: [:index, :show]
resources "/project-skills", ProjectSkillController, only: [:index, :show]
resources "/project-users", ProjectUserController, only: [:index, :show]
resources "/projects", ProjectController, only: [:index, :show] do
resources "/task-lists", TaskListController, only: [:index, :show]
get "/tasks/:number", TaskController, :show
resources "/tasks", TaskController, only: [:index]
end
resources "/role-skills", RoleSkillController, only: [:index, :show]
resources "/roles", RoleController, only: [:index, :show]
resources "/skills", SkillController, only: [:index, :show]
resources "/task-lists", TaskListController, only: [:index, :show] do
resources "/tasks", TaskController, only: [:index]
get "/tasks/:number", TaskController, :show
end
resources "/task-skills", TaskSkillController, only: [:index, :show]
resources "/tasks", TaskController, only: [:index, :show]
resources "/user-categories", UserCategoryController, only: [:index, :show]
resources "/user-roles", UserRoleController, only: [:index, :show]
resources "/user-skills", UserSkillController, only: [:index, :show]
resources "/user-tasks", UserTaskController, only: [:index, :show]
get "/users/email_available", UserController, :email_available
get "/users/username_available", UserController, :username_available
resources "/users", UserController, only: [:index, :show, :create]
get "/:slug", SluggedRouteController, :show
get "/:slug/projects", ProjectController, :index
get "/:slug/:project_slug", ProjectController, :show
end
end
<|start_filename|>test/lib/code_corps_web/views/project_skill_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ProjectSkillViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
project_skill = insert(:project_skill)
rendered_json = render(CodeCorpsWeb.ProjectSkillView, "show.json-api", data: project_skill)
expected_json = %{
"data" => %{
"id" => project_skill.id |> Integer.to_string,
"type" => "project-skill",
"attributes" => %{},
"relationships" => %{
"project" => %{
"data" => %{"id" => project_skill.project_id |> Integer.to_string, "type" => "project"}
},
"skill" => %{
"data" => %{"id" => project_skill.skill_id |> Integer.to_string, "type" => "skill"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/lib/code_corps_web/controllers/password_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.PasswordControllerTest do
@moduledoc false
use CodeCorpsWeb.ApiCase, resource_name: :password
use Bamboo.Test
alias CodeCorps.AuthToken
test "Unauthenticated - creates and renders resource when email is valid", %{conn: conn} do
user = insert(:user)
attrs = %{"email" => user.email}
conn = post conn, password_path(conn, :forgot_password), attrs
response = json_response(conn, 200)
assert response == %{ "email" => user.email }
%AuthToken{value: token} = Repo.get_by(AuthToken, user_id: user.id)
assert_delivered_email CodeCorps.Emails.ForgotPasswordEmail.create(user, token)
end
@tag :authenticated
test "Authenticated - creates and renders resource when email is valid and removes session", %{conn: conn} do
user = insert(:user)
attrs = %{"email" => user.email}
conn = post conn, password_path(conn, :forgot_password), attrs
response = json_response(conn, 200)
assert response == %{ "email" => user.email }
%AuthToken{value: token} = Repo.get_by(AuthToken, user_id: user.id)
assert_delivered_email CodeCorps.Emails.ForgotPasswordEmail.create(user, token)
refute CodeCorps.Guardian.Plug.authenticated?(conn)
end
test "does not create resource and renders 200 when email is invalid", %{conn: conn} do
user = insert(:user)
attrs = %{"email" => "<EMAIL>"}
conn = post conn, password_path(conn, :forgot_password), attrs
response = json_response(conn, 200)
assert response == %{ "email" => "<EMAIL>" }
refute_delivered_email CodeCorps.Emails.ForgotPasswordEmail.create(user, nil)
end
end
<|start_filename|>test/lib/code_corps/github/api/user_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.API.UserTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{Comment, GitHub, GithubAppInstallation, Task, User}
describe "connect/2" do
test "posts to github, returns updated user" do
original_email = "<EMAIL>"
user = insert(:user, email: original_email)
%{
"avatar_url" => avatar_url,
"email" => github_email,
"id" => github_id,
"login" => login,
"type" => type
} = load_endpoint_fixture("user")
{:ok, %User{} = returned_user} = GitHub.API.User.connect(user, "foo_code", "foo_state")
assert returned_user.id == user.id
assert returned_user.github_auth_token == "foo_auth_token"
assert returned_user.github_avatar_url == avatar_url
assert returned_user.email == original_email
refute returned_user.email == github_email
assert returned_user.github_id == github_id
assert returned_user.github_username == login
assert returned_user.type == String.downcase(type)
end
test "posts to github, associates user and installations" do
user = insert(:user)
%{"id" => github_id} = load_endpoint_fixture("user")
# 3 test installations
# this one should associate, because it's orphaned and matches github id
installation_1 = insert(:github_app_installation, user: nil, sender_github_id: github_id)
# this one matches github id, but is not orphaned, so should not associate
installation_2 = insert(:github_app_installation, sender_github_id: github_id)
# this one is orphaned, but does not match github id, should not associate
installation_3 = insert(:github_app_installation, user: nil, sender_github_id: 234)
{:ok, %User{} = returned_user} = GitHub.API.User.connect(user, "foo_code", "foo_state")
assert Enum.count(returned_user.github_app_installations) == 1
assert Repo.get(GithubAppInstallation, installation_1.id).user_id == returned_user.id
refute Repo.get(GithubAppInstallation, installation_2.id).user_id == returned_user.id
refute Repo.get(GithubAppInstallation, installation_3.id).user_id == returned_user.id
end
test "posts to github, associates user and tasks" do
%{"email" => email} = load_endpoint_fixture("user")
user = insert(:user, email: email)
insert(:user)
# 2 test tasks
# this one should associate,
# because the associated user has the same email
task_1 = insert(:task, user: user)
# this one should not associate, because the associated user has a
# different (or no) github id
task_2 = insert(:task)
{:ok, %User{} = returned_user} = GitHub.API.User.connect(user, "foo_code", "foo_state")
assert Repo.get(Task, task_1.id).user_id == returned_user.id
refute Repo.get(Task, task_2.id).user_id == returned_user.id
end
test "posts to github, associates user and comments" do
%{"email" => email} = load_endpoint_fixture("user")
user = insert(:user, email: email)
insert(:user)
# 2 test comments
# this one should associate,
# because the associated user has the same github id
comment_1 = insert(:comment, user: user)
# this one should not associate, because the associated user has a
# different (or no) github id
comment_2 = insert(:comment)
{:ok, %User{} = returned_user} = GitHub.API.User.connect(user, "foo_code", "foo_state")
assert Repo.get(Comment, comment_1.id).user_id == returned_user.id
refute Repo.get(Comment, comment_2.id).user_id == returned_user.id
end
defmodule NotFoundRequest do
@moduledoc false
def request(:get, "https://api.github.com/user", _, _, _) do
{:ok, body} = %{"error" => "Not Found"} |> Poison.encode
{:ok, %HTTPoison.Response{status_code: 404, body: body}}
end
def request(method, endpoint, body, headers, options) do
CodeCorps.GitHub.SuccessAPI.request(method, endpoint, body, headers, options)
end
end
test "posts to github, returns error if reply is not ok" do
user = insert(:user)
error = GitHub.APIError.new({404, %{"message" => "{\"error\":\"Not Found\"}"}})
with_mock_api(NotFoundRequest) do
assert {:error, error} == GitHub.API.User.connect(user, "foo_code", "foo_state")
end
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/task_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.TaskControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :task
alias CodeCorps.{Analytics.SegmentTraitsBuilder, Task}
@valid_attrs %{
title: "Test task",
markdown: "A test task",
status: "open"
}
@invalid_attrs %{
title: nil,
status: "nonexistent"
}
describe "index" do
test "lists all entries", %{conn: conn} do
[task_1, task_2] = insert_pair(:task)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([task_1.id, task_2.id])
end
test "lists all entries, ordered correctly", %{conn: conn} do
# Has to be done manually. Inserting as a list happens too quickly;
# the field lacks the resolution to differentiate the records.
task_1 = insert(:task, order: 3000)
task_2 = insert(:task, order: 2000)
task_3 = insert(:task, order: 1000)
path = conn |> task_path(:index)
json = conn |> get(path) |> json_response(200)
ids =
json["data"]
|> Enum.map(&Map.get(&1, "id"))
|> Enum.map(&Integer.parse/1)
|> Enum.map(fn({id, _rem}) -> id end)
assert ids == [task_3.id, task_2.id, task_1.id]
end
test "lists all tasks for a project", %{conn: conn} do
project_1 = insert(:project)
project_2 = insert(:project)
user = insert(:user)
insert(:task, project: project_1, user: user)
insert(:task, project: project_1, user: user)
insert(:task, project: project_2, user: user)
json =
conn
|> get("projects/#{project_1.id}/tasks")
|> json_response(200)
assert json["data"] |> Enum.count == 2
end
test "lists all tasks filtered by status", %{conn: conn} do
project = insert(:project)
task_1 = insert(:task, status: "open", project: project)
task_2 = insert(:task, status: "closed", project: project)
json =
conn
|> get("projects/#{project.id}/tasks?status=open")
|> json_response(200)
assert json["data"] |> Enum.count == 1
[task] = json["data"]
assert task["id"] == task_1.id |> Integer.to_string
json =
conn
|> get("projects/#{project.id}/tasks?status=closed")
|> json_response(200)
assert json["data"] |> Enum.count == 1
[task] = json["data"]
assert task["id"] == task_2.id |> Integer.to_string
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
task = insert(:task)
conn
|> request_show(task)
|> json_response(200)
|> assert_id_from_response(task.id)
end
test "shows task by number for project", %{conn: conn} do
task = insert(:task)
path = conn |> project_task_path(:show, task.project_id, task.number)
data = conn |> get(path) |> json_response(200)
assert data["data"]["id"] == "#{task.id}"
assert data["data"]["type"] == "task"
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
project = insert(:project)
task_list = insert(:task_list, project: project)
attrs = @valid_attrs |> Map.merge(%{project: project, user: current_user, task_list: task_list})
json = conn |> request_create(attrs) |> json_response(201)
# ensure the record is reloaded from the database before it is serialized,
# since the number is added at the database level upon insert
assert json["data"]["attributes"]["number"] == 1
user_id = current_user.id
traits = Task |> Repo.one |> SegmentTraitsBuilder.build
assert_received {:track, ^user_id, "Created Task", ^traits}
end
@tag :authenticated
test "tracks connecting to github", %{conn: conn, current_user: current_user} do
%{project: project} = github_repo = insert(:github_repo)
task_list = insert(:task_list, project: project)
assocs = %{
project: project,
user: current_user,
task_list: task_list,
github_repo: github_repo
}
attrs = @valid_attrs |> Map.merge(assocs)
conn |> request_create(attrs)
traits = Task |> Repo.one |> SegmentTraitsBuilder.build
user_id = current_user.id
assert_received {:track, ^user_id, "Connected Task to GitHub", ^traits}
end
@tag :authenticated
test "renders 422 when data is invalid", %{conn: conn, current_user: current_user} do
project = insert(:project)
attrs = @invalid_attrs |> Map.merge(%{project: project, user: current_user})
assert conn |> request_create(attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
end
describe "update" do
@tag :authenticated
test "updates and renders chosen resource when data is valid", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
assert conn |> request_update(task, @valid_attrs) |> json_response(200)
user_id = current_user.id
traits = Task |> Repo.get(task.id) |> SegmentTraitsBuilder.build
assert_received {:track, ^user_id, "Edited Task", ^traits}
end
@tag :authenticated
test "tracks connecting to github", %{conn: conn, current_user: current_user} do
%{project: project} = github_repo = insert(:github_repo)
task_list = insert(:task_list, project: project)
task = insert(:task, task_list: task_list, project: project, user: current_user)
attrs = @valid_attrs |> Map.merge(%{github_repo_id: github_repo.id})
conn |> request_update(task, attrs)
traits = Task |> Repo.get(task.id) |> SegmentTraitsBuilder.build
user_id = current_user.id
assert_received {:track, ^user_id, "Connected Task to GitHub", ^traits}
end
@tag :authenticated
test "does not track connecting to github if already connected", %{conn: conn, current_user: current_user} do
%{project: project} = github_repo = insert(:github_repo)
task_list = insert(:task_list, project: project)
github_issue = insert(:github_issue, github_repo: github_repo)
task = insert(:task, task_list: task_list, project: project, user: current_user, github_repo: github_repo, github_issue: github_issue)
attrs = @valid_attrs |> Map.merge(%{github_repo_id: github_repo.id})
conn |> request_update(task, attrs)
user_id = current_user.id
refute_received {:track, ^user_id, "Connected Task to GitHub", _}
end
@tag :authenticated
test "tracks move between task lists", %{conn: conn, current_user: current_user} do
%{project: project} = task = insert(:task, user: current_user)
task_list = insert(:task_list, project: project)
attrs = @valid_attrs |> Map.put(:task_list_id, task_list.id)
conn |> request_update(task, attrs)
traits = Task |> Repo.get(task.id) |> SegmentTraitsBuilder.build
user_id = current_user.id
assert_received {:track, ^user_id, "Moved Task Between Lists", ^traits}
end
@tag :authenticated
test "does not track move between task lists if no move took place", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
conn |> request_update(task, @valid_attrs)
user_id = current_user.id
refute_received {:track, ^user_id, "Moved Task Between Lists", _}
end
@tag :authenticated
test "tracks title change", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
attrs = @valid_attrs |> Map.put(:title, "New title")
conn |> request_update(task, attrs)
traits = Task |> Repo.get(task.id) |> SegmentTraitsBuilder.build
user_id = current_user.id
assert_received {:track, ^user_id, "Edited Task Title", ^traits}
end
@tag :authenticated
test "does not track title change if none took place", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
attrs = @valid_attrs |> Map.delete(:title)
conn |> request_update(task, attrs)
user_id = current_user.id
refute_received {:track, ^user_id, "Edited Task Title", _}
end
@tag :authenticated
test "tracks closing task", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user, status: "open")
attrs = @valid_attrs |> Map.put(:status, "closed")
conn |> request_update(task, attrs)
traits = Task |> Repo.get(task.id) |> SegmentTraitsBuilder.build
user_id = current_user.id
assert_received {:track, ^user_id, "Closed Task", ^traits}
end
@tag :authenticated
test "does not track closing task if no close took place", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user, status: "open")
attrs = @valid_attrs |> Map.delete(:status)
conn |> request_update(task, attrs)
user_id = current_user.id
refute_received {:track, ^user_id, "Closed Task", _}
end
@tag :authenticated
test "tracks archiving task", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user, archived: false)
attrs = @valid_attrs |> Map.put(:archived, true)
conn |> request_update(task, attrs)
traits = Task |> Repo.get(task.id) |> SegmentTraitsBuilder.build
user_id = current_user.id
assert_received {:track, ^user_id, "Archived Task", ^traits}
end
@tag :authenticated
test "does not track archiving task if no archive took place", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user, archived: false)
attrs = @valid_attrs |> Map.delete(:archived)
conn |> request_update(task, attrs)
user_id = current_user.id
refute_received {:track, ^user_id, "Archived Task", _}
end
@tag :authenticated
test "renders 422 when data is invalid", %{conn: conn, current_user: current_user} do
task = insert(:task, user: current_user)
assert conn |> request_update(task, @invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "does not update resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_update |> json_response(403)
end
end
end
<|start_filename|>lib/code_corps/validators/time_validator.ex<|end_filename|>
defmodule CodeCorps.Validators.TimeValidator do
@moduledoc """
Used for validating timestamp fields in a given changeset.
"""
alias Ecto.Changeset
@doc """
Validates the new time is not before the previous time.
Works at second-level accuracy by truncating both timestamps to the second.
"""
def validate_time_not_before(%{data: data} = changeset, field) do
previous_time = Map.get(data, field)
new_time = Changeset.get_change(changeset, field)
case new_time do
nil -> changeset
_ -> do_validate_time_not_before(changeset, field, previous_time, new_time)
end
end
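# A minimal usage sketch, kept as a comment (the `task` struct and the
# `:closed_at` field are purely illustrative, not part of this module):
#
#     task
#     |> Ecto.Changeset.change(%{closed_at: new_time})
#     |> CodeCorps.Validators.TimeValidator.validate_time_not_before(:closed_at)
#
# If `new_time` is earlier than the previously stored value (compared at second
# precision), the changeset gains a "cannot be before the last recorded time"
# error; otherwise it is returned unchanged.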
defp do_validate_time_not_before(changeset, field, previous_time, new_time) do
previous_time = previous_time |> truncate(:second)
new_time = new_time |> truncate(:second)
case Timex.before?(new_time, previous_time) do
true -> Changeset.add_error(changeset, field, "cannot be before the last recorded time")
false -> changeset
end
end
# TODO: Replace this with DateTime.truncate/2 when Elixir 1.6 releases
@spec truncate(DateTime.t, :microsecond | :millisecond | :second) :: DateTime.t
def truncate(%DateTime{microsecond: microsecond} = datetime, precision) do
%{datetime | microsecond: do_truncate(microsecond, precision)}
end
defp do_truncate(_, :second), do: {0, 0}
end
<|start_filename|>test/lib/code_corps/model/task_list_test.exs<|end_filename|>
defmodule CodeCorps.TaskListTest do
use CodeCorps.ModelCase
alias CodeCorps.TaskList
alias Ecto.Changeset
@valid_attrs %{name: "some content", position: 42}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = TaskList.changeset(%TaskList{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = TaskList.changeset(%TaskList{}, @invalid_attrs)
refute changeset.valid?
end
test "defaults :done to 'false'" do
{:ok, record} =
%TaskList{} |> TaskList.changeset(@valid_attrs) |> Repo.insert
assert record.done == false
end
test "defaults :inbox to 'false'" do
{:ok, record} =
%TaskList{} |> TaskList.changeset(@valid_attrs) |> Repo.insert
assert record.inbox == false
end
test "defaults :pull_requests to 'false'" do
{:ok, record} =
%TaskList{} |> TaskList.changeset(@valid_attrs) |> Repo.insert
assert record.pull_requests == false
end
describe "create_changeset" do
test "casts done" do
attrs = @valid_attrs |> Map.merge(%{done: true})
changeset = %TaskList{} |> TaskList.create_changeset(attrs)
assert changeset |> Changeset.get_change(:done) == true
end
test "casts inbox" do
attrs = @valid_attrs |> Map.merge(%{inbox: true})
changeset = %TaskList{} |> TaskList.create_changeset(attrs)
assert changeset |> Changeset.get_change(:inbox) == true
end
test "casts pull_requests" do
attrs = @valid_attrs |> Map.merge(%{pull_requests: true})
changeset = %TaskList{} |> TaskList.create_changeset(attrs)
assert changeset |> Changeset.get_change(:pull_requests) == true
end
test "requires done" do
attrs = @valid_attrs |> Map.merge(%{done: nil})
changeset = %TaskList{} |> TaskList.create_changeset(attrs)
refute changeset.valid?
assert changeset |> Map.get(:errors) |> Keyword.get(:done)
end
test "requires inbox" do
attrs = @valid_attrs |> Map.merge(%{inbox: nil})
changeset = %TaskList{} |> TaskList.create_changeset(attrs)
refute changeset.valid?
assert changeset |> Map.get(:errors) |> Keyword.get(:inbox)
end
test "requires pull_requests" do
attrs = @valid_attrs |> Map.merge(%{pull_requests: nil})
changeset = %TaskList{} |> TaskList.create_changeset(attrs)
refute changeset.valid?
assert changeset |> Map.get(:errors) |> Keyword.get(:pull_requests)
end
test "ensures a unique 'done' task list per project" do
%{id: project_id} = insert(:project)
attrs = @valid_attrs |> Map.merge(%{done: true})
{:ok, _task_list} =
%TaskList{}
|> TaskList.create_changeset(attrs)
|> Changeset.put_change(:project_id, project_id)
|> Repo.insert
{:error, changeset} =
%TaskList{}
|> TaskList.create_changeset(attrs)
|> Changeset.put_change(:project_id, project_id)
|> Repo.insert
refute changeset.valid?
assert changeset |> Map.get(:errors) |> Keyword.get(:done)
end
test "ensures a unique 'inbox' task list per project" do
%{id: project_id} = insert(:project)
attrs = @valid_attrs |> Map.merge(%{inbox: true})
{:ok, _task_list} =
%TaskList{}
|> TaskList.create_changeset(attrs)
|> Changeset.put_change(:project_id, project_id)
|> Repo.insert
{:error, changeset} =
%TaskList{}
|> TaskList.create_changeset(attrs)
|> Changeset.put_change(:project_id, project_id)
|> Repo.insert
refute changeset.valid?
assert changeset |> Map.get(:errors) |> Keyword.get(:inbox)
end
test "ensures a unique 'pull_requests' task list per project" do
%{id: project_id} = insert(:project)
attrs = @valid_attrs |> Map.merge(%{pull_requests: true})
{:ok, _task_list} =
%TaskList{}
|> TaskList.create_changeset(attrs)
|> Changeset.put_change(:project_id, project_id)
|> Repo.insert
{:error, changeset} =
%TaskList{}
|> TaskList.create_changeset(attrs)
|> Changeset.put_change(:project_id, project_id)
|> Repo.insert
refute changeset.valid?
assert changeset |> Map.get(:errors) |> Keyword.get(:pull_requests)
end
end
end
<|start_filename|>lib/code_corps_web/controllers/organization_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.OrganizationController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Helpers.Query, Organization, Organizations, User}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
organizations =
Organization
|> Query.id_filter(params)
|> Repo.all
|> preload()
conn |> render("index.json-api", data: organizations)
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %Organization{} = organization <- Organization |> Repo.get(id) |> preload()
do
conn |> render("show.json-api", data: organization)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %Organization{}, params),
{:ok, %Organization{} = organization} <- Organizations.create(params),
organization <- preload(organization)
do
conn |> put_status(:created) |> render("show.json-api", data: organization)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %Organization{} = organization <- Organization |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, organization),
{:ok, %Organization{} = organization} <- organization |> Organization.update_changeset(params) |> Repo.update,
organization <- preload(organization)
do
conn |> render("show.json-api", data: organization)
end
end
@preloads [:organization_github_app_installations, :projects, :slugged_route, :stripe_connect_account]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>test/lib/code_corps/policy/stripe_platform_card_test.exs<|end_filename|>
defmodule CodeCorps.Policy.StripePlatformCardTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.StripePlatformCard, only: [create?: 2, show?: 2]
describe "create?" do
test "returns true if user is creating their own record" do
user = insert(:user)
stripe_platform_card = insert(:stripe_platform_card, user: user)
assert create?(user, stripe_platform_card)
end
test "returns false if user is creating someone else's record" do
user = build(:user)
stripe_platform_card = insert(:stripe_platform_card)
refute create?(user, stripe_platform_card)
end
end
describe "show?" do
test "returns true if user is viewing their own record" do
user = insert(:user)
stripe_platform_card = insert(:stripe_platform_card, user: user)
assert show?(user, stripe_platform_card)
end
test "returns false if user is viewing someone else's record" do
user = insert(:user)
stripe_platform_card = insert(:stripe_platform_card)
refute show?(user, stripe_platform_card)
end
end
end
<|start_filename|>lib/code_corps/github/sync/github_pull_request/github_pull_request.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubPullRequest do
@moduledoc ~S"""
In charge of finding or creating a `GithubPullRequest` record when
processing a GitHub Pull Request payload.
The only entry point is `create_or_update_pull_request/2`.
"""
alias CodeCorps.{
GitHub.Adapters,
GitHub.Sync,
GithubPullRequest,
GithubRepo,
GithubUser,
Repo
}
@typep linking_result :: {:ok, GithubPullRequest.t()} |
{:error, Ecto.Changeset.t()}
@doc ~S"""
Finds or creates a `GithubPullRequest` using the data in a GitHub PullRequest
payload.
The process is as follows:
- Search for the pull request in our database with the payload data.
- If we return a single `GithubPullRequest`, then the `GithubPullRequest`
should be updated.
- If there are no matching `GithubPullRequest` records, then a
`GithubPullRequest` should be created.
"""
@spec create_or_update_pull_request(map, GithubRepo.t()) :: linking_result
def create_or_update_pull_request(%{} = payload, %GithubRepo{} = github_repo) do
with {:ok, %GithubUser{} = github_user} <- Sync.GithubUser.create_or_update_github_user(payload) do
attrs = to_params(payload, github_repo, github_user)
case payload |> find_pull_request() do
nil -> create_pull_request(attrs)
%GithubPullRequest{} = pull_request ->
update_pull_request(pull_request, attrs)
end
else
{:error, error} -> {:error, error}
end
end
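# A hedged usage sketch (the payload below is abbreviated and its values are
# illustrative; a real GitHub PullRequest payload carries many more keys, and
# github_repo stands in for an already-loaded %GithubRepo{}):
#
#     payload = %{"id" => 34778301, "number" => 1, "state" => "open", "user" => %{...}}
#     {:ok, %GithubPullRequest{}} =
#       Sync.GithubPullRequest.create_or_update_pull_request(payload, github_repo)
#
# The record keyed by "id" is looked up by `github_id`; an existing record is
# updated in place, otherwise a new one is inserted.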
@spec find_pull_request(map) :: GithubPullRequest.t() | nil
defp find_pull_request(%{"id" => github_id}) do
Repo.get_by(GithubPullRequest, github_id: github_id)
end
@spec create_pull_request(map) :: linking_result
defp create_pull_request(params) do
%GithubPullRequest{}
|> GithubPullRequest.create_changeset(params)
|> Repo.insert()
end
@spec update_pull_request(GithubPullRequest.t(), map) :: linking_result
defp update_pull_request(%GithubPullRequest{} = github_pull_request, params) do
github_pull_request
|> GithubPullRequest.update_changeset(params)
|> Repo.update()
end
@spec to_params(map, GithubRepo.t(), GithubUser.t()) :: map
defp to_params(attrs, %GithubRepo{id: github_repo_id}, %GithubUser{id: github_user_id}) do
attrs
|> Adapters.PullRequest.from_api()
|> Map.put(:github_repo_id, github_repo_id)
|> Map.put(:github_user_id, github_user_id)
end
end
<|start_filename|>emails/project_user_request.html<|end_filename|>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>{{user_first_name}} wants to join {{project_title}}</title>
<!--
Make sure you copy the styles from styles.css into the email template in Postmark before saving there.
<style type="text/css" rel="stylesheet" media="all">
</style>
-->
<link rel="stylesheet" type="text/css" href="styles.css" media="screen" />
</head>
<body>
<span class="preheader">Head over to the app to process their request.</span>
<table class="email-wrapper" width="100%" cellpadding="0" cellspacing="0">
<tr>
<td align="center">
<table class="email-content" width="100%" cellpadding="0" cellspacing="0">
<tr>
<td class="email-masthead" width="100%" cellpadding="0" cellspacing="0">
<table class="email-masthead_inner" align="center" width="570" cellpadding="0" cellspacing="0">
<tr>
<td>
<a href="https://www.codecorps.org">
<img src="https://d3pgew4wbk2vb1.cloudfront.net/emails/images/logo-small@2x.png" class="email-masthead_logo" />
</a>
</td>
</tr>
</table>
</td>
</tr>
<!-- Email Body -->
<tr>
<td class="email-body" width="100%" cellpadding="0" cellspacing="0">
<table class="email-body_inner" align="center" width="570" cellpadding="0" cellspacing="0">
<!-- Body content -->
<tr>
<td class="content-cell">
<table class="donation" width="100%" cellpadding="0" cellspacing="0">
<tr>
<td>
<ul class="joined_images center">
<li class="photo">
<img src="{{project_logo_url}}" width="70" height="70" />
</li>
<li class="icon">
<img src="https://d3pgew4wbk2vb1.cloudfront.net/emails/images/requested-email@2x.png" width="25" height="70" />
</li>
<li class="photo">
<img src="{{user_image_url}}" width="70" height="70" />
</li>
</ul>
<p>
Hi {{project_title}} team,
</p>
<p class="project-membership--requested">
<strong>{{user_first_name}}</strong> just requested to join {{project_title}}.
</p>
<p>
You can head over to <a href="{{contributors_url}}">your project's contributors page</a> to process their request.
</p>
<p>
Take a look at the skills they bring and think about how you can integrate them into your project.
</p>
<p>
We also recommend reading GitHub's guides for <a href="https://opensource.guide/building-community/#setting-your-project-up-for-success">how to build a welcoming open source community</a>.
</p>
</td>
</tr>
</table>
<table class="body-signature">
<tr>
<td>
<p>
Cheers,
<br><strong>The Code Corps Team</strong>
</p>
</td>
</tr>
</table>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td>
<table class="email-footer" align="center" width="570" cellpadding="0" cellspacing="0">
<tr>
<td class="email-footer__cell" align="center">
<p class="align-center">
Questions? Feedback? Visit our <strong><a href="https://help.codecorps.org">Help Center</a></strong> or just reply to this email.
</p>
</td>
</tr>
<tr>
<td class="email-footer__cell" align="center">
<p class="align-center">
Send us 🐌 mail!
<br>
<br>Code Corps PBC
<br>4166 Wilson Ave #1
<br>San Diego, CA 92104
</p>
</td>
</tr>
</table>
</td>
</tr>
</table>
</td>
</tr>
</table>
</body>
</html>
<|start_filename|>lib/code_corps_web/views/github_pull_request_view.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubPullRequestView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:github_created_at, :github_updated_at, :html_url, :merged, :number, :state]
has_one :github_repo, type: "github-repo", field: :github_repo_id
end
<|start_filename|>lib/code_corps/github/sync/github_user/changeset.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubUser.Changeset do
@moduledoc ~S"""
In charge of changesets for actions on `CodeCorps.GithubUser` records.
"""
alias CodeCorps.GithubUser
alias Ecto.Changeset
@doc ~S"""
Builds a changeset for creating or updating a `CodeCorps.GithubUser` record.
"""
@spec changeset(GithubUser.t(), map) :: Changeset.t()
def changeset(%GithubUser{} = struct, %{} = attrs) do
struct
|> Changeset.cast(attrs, [:avatar_url, :email, :github_id, :username, :type])
|> Changeset.validate_required([:avatar_url, :github_id, :username, :type])
end
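# A brief usage sketch (values are illustrative; "octocat" is just a sample
# username, not anything referenced elsewhere in this codebase):
#
#     %GithubUser{}
#     |> CodeCorps.GitHub.Sync.GithubUser.Changeset.changeset(%{
#       avatar_url: "https://example.com/avatar.png",
#       github_id: 1,
#       username: "octocat",
#       type: "User"
#     })
#
# Omitting any of :avatar_url, :github_id, :username or :type leaves the
# changeset invalid, since those fields are required above.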
end
<|start_filename|>test/lib/code_corps_web/controllers/stripe_platform_card_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripePlatformCardControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :stripe_platform_card
describe "show" do
@tag :authenticated
test "shows resource when authenticated and authorized", %{conn: conn, current_user: current_user} do
stripe_platform_card = insert(:stripe_platform_card, user: current_user)
conn
|> request_show(stripe_platform_card)
|> json_response(200)
|> assert_id_from_response(stripe_platform_card.id)
end
test "renders 401 when unauthenticated", %{conn: conn} do
stripe_platform_card = insert(:stripe_platform_card)
assert conn |> request_show(stripe_platform_card) |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
stripe_platform_card = insert(:stripe_platform_card)
assert conn |> request_show(stripe_platform_card) |> json_response(403)
end
@tag :authenticated
test "renders 404 when record not found", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
defp build_payload(%{stripe_token: stripe_token, user: user}) do
%{
"data" => %{
"type" => "stripe-platform-card",
"attributes" => stripe_token |> to_attributes,
"relationships" => user |> to_relationships
},
}
end
defp build_payload(%{}), do: %{"data" => %{"type" => "stripe-platform-card"}}
defp to_attributes(stripe_token), do: %{"stripe-token" => stripe_token}
defp to_relationships(user), do: %{"user" => %{"data" => %{"id" => user.id, "type" => "user"}}}
defp make_create_request(conn, attrs \\ %{}) do
path = conn |> stripe_platform_card_path(:create)
payload = build_payload(attrs)
conn |> post(path, payload)
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
insert(:stripe_platform_customer, user: current_user)
valid_attrs = %{stripe_token: "tok_<PASSWORD>", user: current_user}
assert conn |> make_create_request(valid_attrs) |> json_response(201)
user_id = current_user.id
assert_received {:track, ^user_id, "Created Stripe Platform Card", %{}}
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> make_create_request |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> make_create_request |> json_response(403)
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/project_category_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ProjectCategoryControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :project_category
describe "index" do
test "lists all entries on index", %{conn: conn} do
[project_category_1, project_category_2] = insert_pair(:project_category)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([project_category_1.id, project_category_2.id])
end
test "filters resources on index", %{conn: conn} do
[project_category_1, project_category_2 | _] = insert_list(3, :project_category)
path = "project-categories/?filter[id]=#{project_category_1.id},#{project_category_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([project_category_1.id, project_category_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
category = insert(:category)
project = insert(:project)
project_category = insert(:project_category, project: project, category: category)
conn
|> request_show(project_category)
|> json_response(200)
|> assert_id_from_response(project_category.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
category = insert(:category)
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
attrs = %{category: category, project: project}
assert conn |> request_create(attrs) |> json_response(201)
end
@tag :authenticated
test "renders 422 when data is invalid", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
invalid_attrs = %{project: project}
assert conn |> request_create(invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "delete" do
@tag :authenticated
test "deletes resource", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
project_category = insert(:project_category, project: project)
assert conn |> request_delete(project_category) |> response(204)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>lib/code_corps_web/views/slugged_route_view.ex<|end_filename|>
defmodule CodeCorpsWeb.SluggedRouteView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:slug, :inserted_at, :updated_at]
has_one :organization, type: "organization", field: :organization_id
has_one :user, type: "user", field: :user_id
end
<|start_filename|>test/lib/code_corps/github/sync/github_pull_request/body_parser_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubPullRequest.BodyParserTest do
@moduledoc false
use ExUnit.Case, async: true
alias CodeCorps.GitHub.Sync.GithubPullRequest.BodyParser
describe "extract_closing_ids/1" do
test "correctly extracts ids using supported closing keywords" do
content =
"""
close #2, closes #3 closed #4: fixed #5 fixes #6 fix #7.
resolve #8 resolves #9 #resolved #10
"""
assert content |> BodyParser.extract_closing_ids == 2..10 |> Enum.to_list
end
test "only returns unique results" do
content =
"""
close #2, closes #2 closed #3: fixed #4 fixes #5 fix #6.
resolve #7 resolves #8 #resolved #8
"""
assert content |> BodyParser.extract_closing_ids == 2..8 |> Enum.to_list
end
end
end
<|start_filename|>test/lib/code_corps_web/views/password_reset_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.PasswordResetViewTest do
use CodeCorpsWeb.ViewCase
test "renders show" do
args = %{
email: "<EMAIL>",
token: "abc123",
user_id: 123
}
rendered_json = render(CodeCorpsWeb.PasswordResetView, "show.json", args)
expected_json = %{
email: "<EMAIL>",
token: "abc123",
user_id: 123
}
assert expected_json == rendered_json
end
end
<|start_filename|>test/lib/code_corps_web/controllers/preview_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.PreviewControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :preview
@valid_attrs %{markdown: "A **strong** element"}
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
attrs = @valid_attrs |> Map.merge(%{user: current_user})
assert conn |> request_create(attrs) |> json_response(201)
end
test "does not create resource, and responds with 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create(@valid_attrs) |> json_response(401)
end
@tag :authenticated
test "does not update resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create(@valid_attrs) |> json_response(403)
end
end
end
<|start_filename|>lib/code_corps/model/github_issue.ex<|end_filename|>
defmodule CodeCorps.GithubIssue do
use Ecto.Schema
import Ecto.Changeset
@type t :: %__MODULE__{}
schema "github_issues" do
field :body, :string
field :closed_at, :utc_datetime
field :comments_url, :string
field :events_url, :string
field :github_created_at, :utc_datetime
field :github_id, :integer
field :github_updated_at, :utc_datetime
field :html_url, :string
field :labels_url, :string
field :locked, :boolean
field :number, :integer
field :state, :string
field :title, :string
field :url, :string
belongs_to :github_pull_request, CodeCorps.GithubPullRequest
belongs_to :github_repo, CodeCorps.GithubRepo
belongs_to :github_user, CodeCorps.GithubUser
has_many :github_comments, CodeCorps.GithubComment
has_many :github_issue_assignees, CodeCorps.GithubIssueAssignee
has_one :task, CodeCorps.Task
timestamps()
end
@doc false
def changeset(struct, params) do
struct
|> cast(params, [:body, :closed_at, :comments_url, :events_url, :github_created_at, :github_id, :github_updated_at, :html_url, :labels_url, :locked, :number, :state, :title, :url])
|> validate_required([:comments_url, :events_url, :github_created_at, :github_id, :github_updated_at, :html_url, :labels_url, :locked, :number, :state, :title, :url])
|> unique_constraint(:github_id)
end
end
<|start_filename|>test/lib/code_corps/model/github_pull_request_test.exs<|end_filename|>
defmodule CodeCorps.GithubPullRequestTest do
use CodeCorps.ModelCase
alias CodeCorps.GithubPullRequest
@valid_attrs %{
github_created_at: "2075-05-05T23:40:27Z",
github_id: 34778301,
github_updated_at: "2075-05-05T23:40:27Z",
html_url: "https://github.com/baxterthehacker/public-repo/pull/1",
locked: false,
merged: false,
number: 1,
state: "open",
title: "Update the README with new information",
url: "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1"
}
@invalid_attrs %{}
describe "changeset/2" do
test "with merged set" do
attrs = @valid_attrs |> Map.put(:merged, true)
changeset = GithubPullRequest.changeset(%GithubPullRequest{}, attrs)
assert changeset.valid?
assert changeset.changes[:merged] == true
end
test "with merged_at set" do
attrs = @valid_attrs |> Map.put(:merged_at, Timex.now) |> Map.delete(:merged)
changeset = GithubPullRequest.changeset(%GithubPullRequest{}, attrs)
assert changeset.valid?
assert changeset.changes[:merged] == true
end
test "with neither merged nor merged_at set" do
attrs = @valid_attrs |> Map.delete(:merged) |> Map.delete(:merged_at)
changeset = GithubPullRequest.changeset(%GithubPullRequest{merged: true}, attrs)
assert changeset.valid?
assert changeset.changes[:merged] == false
end
end
describe "create_changeset/2" do
test "with valid attributes" do
changeset = GithubPullRequest.create_changeset(%GithubPullRequest{}, @valid_attrs)
assert changeset.valid?
end
test "with invalid attributes" do
changeset = GithubPullRequest.create_changeset(%GithubPullRequest{}, @invalid_attrs)
refute changeset.valid?
end
end
end
<|start_filename|>lib/code_corps/policy/project_category.ex<|end_filename|>
defmodule CodeCorps.Policy.ProjectCategory do
import CodeCorps.Policy.Helpers, only: [get_project: 1, administered_by?: 2]
alias CodeCorps.{ProjectCategory, User}
@spec create?(User.t, map) :: boolean
def create?(%User{} = user, %{} = params) do
params |> get_project |> administered_by?(user)
end
@spec delete?(User.t, ProjectCategory.t) :: boolean
def delete?(%User{} = user, %ProjectCategory{} = project_category) do
project_category |> get_project |> administered_by?(user)
end
end
<|start_filename|>lib/code_corps/auth/bearer_auth_pipeline.ex<|end_filename|>
defmodule CodeCorps.Auth.BearerAuthPipeline do
use Guardian.Plug.Pipeline, otp_app: :code_corps,
module: CodeCorps.Guardian,
error_handler: CodeCorps.Auth.ErrorHandler
plug Guardian.Plug.VerifyHeader, realm: "Bearer"
plug Guardian.Plug.LoadResource, allow_blank: true
end
<|start_filename|>lib/code_corps/stripe_service/adapters/stripe_connect_account.ex<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripeConnectAccountAdapter do
alias CodeCorps.MapUtils
alias CodeCorps.Adapter.MapTransformer
# Mapping of stripe record attributes to locally stored attributes
# Format is {:local_key, [:nesting, :of, :stripe, :keys]}
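# For example, {:legal_entity_dob_day, [:legal_entity, :dob, :day]} below maps
# stripe_account.legal_entity.dob.day onto the local :legal_entity_dob_day field.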
@stripe_mapping [
{:id_from_stripe, [:id]},
{:business_name, [:business_name]},
{:business_url, [:business_url]},
{:charges_enabled, [:charges_enabled]},
{:country, [:country]},
{:default_currency, [:default_currency]},
{:details_submitted, [:details_submitted]},
{:display_name, [:display_name]},
{:email, [:email]},
{:external_account, [:external_account]},
{:legal_entity_address_city, [:legal_entity, :address, :city]},
{:legal_entity_address_country, [:legal_entity, :address, :country]},
{:legal_entity_address_line1, [:legal_entity, :address, :line1]},
{:legal_entity_address_line2, [:legal_entity, :address, :line2]},
{:legal_entity_address_postal_code, [:legal_entity, :address, :postal_code]},
{:legal_entity_address_state, [:legal_entity, :address, :state]},
{:legal_entity_business_name, [:legal_entity, :business_name]},
{:legal_entity_business_tax_id, [:legal_entity, :business_tax_id]},
{:legal_entity_business_tax_id_provided, [:legal_entity, :business_tax_id_provided]},
{:legal_entity_business_vat_id, [:legal_entity, :business_vat_id]},
{:legal_entity_business_vat_id_provided, [:legal_entity, :business_vat_id_provided]},
{:legal_entity_dob_day, [:legal_entity, :dob, :day]},
{:legal_entity_dob_month, [:legal_entity, :dob, :month]},
{:legal_entity_dob_year, [:legal_entity, :dob, :year]},
{:legal_entity_first_name, [:legal_entity, :first_name]},
{:legal_entity_gender, [:legal_entity, :gender]},
{:legal_entity_last_name, [:legal_entity, :last_name]},
{:legal_entity_maiden_name, [:legal_entity, :maiden_name]},
{:legal_entity_personal_address_city, [:legal_entity, :personal_address, :city]},
{:legal_entity_personal_address_country, [:legal_entity, :personal_address, :country]},
{:legal_entity_personal_address_line1, [:legal_entity, :personal_address, :line1]},
{:legal_entity_personal_address_line2, [:legal_entity, :personal_address, :line2]},
{:legal_entity_personal_address_postal_code, [:legal_entity, :personal_address, :postal_code]},
{:legal_entity_personal_address_state, [:legal_entity, :personal_address, :state]},
{:legal_entity_phone_number, [:legal_entity, :phone_number]},
{:legal_entity_personal_id_number, [:legal_entity, :personal_id_number]},
{:legal_entity_personal_id_number_provided, [:legal_entity, :personal_id_number_provided]},
{:legal_entity_ssn_last_4, [:legal_entity, :ssn_last_4]},
{:legal_entity_ssn_last_4_provided, [:legal_entity, :ssn_last_4_provided]},
{:legal_entity_type, [:legal_entity, :type]},
{:legal_entity_verification_details, [:legal_entity, :verification, :details]},
{:legal_entity_verification_details_code, [:legal_entity, :verification, :details_code]},
{:legal_entity_verification_document, [:legal_entity, :verification, :document]},
{:legal_entity_verification_status, [:legal_entity, :verification, :status]},
{:payouts_enabled, [:payouts_enabled]},
{:support_email, [:support_email]},
{:support_phone, [:support_phone]},
{:support_url, [:support_url]},
{:tos_acceptance_date, [:tos_acceptance, :date]},
{:tos_acceptance_ip, [:tos_acceptance, :ip]},
{:tos_acceptance_user_agent, [:tos_acceptance, :user_agent]},
{:type, [:type]},
{:verification_disabled_reason, [:verification, :disabled_reason]},
{:verification_due_by, [:verification, :due_by]},
{:verification_fields_needed, [:verification, :fields_needed]}
]
@doc """
Transforms a set of local attributes into a map of parameters used to
update a `%Stripe.Account{}`.
"""
def from_params(%{} = attributes) do
result =
attributes
|> remove_attributes()
|> MapUtils.keys_to_atom()
|> MapTransformer.transform_inverse(@stripe_mapping)
{:ok, result}
end
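# A hedged sketch of the expected shape (attribute names come from
# @stripe_mapping above; the values and the exact nesting of the result depend
# on MapTransformer.transform_inverse/2 and are shown here only approximately):
#
#     {:ok, params} =
#       from_params(%{"legal_entity_first_name" => "Jane", "support_email" => "jane@example.com"})
#     # params is expected to be a nested map along the lines of
#     # %{legal_entity: %{first_name: "Jane"}, support_email: "jane@example.com"}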
def from_params_update(%{} = attributes) do
result =
attributes
|> remove_attributes()
|> MapUtils.keys_to_atom()
|> MapTransformer.transform_inverse(@stripe_mapping)
|> Map.drop([:type])
{:ok, result}
end
@doc """
Transforms a `%Stripe.Account{}` and a set of local attributes into a
map of parameters used to create or update a `StripeConnectAccount` record.
"""
def to_params(%Stripe.Account{} = stripe_account, %{} = attributes) do
result =
stripe_account
|> Map.from_struct
|> MapTransformer.transform(@stripe_mapping)
|> add_nested_attributes(stripe_account)
|> MapUtils.keys_to_string()
|> add_non_stripe_attributes(attributes)
{:ok, result}
end
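# Roughly, given a %Stripe.Account{} fetched from the Stripe API and local
# attributes such as %{"organization_id" => 1} (the one non-Stripe key kept,
# per @non_stripe_attributes below), this returns {:ok, params} with string
# keys for a StripeConnectAccount record. A sketch, not exercised here:
#
#     {:ok, params} = to_params(stripe_account, %{"organization_id" => organization.id})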
# Names of attributes which we need to store locally,
# but are not part of the Stripe API record
@non_stripe_attributes ["organization_id"]
defp add_non_stripe_attributes(%{} = params, %{} = attributes) do
attributes
|> get_non_stripe_attributes
|> add_to(params)
end
defp get_non_stripe_attributes(%{} = attributes) do
attributes |> Map.take(@non_stripe_attributes)
end
defp add_to(%{} = attributes, %{} = params) do
params |> Map.merge(attributes)
end
defp add_nested_attributes(map, stripe_account) do
map |> add_external_account(stripe_account)
end
defp add_external_account(map, %Stripe.Account{external_accounts: %Stripe.List{data: list}}) do
latest = list |> List.last
map |> do_add_external_account(latest)
end
defp do_add_external_account(map, nil), do: map
defp do_add_external_account(map, %Stripe.BankAccount{id: id}) do
map |> Map.put(:external_account, id)
end
defp remove_attributes(%{"legal_entity_verification_status" => "verified"} = attributes) do
attributes |> Map.delete("legal_entity_verification_document")
end
defp remove_attributes(attributes), do: attributes
end
<|start_filename|>test/lib/code_corps_web/views/conversation_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ConversationViewTest do
use CodeCorpsWeb.ViewCase
alias CodeCorps.Repo
test "renders all attributes and relationships properly" do
conversation = insert(:conversation)
conversation_part = insert(:conversation_part, conversation: conversation)
rendered_json =
CodeCorpsWeb.ConversationView
|> render(
"show.json-api",
data: conversation |> Repo.preload(:conversation_parts)
)
expected_json = %{
"data" => %{
"id" => conversation.id |> Integer.to_string,
"type" => "conversation",
"attributes" => %{
"read-at" => conversation.read_at,
"status" => conversation.status,
"inserted-at" => conversation.inserted_at,
"updated-at" => conversation.updated_at
},
"relationships" => %{
"conversation-parts" => %{
"data" => [
%{
"id" => conversation_part.id |> Integer.to_string,
"type" => "conversation-part"
}
]
},
"message" => %{
"data" => %{
"id" => conversation.message_id |> Integer.to_string,
"type" => "message"
}
},
"user" => %{
"data" => %{
"id" => conversation.user_id |> Integer.to_string,
"type" => "user"
}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps_web/views/github_event_view.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubEventView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [
:action, :event_type, :error, :failure_reason, :github_delivery_id,
:inserted_at, :payload, :record_data, :status, :updated_at
]
def event_type(github_event, _conn) do
github_event.type
end
def record_data(github_event, _conn) do
github_event.data
end
end
<|start_filename|>lib/code_corps/github/sync/sync.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync do
@moduledoc """
GitHub syncing functions for:
- events received from the GitHub API
- entire GitHub repositories
"""
alias CodeCorps.{
Comment,
GitHub.API,
GitHub.Sync,
GitHub.Sync.Utils.Finder,
GitHub.Utils.ResultAggregator,
GithubAppInstallation,
GithubComment,
GithubIssue,
GithubPullRequest,
GithubRepo,
GithubUser,
Repo,
Task
}
alias Ecto.{Changeset, Multi}
@type issue_event_outcome ::
{:ok, Task.t()} |
{:error, :repo_not_found} |
{:error, :validating_github_issue, Changeset.t()} |
{:error, :validating_github_user, Changeset.t()} |
{:error, :validating_user, Changeset.t()} |
{:error, :multiple_task_users_match} |
{:error, :validating_task, Changeset.t()} |
{:error, :unexpected_transaction_outcome, any}
@doc ~S"""
Syncs a GitHub Issues event.
- Finds the `CodeCorps.GithubRepo`
- Syncs the issue portion of the event with `Sync.GithubIssue`
[https://developer.github.com/v3/activity/events/types/#issuesevent](https://developer.github.com/v3/activity/events/types/#issuesevent)
"""
@spec issue_event(map) :: issue_event_outcome()
def issue_event(%{"issue" => issue_payload} = payload) do
multi =
Multi.new
|> Multi.run(:repo, fn _ -> Finder.find_repo(payload) end)
|> Multi.run(:github_issue, fn %{repo: github_repo} ->
issue_payload
|> Sync.GithubIssue.create_or_update_issue(github_repo)
end)
|> Multi.run(:task_user, fn %{github_issue: github_issue} ->
github_issue |> Sync.User.RecordLinker.link_to(issue_payload)
end)
|> Multi.run(:task, fn %{github_issue: github_issue, task_user: user} ->
github_issue |> Sync.Task.sync_github_issue(user)
end)
case multi |> Repo.transaction() do
{:ok, %{task: task}} -> {:ok, task}
{:error, :repo, :unmatched_repository, _steps} ->
{:error, :repo_not_found}
{:error, :github_issue, %Changeset{data: %GithubIssue{}} = changeset, _steps} ->
{:error, :validating_github_issue, changeset}
{:error, :github_issue, %Changeset{data: %GithubUser{}} = changeset, _steps} ->
{:error, :validating_github_user, changeset}
{:error, :task_user, %Changeset{} = changeset, _steps} ->
{:error, :validating_user, changeset}
{:error, :task_user, :multiple_users, _steps} ->
{:error, :multiple_task_users_match}
{:error, :task, %Changeset{} = changeset, _steps} ->
{:error, :validating_task, changeset}
{:error, _errored_step, error_response, _steps} ->
{:error, :unexpected_transaction_outcome, error_response}
end
end
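# A hedged usage sketch (webhook_payload stands in for the decoded event body;
# a real IssuesEvent payload includes "action", "issue" and "repository" keys
# among others, and the right-hand sides below are caller-chosen placeholders):
#
#     case CodeCorps.GitHub.Sync.issue_event(webhook_payload) do
#       {:ok, %Task{} = task} -> task
#       {:error, reason} -> {:discard, reason}
#       {:error, reason, details} -> {:retry, reason, details}
#     end
#
# The possible error shapes are listed in issue_event_outcome above.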
@type comment_deleted_outcome :: {:ok, map}
@type issue_comment_outcome ::
{:ok, Comment.t()} |
{:error, :repo_not_found} |
{:error, :validating_github_issue, Changeset.t()} |
{:error, :validating_github_user_on_github_issue, Changeset.t()} |
{:error, :validating_task_user, Changeset.t()} |
{:error, :multiple_task_users_match} |
{:error, :validating_task, Changeset.t()} |
{:error, :validating_github_comment, Changeset.t()} |
{:error, :validating_github_user_on_github_comment, Changeset.t()} |
{:error, :validating_comment_user, Changeset.t()} |
{:error, :multiple_comment_users_match} |
{:error, :validating_comment, Changeset.t()} |
{:error, :unexpected_transaction_outcome, any}
@type pull_request_comment_outcome ::
issue_comment_outcome() |
{:error, :fetching_pull_request, struct} |
{:error, :validating_github_pull_request, Changeset.t()} |
{:error, :validating_github_user_on_github_pull_request, Changeset.t()}
@type issue_comment_event_outcome ::
comment_deleted_outcome() |
pull_request_comment_outcome() |
issue_comment_outcome()
@doc ~S"""
Syncs a GitHub IssueComment event.
- For the deleted action
- Deletes the related comment records with `Sync.Comment` and `Sync.GithubComment`
- For any other action
- Finds the `CodeCorps.GithubRepo`
- If it's a pull request, it fetches the pull request from the GitHub API
and syncs it with `Sync.GithubPullRequest`
- Syncs the issue portion of the event with `Sync.GithubIssue` and `Sync.Task`
- Syncs the comment portion of the event with `Sync.GithubComment` and `Sync.Comment`
[https://developer.github.com/v3/activity/events/types/#issuecommentevent](https://developer.github.com/v3/activity/events/types/#issuecommentevent)
"""
@spec issue_comment_event(map) :: issue_comment_event_outcome()
def issue_comment_event(
%{"action" => "deleted", "comment" => %{"id" => github_id}}) do
multi =
Multi.new
|> Multi.run(:deleted_comments, fn _ -> Sync.Comment.delete(github_id) end)
|> Multi.run(:deleted_github_comment, fn _ -> Sync.GithubComment.delete(github_id) end)
case multi |> Repo.transaction() do
{:ok, %{deleted_comments: _, deleted_github_comment: _} = result} ->
{:ok, result}
end
end
def issue_comment_event(%{
"issue" => %{"pull_request" => %{"url" => pull_request_url}} = issue_payload,
"comment" => comment_payload} = payload) do
multi =
Multi.new
|> Multi.run(:repo, fn _ -> Finder.find_repo(payload) end)
|> Multi.run(:fetch_pull_request, fn %{repo: github_repo} ->
API.PullRequest.from_url(pull_request_url, github_repo)
end)
|> Multi.run(:github_pull_request, fn %{repo: github_repo, fetch_pull_request: pr_payload} ->
pr_payload
|> Sync.GithubPullRequest.create_or_update_pull_request(github_repo)
end)
|> Multi.run(:github_issue, fn %{repo: github_repo, github_pull_request: github_pull_request} ->
issue_payload
|> Sync.GithubIssue.create_or_update_issue(github_repo, github_pull_request)
end)
|> Multi.run(:task_user, fn %{github_issue: github_issue} ->
github_issue |> Sync.User.RecordLinker.link_to(issue_payload)
end)
|> Multi.run(:task, fn %{github_issue: github_issue, task_user: user} ->
github_issue |> Sync.Task.sync_github_issue(user)
end)
|> Multi.run(:github_comment, fn %{github_issue: github_issue} ->
github_issue
|> Sync.GithubComment.create_or_update_comment(comment_payload)
end)
|> Multi.run(:comment_user, fn %{github_comment: github_comment} ->
github_comment |> Sync.User.RecordLinker.link_to(comment_payload)
end)
|> Multi.run(:comment, fn %{github_comment: github_comment, comment_user: user, task: task} ->
task |> Sync.Comment.sync(github_comment, user)
end)
case multi |> Repo.transaction() do
{:ok, %{comment: %Comment{} = comment}} -> {:ok, comment}
{:error, :repo, :unmatched_repository, _steps} ->
{:error, :repo_not_found}
{:error, :fetch_pull_request, error, _steps} ->
{:error, :fetching_pull_request, error}
{:error, :github_pull_request, %Changeset{data: %GithubPullRequest{}} = changeset, _steps} ->
{:error, :validating_github_pull_request, changeset}
{:error, :github_pull_request, %Changeset{data: %GithubUser{}} = changeset, _steps} ->
{:error, :validating_github_user_on_github_pull_request, changeset}
{:error, :github_issue, %Changeset{data: %GithubIssue{}} = changeset, _steps} ->
{:error, :validating_github_issue, changeset}
{:error, :github_issue, %Changeset{data: %GithubUser{}} = changeset, _steps} ->
{:error, :validating_github_user_on_github_issue, changeset}
{:error, :task_user, %Changeset{} = changeset, _steps} ->
{:error, :validating_task_user, changeset}
{:error, :task_user, :multiple_users, _steps} ->
{:error, :multiple_task_users_match}
{:error, :task, %Changeset{} = changeset, _steps} ->
{:error, :validating_task, changeset}
{:error, :github_comment, %Changeset{data: %GithubComment{}} = changeset, _steps} ->
{:error, :validating_github_comment, changeset}
{:error, :github_comment, %Changeset{data: %GithubUser{}} = changeset, _steps} ->
{:error, :validating_github_user_on_github_comment, changeset}
{:error, :comment_user, %Changeset{} = changeset, _steps} ->
{:error, :validating_comment_user, changeset}
{:error, :comment_user, :multiple_users, _steps} ->
{:error, :multiple_comment_users_match}
{:error, :comment, %Changeset{} = changeset, _steps} ->
{:error, :validating_comment, changeset}
{:error, _errored_step, error_response, _steps} ->
{:error, :unexpected_transaction_outcome, error_response}
end
end
def issue_comment_event(%{"issue" => issue_payload, "comment" => comment_payload} = payload) do
multi =
Multi.new
|> Multi.run(:repo, fn _ -> Finder.find_repo(payload) end)
|> Multi.run(:github_issue, fn %{repo: github_repo} ->
issue_payload |> Sync.GithubIssue.create_or_update_issue(github_repo)
end)
|> Multi.run(:task_user, fn %{github_issue: github_issue} ->
github_issue |> Sync.User.RecordLinker.link_to(issue_payload)
end)
|> Multi.run(:task, fn %{github_issue: github_issue, task_user: user} ->
github_issue |> Sync.Task.sync_github_issue(user)
end)
|> Multi.run(:github_comment, fn %{github_issue: github_issue} ->
github_issue
|> Sync.GithubComment.create_or_update_comment(comment_payload)
end)
|> Multi.run(:comment_user, fn %{github_comment: github_comment} ->
github_comment |> Sync.User.RecordLinker.link_to(comment_payload)
end)
|> Multi.run(:comment, fn %{github_comment: github_comment, comment_user: user, task: task} ->
task |> Sync.Comment.sync(github_comment, user)
end)
case multi |> Repo.transaction() do
{:ok, %{comment: %Comment{} = comment}} -> {:ok, comment}
{:error, :repo, :unmatched_repository, _steps} ->
{:error, :repo_not_found}
{:error, :github_issue, %Changeset{data: %GithubIssue{}} = changeset, _steps} ->
{:error, :validating_github_issue, changeset}
{:error, :github_issue, %Changeset{data: %GithubUser{}} = changeset, _steps} ->
{:error, :validating_github_user_on_github_issue, changeset}
{:error, :task_user, %Changeset{} = changeset, _steps} ->
{:error, :validating_task_user, changeset}
{:error, :task_user, :multiple_users, _steps} ->
{:error, :multiple_task_users_match}
{:error, :task, %Changeset{} = changeset, _steps} ->
{:error, :validating_task, changeset}
{:error, :github_comment, %Changeset{data: %GithubComment{}} = changeset, _steps} ->
{:error, :validating_github_comment, changeset}
{:error, :github_comment, %Changeset{data: %GithubUser{}} = changeset, _steps} ->
{:error, :validating_github_user_on_github_comment, changeset}
{:error, :comment_user, %Changeset{} = changeset, _steps} ->
{:error, :validating_comment_user, changeset}
{:error, :comment_user, :multiple_users, _steps} ->
{:error, :multiple_comment_users_match}
{:error, :comment, %Changeset{} = changeset, _steps} ->
{:error, :validating_comment, changeset}
{:error, _errored_step, error_response, _steps} ->
{:error, :unexpected_transaction_outcome, error_response}
end
end
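# Illustrative only: for a "deleted" IssueComment payload, the first clause
# above removes both the `Comment` and `GithubComment` records and returns the
# deleted records keyed by multi step name. A minimal sketch:
#
#   {:ok, %{deleted_comments: _comments, deleted_github_comment: _github_comment}} =
#     CodeCorps.GitHub.Sync.issue_comment_event(%{
#       "action" => "deleted",
#       "comment" => %{"id" => 123}
#     })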
@type installation_event_outcome() ::
{:ok, GithubAppInstallation.t()} |
{:error, :validation_error_on_syncing_installation, Changeset.t()} |
{:error, :multiple_unprocessed_installations_found} |
{:error, :github_api_error_on_syncing_repos, struct} |
{:error, :validation_error_on_deleting_removed_repos, {list, list}} |
{:error, :validation_error_on_syncing_existing_repos, {list, list}} |
{:error, :validation_error_on_marking_installation_processed, Changeset.t()} |
{:error, :unexpected_transaction_outcome, any}
@doc ~S"""
Handles a GitHub installation event.
Currently only supports the "created" version of the event.
The event is handled by first syncing the installation payload into a new or
existing `CodeCorps.GithubAppInstallation` record, using
`CodeCorps.GitHub.Sync.GithubAppInstallation.sync/1`, followed by syncing the
record's `CodeCorps.GithubRepo` children using
`CodeCorps.GitHub.Sync.GithubRepo.sync_installation/1`.
[https://developer.github.com/v3/activity/events/types/#installationevent](https://developer.github.com/v3/activity/events/types/#installationevent)
"""
@spec installation_event(map) :: installation_event_outcome()
def installation_event(%{"action" => "created"} = payload) do
multi =
Multi.new
|> Multi.run(:installation, fn _ -> payload |> Sync.GithubAppInstallation.sync() end)
|> Multi.run(:repos, fn %{installation: installation} -> installation |> Sync.GithubRepo.sync_installation() end)
case multi |> Repo.transaction() do
{:ok, %{installation: installation, repos: {synced_repos, _deleted_repos}}} ->
{:ok, GithubAppInstallation |> Repo.get(installation.id) |> Map.put(:github_repos, synced_repos)}
{:error, :installation, %Changeset{} = changeset, _steps} ->
{:error, :validation_error_on_syncing_installation, changeset}
{:error, :installation, :multiple_unprocessed_installations_found, _steps} ->
{:error, :multiple_unprocessed_installations_found}
{:error, :repos, {:api_error, error}, _steps} ->
{:error, :github_api_error_on_syncing_repos, error}
{:error, :repos, {:delete, {repos, changesets}}, _steps} ->
{:error, :validation_error_on_deleting_removed_repos, {repos, changesets}}
{:error, :repos, {:sync, {repos, changesets}}, _steps} ->
{:error, :validation_error_on_syncing_existing_repos, {repos, changesets}}
{:error, :repos, {:mark_processed, %Changeset{} = changeset}, _steps} ->
{:error, :validation_error_on_marking_installation_processed, changeset}
{:error, _errored_step, error_response, _steps} ->
{:error, :unexpected_transaction_outcome, error_response}
end
end
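# Illustrative only: a sketch of handling an InstallationEvent payload,
# assuming the payload carries the GitHub installation id under "installation".
#
#   payload = %{"action" => "created", "installation" => %{"id" => 789}}
#
#   case CodeCorps.GitHub.Sync.installation_event(payload) do
#     {:ok, %CodeCorps.GithubAppInstallation{github_repos: repos}} -> {:ok, repos}
#     {:error, :github_api_error_on_syncing_repos, error} -> {:error, error}
#     other -> other
#   end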
@type installation_repositories_event_outcome ::
{:ok, list(GithubRepo.t())} |
{:error, :unmatched_installation} |
{:error, :validation_error_on_syncing_repos, Changeset.t()} |
{:error, :unexpected_transaction_outcome, any}
@doc ~S"""
Syncs a GitHub InstallationRepositories event.
- For the "removed" action:
- Deletes all `CodeCorps.GithubRepo` records matched with the payload
- For the "added" action:
- Adds all `CodeCorps.GithubRepo` records matching data from the payload
[https://developer.github.com/v3/activity/events/types/#installationrepositoriesevent](https://developer.github.com/v3/activity/events/types/#installationrepositoriesevent)
"""
@spec installation_repositories_event(map) ::
installation_repositories_event_outcome()
def installation_repositories_event(payload) do
multi =
Multi.new
|> Multi.run(:installation, fn _ ->
payload |> Finder.find_installation()
end)
|> Multi.run(:repos, fn %{installation: installation} ->
installation |> Sync.GithubRepo.sync_installation(payload)
end)
case multi |> Repo.transaction() do
{:ok, %{repos: repos}} -> {:ok, repos}
{:error, :installation, :unmatched_installation, _steps} ->
{:error, :unmatched_installation}
{:error, :repos, {_repos, _changesets}, _steps} ->
{:error, :validation_error_on_syncing_repos, %{}}
{:error, _errored_step, error_response, _steps} ->
{:error, :unexpected_transaction_outcome, error_response}
end
end
@type pull_request_event_outcome ::
{:ok, map} |
{:error, :repo_not_found} |
{:error, :fetching_issue, struct} |
{:error, :validating_github_pull_request, Changeset.t()} |
{:error, :validating_github_issue, Changeset.t()} |
{:error, :validating_user, Changeset.t()} |
{:error, :multiple_issue_users_match} |
{:error, :validating_task, Changeset.t()} |
{:error, :unexpected_transaction_outcome, any}
@doc ~S"""
Syncs a GitHub PullRequest event.
- Finds the `CodeCorps.GithubRepo`
- Fetches the issue from the GitHub API
- Syncs the pull request portion of the event with `Sync.GithubPullRequest`
- Syncs the issue portion of the event with `Sync.GithubIssue` and `Sync.Task`, using the
changes passed in from the issue fetching and the pull request syncing
[https://developer.github.com/v3/activity/events/types/#pullrequestevent](https://developer.github.com/v3/activity/events/types/#pullrequestevent)
"""
@spec pull_request_event(map) :: pull_request_event_outcome()
def pull_request_event(
%{"pull_request" => %{"issue_url" => issue_url} = pr_payload} = payload) do
multi =
Multi.new
|> Multi.run(:repo, fn _ -> Finder.find_repo(payload) end)
|> Multi.run(:fetch_issue, fn %{repo: github_repo} ->
API.Issue.from_url(issue_url, github_repo)
end)
|> Multi.run(:github_pull_request, fn %{repo: github_repo} ->
pr_payload
|> Sync.GithubPullRequest.create_or_update_pull_request(github_repo)
end)
|> Multi.run(:github_issue, fn %{fetch_issue: issue_payload, repo: github_repo, github_pull_request: github_pull_request} ->
issue_payload
|> Sync.GithubIssue.create_or_update_issue(github_repo, github_pull_request)
end)
|> Multi.run(:issue_user, fn %{fetch_issue: issue_payload, github_issue: github_issue} ->
Sync.User.RecordLinker.link_to(github_issue, issue_payload)
end)
|> Multi.run(:task, fn %{github_issue: github_issue, issue_user: user} ->
github_issue |> Sync.Task.sync_github_issue(user)
end)
case multi |> Repo.transaction() do
{:ok, %{github_pull_request: _, github_issue: _} = result} -> {:ok, result}
{:error, :repo, :unmatched_repository, _steps} ->
{:error, :repo_not_found}
{:error, :fetch_issue, error, _steps} ->
{:error, :fetching_issue, error}
{:error, :github_pull_request, %Changeset{} = changeset, _steps} ->
{:error, :validating_github_pull_request, changeset}
{:error, :github_issue, %Changeset{} = changeset, _steps} ->
{:error, :validating_github_issue, changeset}
{:error, :issue_user, %Changeset{} = changeset, _steps} ->
{:error, :validating_user, changeset}
{:error, :issue_user, :multiple_users, _steps} ->
{:error, :multiple_issue_users_match}
{:error, :task, %Changeset{} = changeset, _steps} ->
{:error, :validating_task, changeset}
{:error, _errored_step, error_response, _steps} ->
{:error, :unexpected_transaction_outcome, error_response}
end
end
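# Illustrative only: on success, `pull_request_event/1` returns the full multi
# result map, so the synced task can be pattern matched out directly.
#
#   case CodeCorps.GitHub.Sync.pull_request_event(payload) do
#     {:ok, %{task: %CodeCorps.Task{} = task}} -> {:ok, task}
#     {:error, :fetching_issue, error} -> {:error, error}
#     other -> other
#   end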
@doc ~S"""
Syncs a `GithubRepo`.
Fetches and syncs records from the GitHub API for a given repository, marking
progress of the sync state along the way.
- Fetches the pull requests from the API
- Creates or updates `GithubPullRequest` records (and their related
`GithubUser` records)
- Fetches the issues from the API
- Creates or updates `GithubIssue` records, and relates them to any related
`GithubPullRequest` records created previously (along with any related
`GithubUser` records)
- Fetches the comments from the API
- Creates or updates `GithubComment` records (and their related `GithubUser`
records)
- Creates or updates `User` records for the `GithubUser` records
- Creates or updates `Task` records, and relates them to any related
`GithubIssue` and `User` records created previously
- Creates or updates `Comment` records, and relates them to any related
`GithubComment` and `User` records created previously
"""
@spec sync_repo(GithubRepo.t()) ::
{:ok, GithubRepo.t()} | {:error, Changeset.t()}
def sync_repo(%GithubRepo{} = repo) do
repo = preload_github_repo(repo)
with {:ok, repo} <- repo |> mark_repo("fetching_pull_requests"),
{:ok, pr_payloads} <- repo |> API.Repository.pulls |> sync_step(:fetch_pull_requests),
{:ok, repo} <- repo |> mark_repo("syncing_github_pull_requests", %{syncing_pull_requests_count: pr_payloads |> Enum.count}),
{:ok, pull_requests} <- pr_payloads |> Enum.map(&Sync.GithubPullRequest.create_or_update_pull_request(&1, repo)) |> ResultAggregator.aggregate |> sync_step(:sync_pull_requests),
{:ok, repo} <- repo |> mark_repo("fetching_issues"),
{:ok, issue_payloads} <- repo |> API.Repository.issues |> sync_step(:fetch_issues),
{:ok, repo} <- repo |> mark_repo("syncing_github_issues", %{syncing_issues_count: issue_payloads |> Enum.count}),
paired_issues <- issue_payloads |> pair_issues_payloads_with_prs(pull_requests),
{:ok, _issues} <- paired_issues |> Enum.map(fn {issue_payload, pr} -> issue_payload |> Sync.GithubIssue.create_or_update_issue(repo, pr) end) |> ResultAggregator.aggregate |> sync_step(:sync_issues),
{:ok, repo} <- repo |> mark_repo("fetching_comments"),
{:ok, comment_payloads} <- repo |> API.Repository.issue_comments |> sync_step(:fetch_comments),
{:ok, repo} <- repo |> mark_repo("syncing_github_comments", %{syncing_comments_count: comment_payloads |> Enum.count}),
{:ok, _comments} <- comment_payloads |> Enum.map(&Sync.GithubComment.create_or_update_comment(repo, &1)) |> ResultAggregator.aggregate |> sync_step(:sync_comments),
repo <- GithubRepo |> Repo.get(repo.id) |> preload_github_repo(),
{:ok, repo} <- repo |> mark_repo("syncing_users"),
{:ok, _users} <- repo |> Sync.User.sync_github_repo() |> sync_step(:sync_users),
{:ok, repo} <- repo |> mark_repo("syncing_tasks"),
{:ok, _tasks} <- repo |> Sync.Task.sync_github_repo() |> sync_step(:sync_tasks),
{:ok, repo} <- repo |> mark_repo("syncing_comments"),
{:ok, _comments} <- repo |> Sync.Comment.sync_github_repo() |> sync_step(:sync_comments),
{:ok, repo} <- repo |> mark_repo("synced")
do
{:ok, repo}
else
{:error, %Changeset{} = changeset} -> {:error, changeset}
{:error, :fetch_pull_requests} -> repo |> mark_repo("errored_fetching_pull_requests")
{:error, :sync_pull_requests} -> repo |> mark_repo("errored_syncing_pull_requests")
{:error, :fetch_issues} -> repo |> mark_repo("errored_fetching_issues")
{:error, :sync_issues} -> repo |> mark_repo("errored_syncing_issues")
{:error, :fetch_comments} -> repo |> mark_repo("errored_fetching_comments")
{:error, :sync_comments} -> repo |> mark_repo("errored_syncing_comments")
{:error, :sync_users} -> repo |> mark_repo("errored_syncing_users")
{:error, :sync_tasks} -> repo |> mark_repo("errored_syncing_tasks")
end
end
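# Illustrative only: `sync_repo/1` performs several GitHub API calls, so it is
# best run from a background process. A minimal sketch, assuming `repo_id`
# points at a `GithubRepo` linked to a processed installation:
#
#   github_repo = CodeCorps.Repo.get!(CodeCorps.GithubRepo, repo_id)
#
#   case CodeCorps.GitHub.Sync.sync_repo(github_repo) do
#     {:ok, %CodeCorps.GithubRepo{sync_state: "synced"} = synced} -> {:ok, synced}
#     {:ok, %CodeCorps.GithubRepo{sync_state: errored_state}} -> {:error, errored_state}
#     {:error, %Ecto.Changeset{} = changeset} -> {:error, changeset}
#   end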
@spec mark_repo(GithubRepo.t(), String.t(), map) ::
{:ok, GithubRepo.t()} | {:error, Changeset.t()}
defp mark_repo(%GithubRepo{} = repo, sync_state, params \\ %{}) do
repo
|> GithubRepo.update_sync_changeset(params |> Map.put(:sync_state, sync_state))
|> Repo.update
end
@spec pair_issues_payloads_with_prs(list, list) :: list(tuple)
defp pair_issues_payloads_with_prs(issue_payloads, pull_requests) do
issue_payloads |> Enum.map(fn %{"number" => number} = issue_payload ->
matching_pr =
pull_requests
|> Enum.find(nil, fn pr -> pr |> Map.get(:number) == number end)
{issue_payload, matching_pr}
end)
end
@spec preload_github_repo(GithubRepo.t()) :: GithubRepo.t()
defp preload_github_repo(%GithubRepo{} = github_repo) do
github_repo
|> Repo.preload([
:github_app_installation,
:project,
github_comments: [:github_issue, :github_user],
github_issues: [:github_comments, :github_user]
])
end
@spec sync_step(tuple, atom) :: tuple
defp sync_step({:ok, _} = result, _step), do: result
defp sync_step({:error, _}, step), do: {:error, step}
end
<|start_filename|>test/lib/code_corps/github/github_test.exs<|end_filename|>
defmodule CodeCorps.GitHubTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub,
GitHub.APIError
}
@response_body %{"foo" => "bar"}
@error_body %{"message" => "bar"}
@error_code 401
@error APIError.new({@error_code, @error_body})
defmodule BasicSuccessAPI do
def request(method, url, body, headers, options) do
send(self(), {method, url, body, headers, options})
{:ok, body} = %{"foo" => "bar"} |> Poison.encode
{:ok, %HTTPoison.Response{body: body, status_code: 200}}
end
end
defmodule BasicErrorAPI do
def request(method, url, body, headers, options) do
send(self(), {method, url, body, headers, options})
{:ok, body} = %{"message" => "bar"} |> Poison.encode
{:ok, %HTTPoison.Response{body: body, status_code: 401}}
end
end
describe "request/5" do
test "properly calls api and returns a successful response" do
with_mock_api(BasicSuccessAPI) do
assert {:ok, @response_body} == GitHub.request(:get, "foo", %{}, %{}, [])
end
assert_received({
:get,
"https://api.github.com/foo",
"{}",
[{"Accept", "application/vnd.github.machine-man-preview+json"}],
[]
})
end
test "properly calls api and returns an error response" do
with_mock_api(BasicErrorAPI) do
assert {:error, @error} = GitHub.request(:get, "bar", %{}, %{}, [])
end
assert_received({
:get,
"https://api.github.com/bar",
"{}",
[{"Accept", "application/vnd.github.machine-man-preview+json"}],
[]
})
end
end
describe "user_access_token_request/2" do
test "properly calls api and returns a successful response" do
with_mock_api(BasicSuccessAPI) do
assert {:ok, @response_body} == GitHub.user_access_token_request("foo_code", "foo_state")
end
assert_received({
:post,
"https://github.com/login/oauth/access_token",
body_text,
[{"Accept", "application/json"}, {"Content-Type", "application/json"}],
[]
})
body = body_text |> Poison.decode!
assert body["state"] == "foo_state"
assert body["code"] == "foo_code"
assert body |> Map.has_key?("client_secret")
assert body |> Map.has_key?("client_id")
end
test "properly calls api and returns an error response" do
with_mock_api(BasicErrorAPI) do
assert {:error, @error} == GitHub.user_access_token_request("foo_code", "foo_state")
end
assert_received({
:post, "https://github.com/login/oauth/access_token",
body_text,
[{"Accept", "application/json"}, {"Content-Type", "application/json"}],
[]
})
body = body_text |> Poison.decode!
assert body["state"] == "foo_state"
assert body["code"] == "foo_code"
assert body |> Map.has_key?("client_secret")
assert body |> Map.has_key?("client_id")
end
end
describe "integration_request/5" do
test "properly calls api and returns a successful response" do
with_mock_api(BasicSuccessAPI) do
assert {:ok, @response_body} == GitHub.integration_request(:get, "foo", %{}, %{}, [])
end
assert_received({
:get,
"https://api.github.com/foo",
"{}",
[{"Accept", "application/vnd.github.machine-man-preview+json"}, {"Authorization", "Bearer" <> _}],
[]
})
end
test "properly calls api and returns an error response" do
with_mock_api(BasicErrorAPI) do
assert {:error, @error} = GitHub.integration_request(:get, "bar", %{}, %{}, [])
end
assert_received({
:get,
"https://api.github.com/bar",
"{}",
[{"Accept", "application/vnd.github.machine-man-preview+json"}, {"Authorization", "Bearer" <> _}],
[]
})
end
end
end
<|start_filename|>test/lib/code_corps/policy/category_test.exs<|end_filename|>
defmodule CodeCorps.Policy.CategoryTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.Category, only: [create?: 1, update?: 1]
describe "create?" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
assert create?(user)
end
test "returns false if user is not an admin" do
user = build(:user, admin: false)
refute create?(user)
end
end
describe "update?" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
assert update?(user)
end
test "returns false if user is not an admin" do
user = build(:user, admin: false)
refute update?(user)
end
end
end
<|start_filename|>lib/code_corps/github/sync/utils/finder.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Utils.Finder do
@moduledoc ~S"""
Used to retrieve locally stored GitHub records, using data contained in GitHub
API payloads.
"""
alias CodeCorps.{
GithubRepo,
GithubAppInstallation,
Repo
}
@doc ~S"""
For a provided GitHub API payload, attempts to find a `CodeCorps.GithubRepo`
record.
"""
@spec find_repo(map) :: {:ok, GithubRepo.t} | {:error, :unmatched_repository}
def find_repo(%{"repository" => %{"id" => github_id}}) do
result =
GithubRepo
|> Repo.get_by(github_id: github_id)
|> Repo.preload(:github_app_installation)
case result do
%GithubRepo{} = github_repo -> {:ok, github_repo}
nil -> {:error, :unmatched_repository}
end
end
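# Illustrative only: `find_repo/1` keys off the "repository" => "id" value
# present in most GitHub webhook payloads, so a minimal map is enough to look
# up a repo. The ids below are assumptions.
#
#   {:ok, %CodeCorps.GithubRepo{}} =
#     CodeCorps.GitHub.Sync.Utils.Finder.find_repo(%{"repository" => %{"id" => 12345}})
#
#   {:error, :unmatched_repository} =
#     CodeCorps.GitHub.Sync.Utils.Finder.find_repo(%{"repository" => %{"id" => -1}})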
@doc ~S"""
For a provided GitHub API payload, attempts to find a
`CodeCorps.GithubAppInstallation` record.
"""
@spec find_installation(map) :: {:ok, GithubAppInstallation.t()} | {:error, :unmatched_installation}
def find_installation(%{"installation" => %{"id" => github_id}}) do
case GithubAppInstallation |> Repo.get_by(github_id: github_id) do
%GithubAppInstallation{} = installation -> {:ok, installation}
nil -> {:error, :unmatched_installation}
end
end
end
<|start_filename|>test/lib/code_corps/github/adapters/pull_request_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.PullRequestTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Adapters.PullRequest
describe "from_api/1" do
test "maps api payload correctly" do
%{"pull_request" => payload} = load_event_fixture("pull_request_opened")
assert PullRequest.from_api(payload) == %{
additions: payload["additions"],
body: payload["body"],
changed_files: payload["changed_files"],
closed_at: payload["closed_at"],
comments: payload["comments"],
comments_url: payload["comments_url"],
commits: payload["commits"],
commits_url: payload["commits_url"],
deletions: payload["deletions"],
diff_url: payload["diff_url"],
github_created_at: payload["created_at"],
github_id: payload["id"],
github_updated_at: payload["updated_at"],
html_url: payload["html_url"],
issue_url: payload["issue_url"],
locked: payload["locked"],
merge_commit_sha: payload["merge_commit_sha"],
mergeable_state: payload["mergeable_state"],
merged: payload["merged"],
merged_at: payload["merged_at"],
number: payload["number"],
patch_url: payload["patch_url"],
review_comment_url: payload["review_comment_url"],
review_comments: payload["review_comments"],
review_comments_url: payload["review_comments_url"],
state: payload["state"],
statuses_url: payload["statuses_url"],
title: payload["title"],
url: payload["url"]
}
end
end
end
<|start_filename|>lib/code_corps/emails/project_user_request_email.ex<|end_filename|>
defmodule CodeCorps.Emails.ProjectUserRequestEmail do
import Bamboo.Email, only: [to: 2]
import Bamboo.PostmarkHelper
import Ecto.Query
alias CodeCorps.{Project, ProjectUser, Repo, User, WebClient}
alias CodeCorps.Emails.BaseEmail
alias CodeCorps.Presenters.ImagePresenter
@spec create(ProjectUser.t) :: Bamboo.Email.t
def create(%ProjectUser{project: project, user: user}) do
BaseEmail.create
|> to(project |> get_owners_emails())
|> template(template_id(), build_model(project, user))
end
@spec build_model(Project.t, User.t) :: map
defp build_model(%Project{} = project, %User{} = user) do
%{
contributors_url: project |> preload() |> url(),
project_logo_url: ImagePresenter.large(project),
project_title: project.title,
subject: "#{user.first_name} wants to join #{project.title}",
user_first_name: user.first_name,
user_image_url: ImagePresenter.large(user)
}
end
@spec preload(Project.t) :: Project.t
defp preload(%Project{} = project), do: project |> Repo.preload(:organization)
@spec url(Project.t) :: String.t
defp url(project) do
WebClient.url()
|> URI.merge(project.organization.slug <> "/" <> project.slug <> "/people")
|> URI.to_string
end
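# Illustrative only: assuming `WebClient.url/0` returns "https://www.codecorps.org",
# a project with organization slug "acme" and project slug "widget" yields the
# contributors URL "https://www.codecorps.org/acme/widget/people".
#
#   project |> preload() |> url()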
@spec template_id :: String.t
defp template_id, do: Application.get_env(:code_corps, :postmark_project_user_request_template)
@spec get_owners_emails(Project.t) :: list(String.t)
defp get_owners_emails(%Project{} = project) do
project |> get_owners() |> Enum.map(&extract_email/1)
end
@spec extract_email(User.t) :: String.t
defp extract_email(%User{email: email}), do: email
@spec get_owners(Project.t) :: list(User.t)
defp get_owners(%Project{id: project_id}) do
query = from u in User,
join: pu in ProjectUser, on: u.id == pu.user_id,
where: pu.project_id == ^project_id,
where: pu.role == "owner"
query |> Repo.all()
end
end
<|start_filename|>test/lib/code_corps_web/controllers/stripe_platform_customer_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripePlatformCustomerControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :stripe_platform_customer
describe "show" do
@tag :authenticated
test "shows chosen resource when user is authenticated and authorized", %{conn: conn, current_user: current_user} do
stripe_platform_customer = insert(:stripe_platform_customer, user: current_user)
conn
|> request_show(stripe_platform_customer)
|> json_response(200)
|> assert_id_from_response(stripe_platform_customer.id)
end
test "renders 401 when unauthenticated", %{conn: conn} do
stripe_platform_customer = insert(:stripe_platform_customer)
assert conn |> request_show(stripe_platform_customer) |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
stripe_platform_customer = insert(:stripe_platform_customer)
assert conn |> request_show(stripe_platform_customer) |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when user is authenticated and authorized", %{conn: conn, current_user: current_user} do
assert conn |> request_create(%{user: current_user}) |> json_response(201)
user_id = current_user.id
assert_received {:track, ^user_id, "Created Stripe Platform Customer", %{}}
end
test "does not create resource and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
end
<|start_filename|>lib/code_corps_web/views/github_app_installation_view.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubAppInstallationView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes ~w(github_id github_account_id github_account_avatar_url github_account_login github_account_type inserted_at installed state updated_at)a
has_one :project, type: "project", field: :project_id
has_one :user, type: "user", field: :user_id
has_many :github_repos, serializer: CodeCorpsWeb.GithubRepoView, identifiers: :always
has_many :organization_github_app_installations, serializer: CodeCorpsWeb.OrganizationGithubAppInstallationView, identifiers: :always
end
<|start_filename|>lib/code_corps/model/comment.ex<|end_filename|>
defmodule CodeCorps.Comment do
use CodeCorps.Model
alias CodeCorps.Services.MarkdownRendererService
@type t :: %__MODULE__{}
schema "comments" do
field :body, :string
field :created_at, :utc_datetime
field :created_from, :string, default: "code_corps"
field :markdown, :string
field :modified_at, :utc_datetime
field :modified_from, :string, default: "code_corps"
belongs_to :github_comment, CodeCorps.GithubComment
belongs_to :user, CodeCorps.User
belongs_to :task, CodeCorps.Task
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:markdown])
|> validate_required([:markdown])
|> MarkdownRendererService.render_markdown_to_html(:markdown, :body)
end
def create_changeset(struct, params) do
struct
|> changeset(params)
|> cast(params, [:task_id, :user_id])
|> set_created_and_modified_at()
|> validate_required([:task_id, :user_id])
|> assoc_constraint(:task)
|> assoc_constraint(:user)
|> put_change(:modified_from, "code_corps")
end
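# Illustrative only: a sketch of creating a comment through the changeset
# pipeline above, assuming `task` and `user` are persisted records.
#
#   %CodeCorps.Comment{}
#   |> CodeCorps.Comment.create_changeset(%{
#     "markdown" => "Looks good to me",
#     "task_id" => task.id,
#     "user_id" => user.id
#   })
#   |> CodeCorps.Repo.insert()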
def update_changeset(struct, params) do
struct
|> changeset(params)
|> update_modified_at()
|> put_change(:modified_from, "code_corps")
end
defp set_created_and_modified_at(changeset) do
now = DateTime.utc_now
changeset
|> put_change(:created_at, now)
|> put_change(:modified_at, now)
end
defp update_modified_at(changeset) do
put_change(changeset, :modified_at, DateTime.utc_now)
end
end
<|start_filename|>lib/code_corps_web/views/conversation_part_view.ex<|end_filename|>
defmodule CodeCorpsWeb.ConversationPartView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:body, :inserted_at, :read_at, :updated_at]
has_one :author, type: "user", field: :author_id
has_one :conversation, type: "conversation", field: :conversation_id
end
<|start_filename|>priv/repo/migrations/20171114010851_migrate_unsupported_github_events.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.MigrateUnsupportedGithubEvents do
use Ecto.Migration
import Ecto.Query
alias CodeCorps.Repo
def up do
from(
ge in "github_events",
where: [failure_reason: "not_fully_implemented"],
or_where: [failure_reason: "not_yet_implemented"],
or_where: [failure_reason: "unexpected_action"],
update: [set: [failure_reason: nil, status: "unsupported"]]
) |> Repo.update_all([])
end
def down do
# no-op
end
end
<|start_filename|>lib/code_corps_web/controllers/project_user_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.ProjectUserController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{
Analytics.SegmentTracker,
Analytics.SegmentDataExtractor,
Emails,
Helpers.Query,
Mailer,
ProjectUser,
User
}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@preloads [:project, :user]
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with project_users <- ProjectUser |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: project_users)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %ProjectUser{} = project_user <- ProjectUser |> Repo.get(id) |> Repo.preload(@preloads) do
conn |> render("show.json-api", data: project_user)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %ProjectUser{}, params),
{:ok, %ProjectUser{} = project_user} <- %ProjectUser{} |> ProjectUser.create_changeset(params) |> Repo.insert,
_ <- maybe_send_create_email(project_user)
do
track_created(current_user, project_user)
conn |> put_status(:created) |> render("show.json-api", data: project_user)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %ProjectUser{} = project_user <- ProjectUser |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, project_user, params),
{:ok, %ProjectUser{} = updated_project_user} <- project_user |> ProjectUser.update_changeset(params) |> Repo.update,
_ <- maybe_send_update_email(updated_project_user, project_user)
do
track_updated(current_user, project_user)
conn |> render("show.json-api", data: updated_project_user)
end
end
@spec delete(Conn.t, map) :: Conn.t
def delete(%Conn{} = conn, %{"id" => id} = _params) do
with %ProjectUser{} = project_user <- ProjectUser |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:delete, project_user),
{:ok, %ProjectUser{} = _project_user} <- project_user |> Repo.delete
do
conn |> send_resp(:no_content, "")
end
end
@spec maybe_send_create_email(ProjectUser.t) :: Bamboo.Email.t | nil
defp maybe_send_create_email(%ProjectUser{role: "pending"} = project_user) do
send_request_email(project_user)
end
defp maybe_send_create_email(_), do: nil
@spec send_request_email(ProjectUser.t) :: Bamboo.Email.t
defp send_request_email(project_user) do
project_user
|> Repo.preload(@preloads)
|> Emails.ProjectUserRequestEmail.create()
|> Mailer.deliver_now()
end
@spec maybe_send_update_email(ProjectUser.t, ProjectUser.t) :: Bamboo.Email.t | nil
defp maybe_send_update_email(%ProjectUser{role: updated_role} = project_user, %ProjectUser{role: previous_role}) do
case {updated_role, previous_role} do
{"contributor", "pending"} ->
send_acceptance_email(project_user)
_ -> nil
end
end
@spec send_acceptance_email(ProjectUser.t) :: Bamboo.Email.t
defp send_acceptance_email(project_user) do
project_user
|> Repo.preload(@preloads)
|> Emails.ProjectUserAcceptanceEmail.create()
|> Mailer.deliver_now()
end
@spec track_created(User.t, ProjectUser.t) :: any
def track_created(
%User{id: user_id},
%ProjectUser{} = project_user) do
SegmentTracker.track(user_id, "Requested Membership (User)", project_user)
project_user
|> SegmentDataExtractor.get_project_id()
|> SegmentTracker.track("Membership Requested (Project)", project_user)
end
@spec track_updated(User.t, ProjectUser.t) :: any
def track_updated(
%User{id: user_id} = user,
%ProjectUser{} = project_user) do
data = %{
acceptor: user,
project_user: project_user
}
SegmentTracker.track(user_id, "Membership Approved (User)", data)
project_user
|> SegmentDataExtractor.get_project_id()
|> SegmentTracker.track("Approved Membership (Project)", data)
end
end
<|start_filename|>priv/repo/migrations/20170921014405_loosen_markdown_restrictions.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.LoosenMarkdownRestrictions do
use Ecto.Migration
def up do
alter table(:comments) do
modify :markdown, :text, null: true
end
end
def down do
alter table(:comments) do
modify :markdown, :text, null: false
end
end
end
<|start_filename|>test/lib/code_corps/policy/preview_test.exs<|end_filename|>
defmodule CodeCorps.Policy.PreviewTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.Preview, only: [create?: 2]
describe "create?" do
test "returns true if user is creating their own record" do
user = insert(:user)
params = %{"markdown" => "markdown", "user_id" => user.id}
assert create?(user, params)
end
test "returns false if user is creating someone else's record" do
[user, another_user] = insert_pair(:user)
params = %{"markdown" => "markdown", "user_id" => another_user.id}
refute create?(user, params)
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/github_event_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.GithubEventControllerTest do
@moduledoc false
use CodeCorpsWeb.ApiCase, resource_name: :github_event
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GithubEvent
defp for_event(conn, type, id) do
conn
|> put_req_header("x-github-event", type)
|> put_req_header("x-github-delivery", id)
end
describe "index" do
@tag authenticated: :admin
test "paginates on index", %{conn: conn} do
[_github_event_1, github_event_2] = insert_pair(:github_event)
path = "github-events/?page[page]=2&page[page-size]=1"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([github_event_2.id])
end
@tag authenticated: :admin
test "lists all entries on index by inserted_at desc", %{conn: conn} do
past_event = insert(:github_event, inserted_at: Timex.now())
recent_event = insert(:github_event, inserted_at: Timex.now() |> Timex.shift(days: 3))
data =
conn
|> request_index
|> json_response(200)
|> Map.get("data")
[first_event, second_event] = data
assert first_event["id"] == recent_event.id |> Integer.to_string
assert second_event["id"] == past_event.id |> Integer.to_string
end
@tag authenticated: :admin
test "filters resources on index", %{conn: conn} do
[github_event_1, github_event_2 | _] = insert_list(3, :github_event)
path = "github-events/?filter[id]=#{github_event_1.id},#{github_event_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([github_event_1.id, github_event_2.id])
end
@tag authenticated: :admin
test "filters resources on index with query params", %{conn: conn} do
expected_event = insert(:github_event, action: "opened", status: "processed", type: "issues")
insert(:github_event, action: "created")
insert(:github_event, status: "unprocessed")
insert(:github_event, type: "installation")
path = "github-events/?action=opened&status=processed&type=issues"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([expected_event.id])
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_index() |> json_response(401)
end
@tag :authenticated
test "renders 403 when unauthorized", %{conn: conn} do
assert conn |> request_index() |> json_response(403)
end
end
describe "show" do
@tag authenticated: :admin
test "shows chosen resource", %{conn: conn} do
github_event = insert(:github_event)
conn
|> request_show(github_event)
|> json_response(200)
|> assert_id_from_response(github_event.id)
end
test "renders 401 when unauthenticated", %{conn: conn} do
github_event = insert(:github_event)
assert conn |> request_show(github_event) |> json_response(401)
end
@tag :authenticated
test "renders 403 when unauthorized", %{conn: conn} do
github_event = insert(:github_event)
assert conn |> request_show(github_event) |> json_response(403)
end
end
describe "create" do
@tag :github_webhook
test "responds with 200 for a supported event", %{conn: conn} do
path = conn |> github_events_path(:create)
payload = load_event_fixture("installation_created")
assert conn |> for_event("installation", "foo") |> post(path, payload) |> response(200)
assert Repo.get_by(GithubEvent, github_delivery_id: "foo")
end
@tag :github_webhook
test "responds with 200 for an unsupported event", %{conn: conn} do
path = conn |> github_events_path(:create)
payload = load_event_fixture("pull_request_synchronize")
insert(:github_repo, github_id: payload["repository"]["id"])
assert conn |> for_event("pull_request", "foo") |> post(path, payload) |> response(200)
assert Repo.get_by(GithubEvent, github_delivery_id: "foo")
end
@tag :github_webhook
test "responds with 202 for a supported event but no project_id", %{conn: conn} do
path = conn |> github_events_path(:create)
payload = load_event_fixture("pull_request_synchronize")
insert(:github_repo, github_id: payload["repository"]["id"], project: nil)
assert conn |> for_event("pull_request", "foo") |> post(path, payload) |> response(202)
refute Repo.get_by(GithubEvent, github_delivery_id: "foo")
end
@tag :github_webhook
test "responds with 202 for an unknown event", %{conn: conn} do
path = conn |> github_events_path(:create)
assert conn |> for_event("unknown", "foo") |> post(path, %{}) |> response(202)
refute Repo.get_by(GithubEvent, github_delivery_id: "foo")
end
end
describe "update" do
@valid_attrs %{retry: true}
@tag authenticated: :admin
test "updates when the status was errored", %{conn: conn} do
payload = load_event_fixture("pull_request_opened")
github_event = insert(:github_event, action: "opened", github_delivery_id: "foo", payload: payload, status: "errored", type: "pull_request")
assert conn |> request_update(github_event, @valid_attrs) |> json_response(200)
end
@tag authenticated: :admin
test "does not update for any other status", %{conn: conn} do
payload = load_event_fixture("pull_request_opened")
github_event = insert(:github_event, action: "opened", payload: payload, status: "processed", type: "pull_request")
assert conn |> request_update(github_event, @valid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "renders 403 when unauthorized", %{conn: conn} do
assert conn |> request_update |> json_response(403)
end
end
end
<|start_filename|>lib/code_corps/policy/category.ex<|end_filename|>
defmodule CodeCorps.Policy.Category do
alias CodeCorps.User
@spec create?(User.t) :: boolean
def create?(%User{admin: true}), do: true
def create?(%User{}), do: false
@spec update?(User.t) :: boolean
def update?(%User{admin: true}), do: true
def update?(%User{}), do: false
end
<|start_filename|>test/lib/code_corps_web/plugs/data_to_attributes_test.exs<|end_filename|>
defmodule CodeCorpsWeb.Plug.DataToAttributesTest do
use CodeCorpsWeb.ConnCase
alias CodeCorpsWeb.Plug.DataToAttributes
test "converts basic JSON API payload to params suitable for Ecto", %{conn: conn} do
payload = %{
"id" => "1",
"data" => %{
"attributes" => %{"foo" => "bar", "baz" => "bat"},
"type" => "resource"
}
}
converted_params =
conn
|> Map.put(:params, payload)
|> DataToAttributes.call
|> Map.get(:params)
assert converted_params == %{
"baz" => "bat",
"foo" => "bar",
"id" => "1",
"type" => "resource"
}
end
test "converts belongs_to specified via identifier map into proper id", %{conn: conn} do
payload = %{
"id" => "1",
"data" => %{
"attributes" => %{"foo" => "bar"},
"relationships" => %{
"baz" => %{"data" => %{"id" => "2", "type" => "baz"}}
},
"type" => "resource"
}
}
converted_params =
conn
|> Map.put(:params, payload)
|> DataToAttributes.call
|> Map.get(:params)
assert converted_params == %{
"baz_id" => "2",
"foo" => "bar",
"id" => "1",
"type" => "resource"
}
end
test "converts has_many specified via identifier maps into proper ids", %{conn: conn} do
payload = %{
"id" => "1",
"data" => %{
"attributes" => %{"foo" => "bar"},
"relationships" => %{
"baz" => %{"data" => [
%{"id" => "2", "type" => "baz"},
%{"id" => "3", "type" => "baz"}
]}
},
"type" => "resource"
}
}
converted_params =
conn
|> Map.put(:params, payload)
|> DataToAttributes.call
|> Map.get(:params)
assert converted_params == %{
"baz_ids" => ["2", "3"],
"foo" => "bar",
"id" => "1",
"type" => "resource"
}
end
test "converts included belongs_to into proper subpayload", %{conn: conn} do
payload = %{
"id" => "1",
"data" => %{
"attributes" => %{"foo" => "bar"},
"type" => "resource"
},
"included" => [
%{"data" => %{"attributes" => %{"baz_foo" => "baz_bar"}, "type" => "baz"}}
]
}
converted_params =
conn
|> Map.put(:params, payload)
|> DataToAttributes.call
|> Map.get(:params)
assert converted_params == %{
"baz" => %{
"baz_foo" => "baz_bar",
"type" => "baz"
},
"foo" => "bar",
"id" => "1",
"type" => "resource"
}
end
test "converts included has_many into proper subpayload", %{conn: conn} do
payload = %{
"id" => "1",
"data" => %{
"attributes" => %{"foo" => "bar"},
"type" => "resource"
},
"included" => [
%{"data" => %{"attributes" => %{"baz_foo" => "baz_bar"}, "type" => "baz"}},
%{"data" => %{"attributes" => %{"baz_foo_2" => "baz_bar_2"}, "type" => "baz"}}
]
}
converted_params =
conn
|> Map.put(:params, payload)
|> DataToAttributes.call([includes_many: ["baz"]])
|> Map.get(:params)
assert converted_params == %{
"bazs" => [
%{"baz_foo" => "baz_bar", "type" => "baz"},
%{"baz_foo_2" => "baz_bar_2", "type" => "baz"},
],
"foo" => "bar",
"id" => "1",
"type" => "resource"
}
end
end
<|start_filename|>test/lib/code_corps/messages/conversation_parts_test.exs<|end_filename|>
defmodule CodeCorps.Messages.ConversationPartsTest do
use CodeCorps.ModelCase
alias CodeCorps.{
ConversationPart,
Messages.ConversationParts,
Repo
}
@valid_attrs %{
body: "Test body."
}
describe "create_changeset/2" do
test "with valid attributes" do
user_id = insert(:user, id: 1).id
insert(:conversation, id: 1)
attrs = @valid_attrs |> Map.merge(%{author_id: 1, conversation_id: 1})
{:ok, conversation} = ConversationParts.create_changeset(%ConversationPart{}, attrs) |> Repo.insert
assert conversation.body == "Test body."
assert conversation.part_type == "comment"
assert conversation.author_id == user_id
end
test "validates part_type inclusion: note" do
insert(:user, id: 1)
insert(:conversation, id: 1)
attrs = @valid_attrs |> Map.merge(%{author_id: 1, conversation_id: 1, part_type: "note"})
changeset = ConversationParts.create_changeset(%ConversationPart{}, attrs)
assert changeset.valid?
assert changeset.changes.part_type == "note"
end
test "validates part_type inclusion: reopened" do
attrs = @valid_attrs |> Map.merge(%{author_id: 1, conversation_id: 1, part_type: "reopened"})
changeset = ConversationParts.create_changeset(%ConversationPart{}, attrs)
assert changeset.valid?
assert changeset.changes.part_type == "reopened"
end
test "validates part_type inclusion: closed" do
attrs = @valid_attrs |> Map.merge(%{author_id: 1, conversation_id: 1, part_type: "closed"})
changeset = ConversationParts.create_changeset(%ConversationPart{}, attrs)
assert changeset.valid?
assert changeset.changes.part_type == "closed"
end
test "validates part_type inclusion: wat" do
insert(:user, id: 1)
insert(:conversation, id: 1)
attrs = @valid_attrs |> Map.merge(%{author_id: 1, conversation_id: 1, part_type: "wat"})
changeset = ConversationParts.create_changeset(%ConversationPart{}, attrs)
refute changeset.valid?
assert_error_message(changeset, :part_type, "is invalid")
end
test "requires author_id" do
conversation_id = insert(:conversation).id
changeset = ConversationParts.create_changeset(%ConversationPart{}, %{conversation_id: conversation_id})
refute changeset.valid?
assert_error_message(changeset, :author_id, "can't be blank")
end
test "requires conversation_id" do
author_id = insert(:user).id
changeset = ConversationParts.create_changeset(%ConversationPart{}, %{author_id: author_id})
refute changeset.valid?
assert_error_message(changeset, :conversation_id, "can't be blank")
end
test "requires id of actual author" do
author_id = -1
conversation_id = insert(:conversation).id
attrs = @valid_attrs |> Map.merge(%{author_id: author_id, conversation_id: conversation_id})
{result, changeset} =
ConversationParts.create_changeset(%ConversationPart{}, attrs)
|> Repo.insert()
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :author, "does not exist")
end
test "requires id of actual conversation" do
author_id = insert(:user).id
conversation_id = -1
attrs = @valid_attrs |> Map.merge(%{author_id: author_id, conversation_id: conversation_id})
{result, changeset} =
ConversationParts.create_changeset(%ConversationPart{}, attrs)
|> Repo.insert()
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :conversation, "does not exist")
end
end
end
<|start_filename|>lib/code_corps/policy/user_skill.ex<|end_filename|>
defmodule CodeCorps.Policy.UserSkill do
alias CodeCorps.{UserSkill, User}
def create?(%User{admin: true}, %{}), do: true
def create?(%User{id: id}, %{"user_id" => user_id}), do: id == user_id
def create?(%User{}, %{}), do: false
def delete?(%User{admin: true}, %UserSkill{}), do: true
def delete?(%User{id: id}, %UserSkill{user_id: user_id}), do: id == user_id
def delete?(%User{}, %UserSkill{}), do: false
end
<|start_filename|>test/lib/code_corps_web/views/changeset_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ChangesetViewTest do
use CodeCorpsWeb.ViewCase
alias CodeCorps.Preview
test "renders all errors properly" do
changeset = Preview.create_changeset(%Preview{}, %{})
rendered_json = render(CodeCorpsWeb.ChangesetView, "422.json", %{changeset: changeset})
expected_json = %{
errors: [
%{
detail: "Markdown can't be blank",
source: %{
pointer: "data/attributes/markdown"
},
status: "422",
title: "can't be blank"
},
%{
detail: "User can't be blank",
source: %{
pointer: "data/attributes/user-id"
},
status: "422",
title: "can't be blank"
}
],
jsonapi: %{
version: "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps_web/views/user_skill_view.ex<|end_filename|>
defmodule CodeCorpsWeb.UserSkillView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
has_one :user, type: "user", field: :user_id
has_one :skill, type: "skill", field: :skill_id
end
<|start_filename|>lib/code_corps/github/api/issue.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.Issue do
@moduledoc ~S"""
Functions for working with issues on GitHub.
"""
alias CodeCorps.{
GitHub,
GitHub.API,
GithubAppInstallation,
GithubIssue,
GithubRepo,
Task,
User
}
@doc """
Fetches an issue from the GitHub API, given the API URL for the issue and the
`CodeCorps.GithubRepo` record that points to its GitHub repository.
"""
def from_url(url, %GithubRepo{github_app_installation: %GithubAppInstallation{} = installation}) do
"https://api.github.com/" <> endpoint = url
with opts when is_list(opts) <- API.opts_for(installation) do
GitHub.request(:get, endpoint, %{}, %{}, opts)
else
{:error, github_error} -> {:error, github_error}
end
end
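# Illustrative only: the prefix match above strips the API host, so a payload
# URL like the one below is reduced to a relative endpoint before the request
# is made with the installation's access token.
#
#   url = "https://api.github.com/repos/octocat/hello-world/issues/1347"
#   # from_url(url, github_repo) issues a GET to "repos/octocat/hello-world/issues/1347"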
@doc """
Create an issue on GitHub's API for a `CodeCorps.Task`.
"""
@spec create(Task.t) :: GitHub.response
def create(%Task{
github_repo: %GithubRepo{
github_app_installation: %GithubAppInstallation{} = installation
} = github_repo,
user: %User{} = user
} = task) do
endpoint = github_repo |> get_endpoint()
attrs = task |> GitHub.Adapters.Issue.to_api
with opts when is_list(opts) <- GitHub.API.opts_for(user, installation) do
GitHub.request(:post, endpoint, attrs, %{}, opts)
else
{:error, github_error} -> {:error, github_error}
end
end
@doc """
Update an issue on GitHub's API for a `CodeCorps.Task`.
"""
@spec update(Task.t) :: GitHub.response
def update(%Task{
github_issue: %GithubIssue{number: number},
github_repo: %GithubRepo{
github_app_installation: %GithubAppInstallation{} = installation
} = github_repo,
user: %User{} = user
} = task) do
endpoint = "#{github_repo |> get_endpoint()}/#{number}"
attrs = task |> GitHub.Adapters.Issue.to_api
with opts when is_list(opts) <- GitHub.API.opts_for(user, installation) do
GitHub.request(:patch, endpoint, attrs, %{}, opts)
else
{:error, github_error} -> {:error, github_error}
end
end
@spec get_endpoint(GithubRepo.t) :: String.t
defp get_endpoint(%GithubRepo{github_account_login: owner, name: repo}) do
"/repos/#{owner}/#{repo}/issues"
end
end
<|start_filename|>lib/code_corps_web/views/stripe_connect_subscription_view.ex<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectSubscriptionView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:quantity, :inserted_at, :updated_at]
has_one :user, type: "user", field: :user_id
has_one :project, serializer: CodeCorpsWeb.ProjectView, through: [:stripe_connect_plan, :project]
end
<|start_filename|>test/lib/code_corps_web/views/project_category_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ProjectCategoryViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
project_category = insert(:project_category)
rendered_json = render(CodeCorpsWeb.ProjectCategoryView, "show.json-api", data: project_category)
expected_json = %{
"data" => %{
"id" => project_category.id |> Integer.to_string,
"type" => "project-category",
"attributes" => %{},
"relationships" => %{
"category" => %{
"data" => %{"id" => project_category.category_id |> Integer.to_string, "type" => "category"}
},
"project" => %{
"data" => %{"id" => project_category.project_id |> Integer.to_string, "type" => "project"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps_web/controllers/github_event_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubEventController do
@moduledoc false
use CodeCorpsWeb, :controller
import Ecto.Query, only: [from: 2]
alias CodeCorps.{
Admin,
GithubEvent,
GithubRepo,
GitHub.Webhook.Handler,
GitHub.Webhook.EventSupport,
Helpers.Query,
Processor,
Repo,
User
}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:index, %GithubEvent{}, params)
do
github_events =
GithubEvent
|> Query.id_filter(params)
|> Admin.GithubEventQuery.action_filter(params)
|> Admin.GithubEventQuery.status_filter(params)
|> Admin.GithubEventQuery.type_filter(params)
|> Ecto.Query.order_by([desc: :inserted_at])
|> paginate(params)
conn |> render("index.json-api", data: github_events)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
%GithubEvent{} = github_event <- GithubEvent |> Repo.get(id),
{:ok, :authorized} <- current_user |> Policy.authorize(:show, github_event, params)
do
conn |> render("show.json-api", data: github_event)
end
end
@spec create(Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = payload) do
type = conn |> event_type()
delivery_id = conn |> delivery_id()
action = payload |> Map.get("action", "")
event_support =
if should_process?(payload) do
process_status = type |> EventSupport.status(action)
process_status |> process_event(type, delivery_id, payload)
process_status
else
:ignored
end
conn |> respond_to_webhook(event_support)
end
@spec should_process?(map) :: boolean
defp should_process?(%{"repository" => %{"id" => repository_id}}) do
query = from repo in GithubRepo,
where: repo.github_id == ^repository_id,
where: not(is_nil(repo.project_id))
Repo.one(query) != nil
end
defp should_process?(_), do: true
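# Illustrative only: events are processed when the payload's repository is
# connected to a project, or when the payload carries no repository at all
# (e.g. installation events). The id below is an assumption.
#
#   should_process?(%{"repository" => %{"id" => 12345}})
#   # => true only if a GithubRepo with that github_id has a project_id set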
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %GithubEvent{} = github_event <- GithubEvent |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, github_event, params),
changeset <- github_event |> GithubEvent.update_changeset(params),
{:ok, updated_github_event} <- changeset |> retry_event()
do
conn |> render("show.json-api", data: updated_github_event)
end
end
@spec event_type(Conn.t) :: String.t
defp event_type(%Conn{} = conn) do
conn |> get_req_header("x-github-event") |> List.first
end
@spec delivery_id(Conn.t) :: String.t
defp delivery_id(%Conn{} = conn) do
conn |> get_req_header("x-github-delivery") |> List.first
end
@spec paginate(Ecto.Queryable.t, map) :: list(GithubEvent.t)
defp paginate(query, %{"page" => page_params}) do
query |> Repo.paginate(page_params)
end
defp paginate(query, _) do
query |> Repo.all()
end
@spec process_event(atom, String.t, String.t, map) :: any | :ok
defp process_event(:supported, type, delivery_id, payload) do
Processor.process(fn -> Handler.handle_supported(type, delivery_id, payload) end)
end
defp process_event(:unsupported, type, delivery_id, payload) do
Processor.process(fn -> Handler.handle_unsupported(type, delivery_id, payload) end)
end
defp process_event(:ignored, _, _, _), do: :ok
@type retry_outcome :: {:ok, GithubEvent.t} | {:error, Ecto.Changeset.t} | :ok
@spec retry_event(Ecto.Changeset.t) :: retry_outcome
defp retry_event(%Ecto.Changeset{data: %GithubEvent{action: action, type: type}} = changeset) do
type
|> EventSupport.status(action)
|> do_retry_event(changeset)
end
@spec do_retry_event(atom, Ecto.Changeset.t) :: retry_outcome
defp do_retry_event(:ignored, _changeset), do: :ok
defp do_retry_event(support, %Ecto.Changeset{data: %GithubEvent{github_delivery_id: delivery_id, payload: payload, type: type}} = changeset) do
case changeset |> Repo.update() do
{:ok, %GithubEvent{} = github_event} ->
process_event(support, type, delivery_id, payload)
{:ok, github_event}
{:error, error} ->
{:error, error}
end
end
@spec respond_to_webhook(Conn.t, atom) :: Conn.t
defp respond_to_webhook(conn, :supported), do: conn |> send_resp(200, "")
defp respond_to_webhook(conn, :unsupported), do: conn |> send_resp(200, "")
defp respond_to_webhook(conn, :ignored), do: conn |> send_resp(202, "")
end
<|start_filename|>priv/repo/migrations/20171213062707_add_conversation_models.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddConversationModels do
use Ecto.Migration
def change do
create table(:conversations) do
add :status, :string, null: false, default: "open"
add :read_at, :utc_datetime, null: true
add :message_id, references(:messages)
add :user_id, references(:users)
timestamps()
end
create index(:conversations, [:status])
create index(:conversations, [:message_id])
create index(:conversations, [:user_id])
create table(:conversation_parts) do
add :body, :text, null: false
add :read_at, :utc_datetime, null: true
add :author_id, references(:users)
add :conversation_id, references(:conversations)
timestamps()
end
create index(:conversation_parts, [:author_id])
create index(:conversation_parts, [:conversation_id])
end
end
<|start_filename|>lib/code_corps/cloudex/cloudinary_url.ex<|end_filename|>
defmodule CodeCorps.Cloudex.CloudinaryUrl do
@cloudex Application.get_env(:code_corps, :cloudex)
def for(nil, _options, version, default_color, type) do
"#{Application.get_env(:code_corps, :asset_host)}/icons/#{type}_default_#{version}_#{default_color}.png"
end
def for(public_id, options, _version, _default_color, _type) do
@cloudex.Url.for(public_id, options)
|> add_uri_scheme
end
defp add_uri_scheme(generated_url) do
base_url = String.split(generated_url, "//")
add_https(base_url)
end
defp add_https(base_url) when is_list(base_url) and length(base_url) > 0, do: "https://" <> List.last(base_url)
defp add_https(url), do: url
end
<|start_filename|>lib/code_corps_web/views/user_view.ex<|end_filename|>
defmodule CodeCorpsWeb.UserView do
@moduledoc false
alias CodeCorps.Presenters.ImagePresenter
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [
:admin, :biography, :cloudinary_public_id, :email, :first_name,
:github_avatar_url, :github_id, :github_username, :intercom_user_hash,
:inserted_at, :last_name, :name, :photo_large_url, :photo_thumb_url,
:sign_up_context, :state, :state_transition, :twitter, :username,
:website, :updated_at
]
has_one :slugged_route, serializer: CodeCorpsWeb.SluggedRouteView
has_one :stripe_platform_card, serializer: CodeCorpsWeb.StripePlatformCardView
has_one :stripe_platform_customer, serializer: CodeCorpsWeb.StripePlatformCustomerView
has_many :categories, serializer: CodeCorpsWeb.CategoryView, identifiers: :always
has_many :github_app_installations, serializer: CodeCorpsWeb.GithubAppInstallationView, identifiers: :always
has_many :organizations, serializer: CodeCorpsWeb.OrganizationView, identifiers: :always
has_many :project_users, serializer: CodeCorpsWeb.ProjectUserView, identifiers: :always
has_many :stripe_connect_subscriptions, serializer: CodeCorpsWeb.StripeConnectSubscriptionView, identifiers: :always
has_many :user_categories, serializer: CodeCorpsWeb.UserCategoryView, identifiers: :always
has_many :user_roles, serializer: CodeCorpsWeb.UserRoleView, identifiers: :always
has_many :user_skills, serializer: CodeCorpsWeb.UserSkillView, identifiers: :always
def photo_large_url(user, _conn), do: ImagePresenter.large(user)
def photo_thumb_url(user, _conn), do: ImagePresenter.thumbnail(user)
@doc """
Returns the user's email or an empty string, depending on whether the user
being rendered is the authenticated user or some other user.
Users can only see their own email; everyone else's is private.
"""
def email(user, %Plug.Conn{assigns: %{current_user: current_user}}) do
if user.id == current_user.id, do: user.email, else: ""
end
def email(_user, _conn), do: ""
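# Illustrative sketch (assumed values, not part of the original module): a user
# rendered on their own authenticated connection sees the real address, while
# any other viewer gets an empty string:
#
#   email(%{id: 1, email: "a@b.co"}, %Plug.Conn{assigns: %{current_user: %{id: 1}}}) #=> "a@b.co"
#   email(%{id: 1, email: "a@b.co"}, %Plug.Conn{assigns: %{current_user: %{id: 2}}}) #=> ""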
@intercom_secret_key Application.get_env(:code_corps, :intercom_identity_secret_key) || "RANDOM_KEY"
def intercom_user_hash(%{id: id}, _conn) when is_number(id) do
id |> Integer.to_string |> do_intercom_user_hash
end
# def intercom_user_hash(_user, _conn), do: nil
defp do_intercom_user_hash(id_string) do
:crypto.hmac(:sha256, @intercom_secret_key, id_string)
|> Base.encode16
|> String.downcase
end
@doc """
Returns the user's full name when both first and last name are present.
Returns only the user's first name or last name when the other is missing;
otherwise returns nil.
"""
def name(%{first_name: first_name, last_name: last_name}, _conn) do
"#{first_name} #{last_name}" |> String.trim |> normalize_name
end
defp normalize_name(name) when name in ["", nil], do: nil
defp normalize_name(name), do: name
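# Illustrative sketch (assumed values, not part of the original source):
#
#   name(%{first_name: "Ada", last_name: "Lovelace"}, nil) #=> "Ada Lovelace"
#   name(%{first_name: "Ada", last_name: nil}, nil)        #=> "Ada"
#   name(%{first_name: nil, last_name: nil}, nil)          #=> nil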
end
<|start_filename|>lib/code_corps/github/api/pagination.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.Pagination do
@moduledoc ~S"""
Used to parse and build pagination data when fetching multiple pages from the GitHub API
"""
@doc ~S"""
Parses a collection of response headers and determines the record page count for a GitHub endpoint.
The response the headers are retrieved from is usually generated using a
`:head` request.
The value of a "Link" header is used to determine the page count.
If the "Link" header is not present in the collection, the count is assumed 1
If the "Link" header is present, we use regex to parse the pagination info
from its value.
The format of the header is as follows:
```
{"Link", '<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=15>; rel="next",
<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=34>; rel="last",
<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=1>; rel="first",
<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=13>; rel="prev"'
```
The page count is determined by locating the `rel="last"` URL and extracting
the `page` query parameter from it.
"""
@spec retrieve_total_pages(list) :: integer
def retrieve_total_pages(headers) do
headers
|> List.keyfind("Link", 0, nil)
|> extract_total_pages
end
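# Illustrative sketch (not part of the original source): given a header list
# containing the "Link" value shown in the @doc example above,
# `retrieve_total_pages/1` returns 34 (the page of the rel="last" URL); when
# no "Link" header is present it returns 1.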
@spec extract_total_pages(nil | String.t) :: integer
defp extract_total_pages(nil), do: 1
defp extract_total_pages({"Link", value} = _header) do
value
|> String.split(", ")
|> Enum.map(fn link ->
rel = get_rel(link)
page = get_page(link)
{rel, page}
end)
|> Enum.into(%{})
|> Map.get("last")
end
@spec get_rel(String.t) :: String.t
defp get_rel(link) when is_binary(link) do
# Searches for `rel=`
Regex.run(~r{rel="([a-z]+)"}, link) |> List.last()
end
@spec get_page(String.t) :: integer
defp get_page(link) when is_binary(link) do
# Searches for the following variations:
# ```
# ?page={match}>
# ?page={match}&...
# &page={match}>
# &page={match}&...
# ```
Regex.run(~r{[&/?]page=([^>&]+)}, link) |> List.last |> String.to_integer
end
@doc ~S"""
From the specified page count, generates a list of integers, `1..count`
"""
@spec to_page_numbers(integer) :: Range.t
def to_page_numbers(total) when is_integer(total), do: 1..total
@doc ~S"""
Adds a `page` query parameter to an `options` `Keyword` list.
For `HTTPoison`, query parameters go under the `params` key of the `options`
argument, so this function also ensures the `params` key is present.
"""
@spec add_page_param(Keyword.t, integer) :: Keyword.t
def add_page_param(options, page) when is_list(options) and is_integer(page) do
params =
options
|> Keyword.get(:params, [])
|> Keyword.put(:page, page)
options
|> Keyword.put(:params, params)
end
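# Illustrative sketch (assumed inputs, not part of the original source):
#
#   [params: [per_page: 100]] |> add_page_param(3)
#   #=> [params: [page: 3, per_page: 100]]
#
#   [] |> add_page_param(2)
#   #=> [params: [page: 2]]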
end
<|start_filename|>test/lib/code_corps/policy/stripe_connect_plan_test.exs<|end_filename|>
defmodule CodeCorps.Policy.StripeConnectPlanTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.StripeConnectPlan, only: [show?: 2, create?: 2]
import CodeCorps.StripeConnectPlan, only: [create_changeset: 2]
alias CodeCorps.StripeConnectPlan
describe "show?" do
test "returns true when user is owner of project" do
%{project: project, user: user} = insert(:project_user, role: "owner")
plan = insert(:stripe_connect_plan, project: project)
assert show?(user, plan)
end
test "returns false otherwise" do
user = insert(:user)
project = insert(:project)
plan = insert(:stripe_connect_plan, project: project)
refute show?(user, plan)
end
end
describe "create?" do
test "returns true when user is owner of organization" do
%{project: project, user: user} = insert(:project_user, role: "owner")
changeset = create_changeset(%StripeConnectPlan{}, %{project_id: project.id})
assert create?(user, changeset)
end
test "returns false otherwise" do
user = insert(:user)
project = insert(:project)
changeset = create_changeset(%StripeConnectPlan{}, %{project_id: project.id})
refute create?(user, changeset)
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/adapters/stripe_connect_charge_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripeConnectChargeTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeService.Adapters.StripeConnectChargeAdapter
describe "to_params/2" do
test "adds customer and user id if those records exist" do
# load a predefined fixture to use for adapter testing
fixture = CodeCorps.StripeTesting.Helpers.load_fixture("charge")
account = insert(:stripe_connect_account)
customer = insert(
:stripe_connect_customer,
id_from_stripe: fixture.customer,
stripe_connect_account: account
)
{:ok, result} = StripeConnectChargeAdapter.to_params(fixture, account)
assert result == %{
amount: 100,
amount_refunded: 0,
application_fee_id_from_stripe: nil,
application_id_from_stripe: nil,
balance_transaction_id_from_stripe: "test_balance_transaction_for_charge",
captured: true,
created: 1484869309,
currency: "usd",
customer_id_from_stripe: "test_customer_for_charge",
description: "Test Charge (created for fixture)",
failure_code: nil,
failure_message: nil,
id_from_stripe: "charge",
invoice_id_from_stripe: "invoice",
paid: true,
refunded: false,
review_id_from_stripe: nil,
source_transfer_id_from_stripe: nil,
statement_descriptor: nil,
status: "succeeded",
stripe_connect_account_id: account.id,
stripe_connect_customer_id: customer.id,
user_id: customer.user_id
}
end
end
end
<|start_filename|>lib/code_corps_web/controllers/comment_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.CommentController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Comment, User, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with comments <- Comment |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: comments)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %Comment{} = comment <- Comment |> Repo.get(id) do
conn |> render("show.json-api", data: comment)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %Comment{}, params),
{:ok, %Comment{} = comment} <- Comment.Service.create(params) do
conn |> put_status(:created) |> render("show.json-api", data: comment)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %Comment{} = comment <- Comment |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, comment),
{:ok, %Comment{} = comment} <- comment |> Comment.Service.update(params) do
conn |> render("show.json-api", data: comment)
end
end
end
<|start_filename|>test/lib/code_corps/emails/receipt_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.ReceiptEmailTest do
use CodeCorps.ModelCase
use Bamboo.Test
alias CodeCorps.Emails.ReceiptEmail
test "receipt email works" do
invoice_fixture = CodeCorps.StripeTesting.Helpers.load_fixture("invoice")
user = insert(:user, email: "<EMAIL>", first_name: "Jimmy")
project = insert(:project, title: "Code Corps")
plan = insert(:stripe_connect_plan, project: project)
subscription = insert(
:stripe_connect_subscription,
id_from_stripe: invoice_fixture.subscription,
stripe_connect_plan: plan,
user: user
)
invoice = insert(
:stripe_invoice,
id_from_stripe: invoice_fixture.id,
stripe_connect_subscription: subscription,
user: user
)
charge = insert(
:stripe_connect_charge,
amount: 500,
id_from_stripe: invoice.charge_id_from_stripe,
invoice_id_from_stripe: invoice.id_from_stripe,
user: user,
statement_descriptor: "Test descriptor"
)
insert(:donation_goal, project: project, current: true, description: "Test goal")
email = ReceiptEmail.create(charge, invoice_fixture)
assert email.from == "Code Corps<<EMAIL>>"
assert email.to == "<EMAIL>"
template_model = email.private.template_model |> Map.delete(:high_five_image_url)
high_five_image_url = email.private.template_model |> Map.get(:high_five_image_url)
assert template_model == %{
charge_amount: "$5.00",
charge_statement_descriptor: "Test descriptor",
name: "Jimmy",
project_title: "Code Corps",
project_url: "http://localhost:4200/#{project.organization.slug}/#{project.slug}",
project_current_donation_goal_description: "Test goal",
subject: "Your monthly donation to Code Corps"
}
assert high_five_image_url
end
end
<|start_filename|>test/lib/code_corps/stripe_service/adapters/stripe_connect_subscription_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripeConnectSubscriptionTest do
use ExUnit.Case, async: true
import CodeCorps.StripeService.Adapters.StripeConnectSubscriptionAdapter, only: [to_params: 2]
date = 1479472835
@stripe_connect_subscription %Stripe.Subscription{
application_fee_percent: 5.0,
cancel_at_period_end: false,
canceled_at: nil,
created: date,
current_period_end: date,
current_period_start: date,
customer: "cus_123",
ended_at: nil,
id: "sub_123",
livemode: false,
metadata: %{},
plan: %Stripe.Plan{
id: "month",
amount: 5000,
created: date,
currency: "usd",
interval: "month",
interval_count: 1,
livemode: false,
metadata: %{},
name: "Monthly subscription for Code Corps",
statement_descriptor: nil,
trial_period_days: nil
},
quantity: 1000,
start: date,
status: "active",
tax_percent: nil,
trial_end: nil,
trial_start: nil
}
@local_map %{
"application_fee_percent" => 5.0,
"cancelled_at" => nil,
"created" => date,
"current_period_end" => date,
"current_period_start" => date,
"customer_id_from_stripe" => "cus_123",
"ended_at" => nil,
"id_from_stripe" => "sub_123",
"plan_id_from_stripe" => "month",
"quantity" => 1000,
"start" => date,
"status" => "active"
}
describe "to_params/2" do
test "converts from stripe map to local properly" do
test_attributes = %{
"stripe_connect_plan_id" => 123,
"user_id" =>123,
"foo" => "bar"
}
expected_attributes = %{
"stripe_connect_plan_id" => 123,
"user_id" =>123
}
{:ok, result} = to_params(@stripe_connect_subscription, test_attributes)
expected_map = Map.merge(@local_map, expected_attributes)
assert result == expected_map
end
end
end
<|start_filename|>priv/repo/migrations/20171016125229_add_github_comment.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddGithubComment do
use Ecto.Migration
def change do
create table(:github_comments) do
add :body, :text
add :github_created_at, :utc_datetime
add :github_id, :integer
add :github_updated_at, :utc_datetime
add :html_url, :text
add :url, :text
timestamps()
add :github_issue_id, references(:github_issues)
end
end
end
<|start_filename|>lib/code_corps/model/stripe_platform_customer.ex<|end_filename|>
defmodule CodeCorps.StripePlatformCustomer do
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "stripe_platform_customers" do
field :created, :integer
field :currency, :string
field :delinquent, :boolean
field :email, :string
field :id_from_stripe, :string, null: false
belongs_to :user, CodeCorps.User
has_many :stripe_connect_customers, CodeCorps.StripeConnectCustomer
timestamps()
end
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:created, :currency, :delinquent, :id_from_stripe, :user_id])
|> validate_required([:id_from_stripe, :user_id])
|> assoc_constraint(:user)
end
def update_changeset(struct, params) do
struct
|> cast(params, [:email])
|> validate_required([:email])
end
end
<|start_filename|>test/lib/code_corps/model/github_issue_test.exs<|end_filename|>
defmodule CodeCorps.GithubIssueTest do
use CodeCorps.ModelCase
alias CodeCorps.GithubIssue
@valid_attrs %{
body: "I'm having a problem with this.",
closed_at: nil,
comments_url: "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments",
events_url: "https://api.github.com/repos/octocat/Hello-World/issues/1347/events",
github_created_at: "2011-04-22T13:33:48Z",
github_id: 1,
github_updated_at: "2014-03-03T18:58:10Z",
html_url: "https://github.com/octocat/Hello-World/issues/1347",
labels_url: "https://api.github.com/repos/octocat/Hello-World/issues/1347/labels{/name}",
locked: false,
number: 1347,
repository_url: "https://api.github.com/repos/octocat/Hello-World",
state: "open",
title: "Found a bug",
url: "https://api.github.com/repos/octocat/Hello-World/issues/1347",
}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = GithubIssue.changeset(%GithubIssue{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with missing body" do
attrs = @valid_attrs |> Map.delete(:body)
changeset = GithubIssue.changeset(%GithubIssue{}, attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = GithubIssue.changeset(%GithubIssue{}, @invalid_attrs)
refute changeset.valid?
end
end
<|start_filename|>lib/code_corps_web/controllers/conversation_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.ConversationController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{
Conversation,
Messages,
User
}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
conversations <- Conversation |> Policy.scope(current_user) |> Messages.list_conversations(params) |> preload() do
conn |> render("index.json-api", data: conversations)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
%Conversation{} = conversation <- Messages.get_conversation(id) |> preload(),
{:ok, :authorized} <- current_user |> Policy.authorize(:show, conversation, %{}) do
conn |> render("show.json-api", data: conversation)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %Conversation{} = conversation <- Messages.get_conversation(id) |> preload(),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, conversation),
{:ok, %Conversation{} = updated_conversation} <- conversation |> Messages.update_conversation(params)
do
conn |> render("show.json-api", data: updated_conversation)
end
end
@preloads [:conversation_parts, :message, :user]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>test/lib/code_corps/github/sync/github_app_installation/changeset_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.GithubAppInstallation.ChangesetTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub.Sync,
GithubAppInstallation
}
alias Ecto.Changeset
describe "create_changeset/2" do
test "assigns correct changes" do
payload = load_event_fixture("installation_created")
changeset =
payload |> Sync.GithubAppInstallation.Changeset.create_changeset()
assert changeset |> Changeset.get_change(:github_id) == payload["installation"]["id"]
assert changeset |> Changeset.get_change(:github_account_id) == payload["installation"]["account"]["id"]
assert changeset |> Changeset.get_change(:github_account_avatar_url) == payload["installation"]["account"]["avatar_url"]
assert changeset |> Changeset.get_change(:github_account_login) == payload["installation"]["account"]["login"]
assert changeset |> Changeset.get_change(:github_account_type) == payload["installation"]["account"]["type"]
assert changeset |> Changeset.get_change(:sender_github_id) == payload["sender"]["id"]
assert changeset |> Changeset.get_change(:installed) == true
assert changeset |> Changeset.get_change(:origin) == "github"
assert changeset |> Changeset.get_change(:user) == nil
assert changeset.valid?
end
test "assigns user if provided" do
payload = load_event_fixture("installation_created")
user = insert(:user)
changeset =
payload |> Sync.GithubAppInstallation.Changeset.create_changeset(user)
assert changeset |> Changeset.get_change(:user) |> Map.get(:data) == user
assert changeset.valid?
end
end
describe "update_changeset/2" do
test "assigns proper changes to the task" do
payload = load_event_fixture("installation_created")
github_app_installation = %GithubAppInstallation{}
changeset =
github_app_installation
|> Sync.GithubAppInstallation.Changeset.update_changeset(payload)
assert changeset |> Changeset.get_change(:github_id) == payload["installation"]["id"]
assert changeset |> Changeset.get_change(:github_account_id) == payload["installation"]["account"]["id"]
assert changeset |> Changeset.get_change(:github_account_avatar_url) == payload["installation"]["account"]["avatar_url"]
assert changeset |> Changeset.get_change(:github_account_login) == payload["installation"]["account"]["login"]
assert changeset |> Changeset.get_change(:github_account_type) == payload["installation"]["account"]["type"]
assert changeset |> Changeset.get_change(:sender_github_id) == payload["sender"]["id"]
assert changeset |> Changeset.get_change(:installed) == true
refute changeset |> Changeset.get_change(:origin) == "github"
refute changeset |> Changeset.get_change(:user)
assert changeset.valid?
end
end
end
<|start_filename|>test/lib/code_corps/github/event_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.EventTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.{
GithubEvent,
GitHub.APIError,
GitHub.Event
}
describe "start_processing/1" do
test "sets event status to processing" do
event = insert(:github_event, status: "unprocessed")
{:ok, %GithubEvent{} = updated_event} = Event.start_processing(event)
assert updated_event.status == "processing"
end
end
describe "stop_processing/2" do
test "sets event as processed if resulting tuple starts with :ok" do
event = insert(:github_event, status: "processing")
{:ok, %GithubEvent{} = updated_event} = Event.stop_processing({:ok, "foo"}, event)
assert updated_event.status == "processed"
end
test "marks event errored for changeset" do
event = insert(:github_event, status: "processing")
data = %{foo: "bar"}
changeset = %Ecto.Changeset{data: data}
{:ok, %GithubEvent{} = updated_event} =
{:error, :bar, changeset}
|> Event.stop_processing(event)
assert updated_event.data == data |> Kernel.inspect(pretty: true)
assert updated_event.error == changeset |> Kernel.inspect(pretty: true)
assert updated_event.failure_reason == "bar"
assert updated_event.status == "errored"
end
test "marks event errored for API error" do
event = insert(:github_event, status: "processing")
error_body = %{"message" => "bar"}
error_code = 401
error = APIError.new({error_code, error_body})
{:ok, %GithubEvent{} = updated_event} =
{:error, :bar, error}
|> Event.stop_processing(event)
assert updated_event.data == nil
assert updated_event.error == error |> Kernel.inspect(pretty: true)
assert updated_event.failure_reason == "bar"
assert updated_event.status == "errored"
end
end
end
<|start_filename|>lib/code_corps/processor/async.ex<|end_filename|>
defmodule CodeCorps.Processor.Async do
@behaviour CodeCorps.Processor
@spec process((() -> any)) :: {:ok, pid}
def process(fun) do
Task.Supervisor.start_child(:background_processor, fn ->
apply(fun, [])
end)
end
end
<|start_filename|>test/lib/code_corps/github/sync/github_user/github_user_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubUserTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub.Adapters,
GitHub.Sync,
GithubUser,
Repo
}
alias Ecto.Changeset
@event_payload "issues_opened" |> load_event_fixture() |> Map.get("issue")
describe "create_or_update_github_user/1" do
test "creates github user if one is not matched from the payload" do
assert {:ok, %GithubUser{id: created_id}} =
@event_payload |> Sync.GithubUser.create_or_update_github_user()
assert created_user = Repo.one(GithubUser)
assert created_user.id == created_id
attrs = Adapters.User.to_github_user(@event_payload["user"])
assert created_user |> Map.take(attrs |> Map.keys()) == attrs
end
test "updates github user if one is matched from the payload" do
record = insert(:github_user, github_id: @event_payload["user"]["id"])
assert {:ok, %GithubUser{id: updated_id}} =
@event_payload |> Sync.GithubUser.create_or_update_github_user()
assert updated_user = Repo.one(GithubUser)
assert updated_user.id == updated_id
assert updated_user.id == record.id
attrs = Adapters.User.to_github_user(@event_payload["user"])
assert updated_user |> Map.take(attrs |> Map.keys()) == attrs
end
test "returns changeset if there was a problem" do
assert {:error, %Changeset{} = changeset} =
@event_payload
|> Kernel.put_in(["user", "login"], nil)
|> Sync.GithubUser.create_or_update_github_user()
refute changeset.valid?
refute Repo.one(GithubUser)
end
end
end
<|start_filename|>test/lib/code_corps_web/channels/conversation_channel_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ConversationChannelTest do
use CodeCorpsWeb.ChannelCase
alias CodeCorps.{Conversation, User}
alias CodeCorpsWeb.ConversationChannel
def build_socket(%Conversation{id: id}, %User{} = current_user) do
"test"
|> socket(%{current_user: current_user})
|> subscribe_and_join(ConversationChannel, "conversation:#{id}")
end
describe "conversation:id" do
test "requires authentication" do
%{id: id} = insert(:conversation)
response =
"test"
|> socket(%{})
|> subscribe_and_join(ConversationChannel, "conversation:#{id}")
assert response == {:error, %{reason: "unauthenticated"}}
end
test "ensures current user is authorized for :show on resource" do
user = insert(:user)
%{id: id} = insert(:conversation)
response =
"test"
|> socket(%{current_user: user})
|> subscribe_and_join(ConversationChannel, "conversation:#{id}")
assert response == {:error, %{reason: "unauthorized"}}
end
test "broadcasts new conversation part" do
%{id: id, user: user} = conversation = insert(:conversation)
{:ok, %{}, _socket} =
"test"
|> socket(%{current_user: user})
|> subscribe_and_join(ConversationChannel, "conversation:#{id}")
%{id: conversation_part_id} = conversation_part =
insert(:conversation_part, conversation: conversation)
ConversationChannel.broadcast_new_conversation_part(conversation_part)
assert_broadcast("new:conversation-part", %{id: ^conversation_part_id})
end
end
end
<|start_filename|>lib/code_corps_web/controllers/task_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.TaskController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Analytics.SegmentTracker, Policy, Task, Tasks, User}
import ScoutApm.Tracing
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
tasks = Tasks.Query.list(params)
tasks = preload(tasks)
timing("JaSerializer", "render") do
conn |> render("index.json-api", data: tasks)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{} = params) do
with %Task{} = task <- Tasks.Query.find(params),
task <- preload(task)
do
conn |> render("show.json-api", data: task)
end
end
@spec create(Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %Task{}, params),
{:ok, %Task{} = task} <- params |> Tasks.create_task,
task <- preload(task)
do
current_user |> track_created(task)
current_user |> maybe_track_connected(task)
conn |> put_status(:created) |> render("show.json-api", data: task)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{} = params) do
with %Task{} = task <- Tasks.Query.find(params),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, task),
{:ok, %Task{} = updated_task} <- task |> Tasks.update_task(params),
updated_task <- preload(updated_task)
do
current_user |> track_updated(updated_task)
current_user |> maybe_track_connected(updated_task, task)
current_user |> maybe_track_list_move(updated_task, task)
current_user |> maybe_track_title_change(updated_task, task)
current_user |> maybe_track_close(updated_task, task)
current_user |> maybe_track_archive(updated_task, task)
conn |> render("show.json-api", data: updated_task)
end
end
@preloads [:comments, :github_pull_request, :task_skills, :user_task]
def preload(data) do
timing("TaskController", "preload") do
Repo.preload(data, @preloads)
end
end
# tracking
@spec track_created(User.t, Task.t) :: any
defp track_created(%User{id: user_id}, %Task{} = task) do
user_id |> SegmentTracker.track("Created Task", task)
end
@spec track_updated(User.t, Task.t) :: any
defp track_updated(%User{id: user_id}, %Task{} = task) do
user_id |> SegmentTracker.track("Edited Task", task)
end
@spec maybe_track_connected(User.t, Task.t) :: any
defp maybe_track_connected(
%User{id: user_id},
%Task{github_issue_id: issue_id} = task) when not is_nil(issue_id) do
user_id |> SegmentTracker.track("Connected Task to GitHub", task)
end
defp maybe_track_connected(%User{}, %Task{}), do: :nothing
@spec maybe_track_connected(User.t, Task.t, Task.t) :: any
defp maybe_track_connected(
%User{id: user_id},
%Task{github_issue_id: new_issue_id} = task,
%Task{github_issue_id: old_issue_id})
when is_nil(old_issue_id) and not is_nil(new_issue_id) do
user_id |> SegmentTracker.track("Connected Task to GitHub", task)
end
defp maybe_track_connected(%User{}, %Task{}, %Task{}), do: :nothing
@spec maybe_track_list_move(User.t, Task.t, Task.t) :: any
defp maybe_track_list_move(
%User{id: user_id},
%Task{task_list_id: new_list_id} = task,
%Task{task_list_id: old_list_id}) when new_list_id != old_list_id do
user_id |> SegmentTracker.track("Moved Task Between Lists", task)
end
defp maybe_track_list_move(%User{}, %Task{}, %Task{}), do: :nothing
@spec maybe_track_title_change(User.t, Task.t, Task.t) :: any
defp maybe_track_title_change(
%User{id: user_id},
%Task{title: new_title} = task,
%Task{title: old_title}) when new_title != old_title do
user_id |> SegmentTracker.track("Edited Task Title", task)
end
defp maybe_track_title_change(%User{}, %Task{}, %Task{}), do: :nothing
@spec maybe_track_close(User.t, Task.t, Task.t) :: any
defp maybe_track_close(
%User{id: user_id},
%Task{status: "closed"} = task,
%Task{status: "open"}) do
user_id |> SegmentTracker.track("Closed Task", task)
end
defp maybe_track_close(%User{}, %Task{}, %Task{}), do: :nothing
@spec maybe_track_archive(User.t, Task.t, Task.t) :: any
defp maybe_track_archive(
%User{id: user_id},
%Task{archived: true} = task,
%Task{archived: false}) do
user_id |> SegmentTracker.track("Archived Task", task)
end
defp maybe_track_archive(%User{}, %Task{}, %Task{}), do: :nothing
end
<|start_filename|>test/lib/code_corps/stripe_service/adapters/stripe_connect_plan_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripeConnectPlanTest do
use ExUnit.Case, async: true
import CodeCorps.StripeService.Adapters.StripeConnectPlanAdapter, only: [to_params: 2]
timestamp = 1479472835
@stripe_connect_plan %Stripe.Plan{
id: "month",
amount: 5000,
created: timestamp,
currency: "usd",
interval: "month",
interval_count: 1,
livemode: false,
metadata: %{},
name: "Monthly subscription for Code Corps",
statement_descriptor: nil,
trial_period_days: nil
}
@local_map %{
"amount" => 5000,
"created" => timestamp,
"id_from_stripe" => "month",
"name" => "Monthly subscription for Code Corps"
}
describe "to_params/2" do
test "converts from stripe map to local properly" do
test_attributes = %{
"project_id" => 123,
"foo" => "bar"
}
expected_attributes = %{
"project_id" => 123,
}
{:ok, result} = to_params(@stripe_connect_plan, test_attributes)
expected_map = Map.merge(@local_map, expected_attributes)
assert result == expected_map
end
end
end
<|start_filename|>test/lib/code_corps/model/project_test.exs<|end_filename|>
defmodule CodeCorps.ProjectTest do
use CodeCorps.ModelCase
import CodeCorps.Project
alias CodeCorps.{Project, ProjectUser, Repo}
describe "changeset" do
@valid_attrs %{title: "A title"}
@invalid_attrs %{}
test "with valid attributes" do
changeset = Project.changeset(%Project{}, @valid_attrs)
assert changeset.valid?
end
test "with invalid attributes" do
changeset = Project.changeset(%Project{}, @invalid_attrs)
refute changeset.valid?
assert_error_message(changeset, :title, "can't be blank")
end
test "with long_description_markdown renders long_description_body" do
changeset = Project.changeset(%Project{}, @valid_attrs |> Map.merge(%{long_description_markdown: "Something"}))
assert changeset |> fetch_change(:long_description_body) == {:ok, "<p>Something</p>\n"}
end
test "without long_description_markdown doesn't render long_description_body" do
changeset = Project.changeset(%Project{}, @valid_attrs)
assert changeset |> fetch_change(:long_description_body) == :error
end
test "generates slug from title" do
changeset = Project.changeset(%Project{}, @valid_attrs)
assert changeset |> get_change(:slug) == "a-title"
end
test "validates slug is unique" do
project = insert(:project, slug: "used-slug")
changeset = Project.changeset(%Project{organization_id: project.organization_id}, %{title: "Used Slug"})
{_, changeset} = Repo.insert(changeset)
assert_error_message(changeset, :slug, "has already been taken")
end
test "requires :website to be in proper format" do
project = %Project{}
attrs = %{website: "bad <> website"}
changeset = Project.changeset(project, attrs)
assert_error_message(changeset, :website, "has invalid format")
end
test "doesn't require :website to be part of the changes" do
project = %Project{}
attrs = %{}
changeset = Project.changeset(project, attrs)
refute Keyword.has_key?(changeset.errors, :website)
end
test "prefixes website with 'http://' if there is no prefix" do
project = %Project{website: "https://first.com"}
attrs = %{website: "example.com"}
changeset = Project.changeset(project, attrs)
assert changeset.changes.website == "http://example.com"
end
test "doesn't make a change to the url when there is no param for it" do
project = %Project{website: "https://first.com"}
attrs = %{}
changeset = Project.changeset(project, attrs)
refute Map.has_key?(changeset.changes, :website)
end
end
describe "create_changeset/3" do
test "with valid attributes" do
category = insert(:category)
skill = insert(:skill)
organization = insert(:organization)
attrs = %{
"categories_ids" => [category.id |> Integer.to_string()],
"cloudinary_public_id" => "foo123",
"description" => "Description",
"skills_ids" => [skill.id |> Integer.to_string()],
"title" => "A title",
"organization_id" => organization.id
}
changeset = create_changeset(%Project{}, attrs)
assert changeset.valid?
end
test "with invalid attributes" do
changeset = create_changeset(%Project{}, %{})
refute changeset.valid?
end
test "casts :organization_id and ensures organization exists" do
category = insert(:category)
skill = insert(:skill)
attrs = %{
"categories_ids" => [category.id |> Integer.to_string()],
"cloudinary_public_id" => "foo123",
"description" => "Description",
"skills_ids" => [skill.id |> Integer.to_string()],
"title" => "A title",
"organization_id" => -1
}
changeset = create_changeset(%Project{}, attrs)
assert {:error, failed_insert_changeset} = changeset |> Repo.insert()
refute failed_insert_changeset.valid?
assert error_message(failed_insert_changeset, :organization) == "does not exist"
end
test "casts and inserts proper associated records" do
category = insert(:category)
skill = insert(:skill)
organization = insert(:organization)
attrs = %{
"categories_ids" => [category.id |> Integer.to_string()],
"cloudinary_public_id" => "foo123",
"description" => "Description",
"skills_ids" => [skill.id |> Integer.to_string()],
"title" => "A title",
"organization_id" => organization.id
}
changeset = Project.create_changeset(%Project{}, attrs)
{_, project} = Repo.insert(changeset)
task_list_orders = for task_list <- project.task_lists, do: task_list.order
assert Enum.all?(task_list_orders), "some of the orders are not set (nil)"
assert task_list_orders == Enum.sort(task_list_orders), "task lists order does not correspond to their position"
project_user = Repo.one(ProjectUser)
assert project_user.project_id == project.id
assert project_user.user_id == organization.owner_id
assert project_user.role == "owner"
end
end
describe "update_changeset" do
test "rejects setting of organization id" do
changeset = Project.update_changeset(%Project{}, %{organization_id: 1})
assert :error == changeset |> fetch_change(:organization_id)
end
end
end
<|start_filename|>config/test.exs<|end_filename|>
use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :code_corps, CodeCorpsWeb.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :code_corps, CodeCorps.Repo,
adapter: Ecto.Adapters.Postgres,
username: System.get_env("DATABASE_POSTGRESQL_USERNAME") || "postgres",
password: System.get_env("DATABASE_POSTGRESQL_PASSWORD") || "postgres",
hostname: System.get_env("DATABASE_POSTGRESQL_HOST") || "localhost",
database: "code_corps_phoenix_test",
pool: Ecto.Adapters.SQL.Sandbox
config :code_corps, site_url: "http://localhost:4200"
# speed up password hashing
config :comeonin, :bcrypt_log_rounds, 4
# CORS allowed origins
config :code_corps, allowed_origins: ["http://localhost:4200"]
config :code_corps, CodeCorps.Guardian,
secret_key: "e62fb6e2746f6b1bf8b5b735ba816c2eae1d5d76e64f18f3fc647e308b0c159e"
config :code_corps, :analytics, CodeCorps.Analytics.TestAPI
# Configures stripe for test mode
config :code_corps, :stripe, CodeCorps.StripeTesting
config :code_corps, :stripe_env, :test
config :code_corps, :icon_color_generator, CodeCorps.RandomIconColor.TestGenerator
# Set Corsica logging so rejected requests don't produce console warnings
config :code_corps, :corsica_log_level, [rejected: :debug]
# fall back to sample pem if none is available as an ENV variable
pem = case System.get_env("GITHUB_TEST_APP_PEM") do
nil -> "./test/fixtures/github/app.pem" |> File.read!
encoded_pem -> encoded_pem |> Base.decode64!
end
config :code_corps,
github: CodeCorps.GitHub.SuccessAPI,
github_app_id: System.get_env("GITHUB_TEST_APP_ID"),
github_app_client_id: System.get_env("GITHUB_TEST_APP_CLIENT_ID"),
github_app_client_secret: System.get_env("GITHUB_TEST_APP_CLIENT_SECRET"),
github_app_pem: pem
config :sentry,
environment_name: Mix.env || :test
config :code_corps, :sentry, CodeCorps.Sentry.Sync
config :code_corps, :processor, CodeCorps.Processor.Sync
config :code_corps, CodeCorps.Mailer,
adapter: Bamboo.TestAdapter
config :code_corps,
postmark_forgot_password_template: "123",
postmark_message_initiated_by_project_template: "123",
postmark_organization_invite_email_template: "123",
postmark_project_approval_request_template: "123",
postmark_project_approved_template: "123",
postmark_project_user_acceptance_template: "123",
postmark_project_user_request_template: "123",
postmark_receipt_template: "123",
postmark_reply_to_conversation_template: "123"
config :code_corps, :cloudex, CloudexTest
config :cloudex, api_key: "test_key", secret: "test_secret", cloud_name: "test_cloud_name"
<|start_filename|>lib/code_corps_web/views/project_view.ex<|end_filename|>
defmodule CodeCorpsWeb.ProjectView do
@moduledoc false
alias CodeCorps.StripeService.Validators.ProjectCanEnableDonations
alias CodeCorps.Presenters.ImagePresenter
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [
:approval_requested,
:approved,
:can_activate_donations,
:cloudinary_public_id,
:description,
:donations_active,
:icon_thumb_url,
:icon_large_url,
:inserted_at,
:long_description_body,
:long_description_markdown,
:should_link_externally,
:slug,
:title,
:total_monthly_donated,
:updated_at,
:website
]
has_one :organization, type: "organization", field: :organization_id
has_one :stripe_connect_plan, serializer: CodeCorpsWeb.StripeConnectPlanView
has_many :categories, serializer: CodeCorpsWeb.CategoryView, identifiers: :always
has_many :donation_goals, serializer: CodeCorpsWeb.DonationGoalView, identifiers: :always
has_many :github_repos, serializer: CodeCorpsWeb.GithubRepoView, identifiers: :always
has_many :project_categories, serializer: CodeCorpsWeb.ProjectCategoryView, identifiers: :always
has_many :project_skills, serializer: CodeCorpsWeb.ProjectSkillView, identifiers: :always
has_many :project_users, serializer: CodeCorpsWeb.ProjectUserView, identifiers: :always
has_many :skills, serializer: CodeCorpsWeb.SkillView, identifiers: :always
has_many :tasks, serializer: CodeCorpsWeb.TaskView, identifiers: :always
has_many :task_lists, serializer: CodeCorpsWeb.TaskListView, identifiers: :always
def can_activate_donations(project, _conn) do
case ProjectCanEnableDonations.validate(project) do
{:ok, _} -> true
{:error, _} -> false
end
end
def donations_active(project, _conn) do
Enum.any?(project.donation_goals) && project.stripe_connect_plan != nil
end
def icon_large_url(project, _conn), do: ImagePresenter.large(project)
def icon_thumb_url(project, _conn), do: ImagePresenter.thumbnail(project)
end
<|start_filename|>lib/code_corps/accounts/changesets.ex<|end_filename|>
defmodule CodeCorps.Accounts.Changesets do
@moduledoc ~S"""
Changesets for Code Corps accounts.
"""
import CodeCorpsWeb.Gettext
alias CodeCorps.GitHub.Adapters
alias CodeCorps.Helpers.RandomIconColor
alias Ecto.Changeset
@doc ~S"""
Casts a changeset used for creating a user account from a GitHub user payload
"""
@spec create_from_github_changeset(struct, map) :: Changeset.t
def create_from_github_changeset(struct, %{} = params) do
params =
params
|> Adapters.User.to_user()
|> Enum.reject(fn {_, v} -> is_nil(v) end)
|> Map.new()
struct
|> Changeset.change(params)
|> Changeset.put_change(:sign_up_context, "github")
|> Changeset.validate_inclusion(:type, ["bot", "user"])
|> RandomIconColor.generate_icon_color(:default_color)
|> Changeset.unique_constraint(:email)
|> Changeset.assoc_constraint(:github_user)
|> unique_github_constraint()
end
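# Illustrative sketch (hypothetical `github_user_payload` variable): the
# adapted, non-nil GitHub fields are applied to the struct, with
# `:sign_up_context` forced to "github" and a random icon color generated:
#
#   %CodeCorps.User{} |> create_from_github_changeset(github_user_payload)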
@doc ~S"""
Casts a changeset used for updating a user account from a GitHub user payload
"""
@spec update_with_github_user_changeset(struct, map) :: Changeset.t
def update_with_github_user_changeset(struct, %{} = params) do
struct
|> Changeset.cast(params, [:github_avatar_url, :github_id, :github_username, :type])
|> ensure_email_without_overwriting(params)
|> Changeset.validate_required([:github_avatar_url, :github_id, :github_username, :type])
|> Changeset.unique_constraint(:email)
|> unique_github_constraint()
end
@doc ~S"""
Casts a changeset used for updating a user account from a GitHub OAuth payload
"""
@spec update_from_github_oauth_changeset(struct, map) :: Changeset.t
def update_from_github_oauth_changeset(struct, %{} = params) do
struct
|> Changeset.cast(params, [:github_auth_token, :github_avatar_url, :github_id, :github_username, :type])
|> ensure_email_without_overwriting(params)
|> Changeset.validate_required([:github_auth_token, :github_avatar_url, :github_id, :github_username, :type])
|> Changeset.unique_constraint(:email)
|> unique_github_constraint()
end
def dissociate_github_user_changeset(struct, %{} = params) do
struct
|> Changeset.cast(params, [:github_id, :github_id_was])
|> Changeset.unique_constraint(:github_id_was)
|> unique_github_constraint()
end
@spec ensure_email_without_overwriting(Changeset.t, map) :: Changeset.t
defp ensure_email_without_overwriting(%Changeset{} = changeset, %{"email" => new_email} = _params) do
case changeset |> Changeset.get_field(:email) do
nil -> changeset |> Changeset.put_change(:email, new_email)
_email -> changeset
end
end
defp ensure_email_without_overwriting(%Changeset{} = changeset, _params), do: changeset
defp unique_github_constraint(struct) do
struct
|> Changeset.unique_constraint(:github_id, message: dgettext("errors", "account is already connected to someone else"))
end
end
<|start_filename|>lib/code_corps_web/views/task_list_view.ex<|end_filename|>
defmodule CodeCorpsWeb.TaskListView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:done, :inbox, :name, :order, :pull_requests, :inserted_at, :updated_at]
has_one :project, type: "project", field: :project_id
has_many :tasks, serializer: CodeCorpsWeb.TaskView, identifiers: :always
end
<|start_filename|>lib/code_corps/messages/conversations.ex<|end_filename|>
defmodule CodeCorps.Messages.Conversations do
@moduledoc ~S"""
Subcontext for managing `CodeCorps.Conversation` records, each aimed at a
specific user and belonging to a `CodeCorps.Message`.
"""
alias Ecto.Changeset
alias CodeCorps.{Conversation}
@doc false
@spec create_changeset(Conversation.t, map) :: Ecto.Changeset.t
def create_changeset(%Conversation{} = conversation, %{} = attrs) do
conversation
|> Changeset.cast(attrs, [:user_id])
|> Changeset.validate_required([:user_id])
|> Changeset.assoc_constraint(:user)
end
@doc false
@spec part_added_changeset(Conversation.t) :: Ecto.Changeset.t
def part_added_changeset(%Conversation{} = conversation) do
params = %{
status: "open",
updated_at: Ecto.DateTime.utc()
}
conversation
|> Changeset.cast(params, [:status, :updated_at])
end
end
<|start_filename|>lib/code_corps/skills/skills.ex<|end_filename|>
defmodule CodeCorps.Skills do
@moduledoc ~S"""
Work with skills.
"""
alias CodeCorps.{
Repo,
Skill,
UserSkill
}
import Ecto.Query
@doc """
Find the most popular skills, in order, with a limit.
"""
@spec popular(map) :: [Skill.t]
def popular(params \\ %{})
def popular(%{"limit" => limit}), do: limit |> Integer.parse() |> apply_limit()
def popular(_), do: do_popular()
defp apply_limit({limit, _rem}) when limit <= 100, do: do_popular(limit)
defp apply_limit(_), do: do_popular()
@spec do_popular(pos_integer) :: [Skill.t]
def do_popular(limit \\ 10) do
query =
from s in Skill,
join: us in UserSkill,
on: s.id == us.skill_id,
group_by: s.id,
order_by: [desc: count(us.skill_id)],
limit: ^limit
query
|> Repo.all()
end
end
<|start_filename|>lib/code_corps_web/plugs/current_user.ex<|end_filename|>
defmodule CodeCorpsWeb.Plug.CurrentUser do
@moduledoc """
Puts authenticated Guardian user into conn.assigns[:current_user]
"""
@spec init(Keyword.t) :: Keyword.t
def init(opts), do: opts
@spec call(Plug.Conn.t, Keyword.t) :: Plug.Conn.t
def call(conn, _opts) do
case Guardian.Plug.current_resource(conn) do
user = %CodeCorps.User{} ->
Plug.Conn.assign(conn, :current_user, user)
nil ->
conn
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/user_category_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.UserCategoryControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :user_category
describe "index" do
test "lists all entries on index", %{conn: conn} do
[user_category_1, user_category_2] = insert_pair(:user_category)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([user_category_1.id, user_category_2.id])
end
test "filters resources on index", %{conn: conn} do
[user_category_1, user_category_2 | _] = insert_list(3, :user_category)
path = "user-categories/?filter[id]=#{user_category_1.id},#{user_category_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([user_category_1.id, user_category_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
user_category = insert(:user_category)
conn
|> request_show(user_category)
|> json_response(200)
|> assert_id_from_response(user_category.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
category = insert(:category)
attrs = (%{user: current_user, category: category})
assert conn |> request_create(attrs) |> json_response(201)
user_id = current_user.id
tracking_properties = %{
category: category.name,
category_id: category.id
}
assert_received {:track, ^user_id, "Added User Category", ^tracking_properties}
end
@tag :authenticated
test "renders 422 when data is invalid", %{conn: conn, current_user: current_user} do
category = build(:category)
invalid_attrs = %{category: category, user: current_user}
assert conn |> request_create(invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
category = insert(:category)
user = insert(:user)
attrs = %{category: category, user: user}
assert conn |> request_create(attrs) |> json_response(403)
end
end
describe "delete" do
@tag authenticated: :admin
test "deletes resource", %{conn: conn, current_user: current_user} do
user_category = insert(:user_category)
assert conn |> request_delete(user_category.id) |> response(204)
user_id = current_user.id
tracking_properties = %{
category: user_category.category.name,
category_id: user_category.category.id
}
assert_received {:track, ^user_id, "Removed User Category", ^tracking_properties}
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent on delete", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>test/lib/code_corps/emails/reply_to_conversation_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.ReplyToConversationEmailTest do
use CodeCorps.DbAccessCase
use Bamboo.Test
alias CodeCorps.Emails.ReplyToConversationEmail
test "email works" do
message = insert(:message)
preloads = [:author, conversation: [message: [[project: :organization]]]]
conversation = insert(:conversation, message: message)
conversation_part =
:conversation_part
|> insert(conversation: conversation)
|> Repo.preload(preloads)
%{project: %{organization: %{slug: slug}, slug: project_slug} = project} = message
user = insert(:user)
email = ReplyToConversationEmail.create(conversation_part, user)
assert email.from == "Code Corps<<EMAIL>>"
assert email.to == user.email
template_model = email.private.template_model
assert template_model == %{
author_name: conversation_part.author.first_name,
conversation_url: "http://localhost:4200/#{slug}/#{project_slug}/conversations/#{conversation.id}",
name: user.first_name,
project_title: project.title,
subject: "#{conversation_part.author.first_name} replied to your conversation in #{project.title}"
}
end
end
<|start_filename|>lib/code_corps_web/controllers/category_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.CategoryController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Category, Repo, User, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
categories = Category |> Query.id_filter(params) |> Repo.all |> preload()
conn |> render("index.json-api", data: categories)
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %Category{} = category <- Category |> Repo.get(id) |> preload() do
conn |> render("show.json-api", data: category)
end
end
@spec create(Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %Category{}, params),
{:ok, %Category{} = category} <- %Category{} |> Category.create_changeset(params) |> Repo.insert,
category <- preload(category)
do
conn |> put_status(:created) |> render("show.json-api", data: category)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %Category{} = category <- Category |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, category),
{:ok, %Category{} = category} <- category |> Category.changeset(params) |> Repo.update,
category <- preload(category)
do
conn |> render("show.json-api", data: category)
end
end
@preloads [:project_categories]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>test/lib/code_corps_web/controllers/organization_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.OrganizationControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :organization
@valid_attrs %{
cloudinary_public_id: "foo",
description: "Build a better future.",
name: "<NAME>"
}
@invalid_attrs %{name: ""}
describe "index" do
test "lists all entries on index", %{conn: conn} do
[organization_1, organization_2] = insert_pair(:organization)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([organization_1.id, organization_2.id])
end
test "filters resources on index", %{conn: conn} do
[organization_1, organization_2 | _] = insert_list(3, :organization)
path = "organizations/?filter[id]=#{organization_1.id},#{organization_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([organization_1.id, organization_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
organization = insert(:organization)
conn
|> request_show(organization)
|> json_response(200)
|> assert_id_from_response(organization.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid and invite exists", %{conn: conn, current_user: user} do
insert(:organization_invite, code: "valid")
attrs = Map.merge(@valid_attrs, %{owner: user, invite_code: "valid"})
assert conn |> request_create(attrs) |> json_response(201)
end
@tag :authenticated
test "renders 403 when data is valid but invite does not exist", %{conn: conn, current_user: user} do
attrs = Map.merge(@valid_attrs, %{owner: user, invite_code: "invalid"})
assert conn |> request_create(attrs) |> json_response(403)
end
@tag authenticated: :admin
test "renders 422 when data is invalid", %{conn: conn} do
assert conn |> request_create(@invalid_attrs) |> json_response(422)
end
@tag authenticated: :admin
test "creates and renders resource when data is valid and user is admin", %{conn: conn, current_user: user} do
attrs = Map.merge(@valid_attrs, %{owner: user})
assert conn |> request_create(attrs) |> json_response(201)
end
test "renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when data is valid, but no invite and user not admin", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "update" do
@tag authenticated: :admin
test "updates and renders chosen resource when data is valid", %{conn: conn} do
assert conn |> request_update(@valid_attrs) |> json_response(200)
end
@tag authenticated: :admin
test "renders 422 when data is invalid", %{conn: conn} do
assert conn |> request_update(@invalid_attrs) |> json_response(422)
end
test "renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_update |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_update(:not_found) |> json_response(404)
end
end
end
<|start_filename|>lib/code_corps_web/views/task_skill_view.ex<|end_filename|>
defmodule CodeCorpsWeb.TaskSkillView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
has_one :task, type: "task", field: :task_id
has_one :skill, type: "skill", field: :skill_id
end
<|start_filename|>test/lib/code_corps/github/api/api_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.APITest do
@moduledoc false
use ExUnit.Case
alias CodeCorps.GitHub.{
API, API.Errors.PaginationError, APIError, HTTPClientError}
describe "request/5" do
defmodule MockAPI do
def request(_method, "/error", _body, _headers, _opts) do
{:error, %HTTPoison.Error{reason: "Intentional Test Error"}}
end
def request(_method, url, _body, _headers, _opts) do
response =
%HTTPoison.Response{}
|> Map.put(:body, url |> body())
|> Map.put(:request_url, url)
|> Map.put(:status_code, url |> code)
{:ok, response}
end
defp body("/200"), do: %{"bar" => "baz"} |> Poison.encode!
defp body("/200-bad"), do: "bad"
defp body("/400"), do: %{"message" => "baz"} |> Poison.encode!
defp body("/400-bad"), do: "bad"
defp body("/404"), do: %{"message" => "Not Found"} |> Poison.encode!
defp code("/200" <> _rest), do: 200
defp code("/400" <> _rest), do: 400
defp code("/404" <> _rest), do: 404
end
setup do
old_mock = Application.get_env(:code_corps, :github)
Application.put_env(:code_corps, :github, MockAPI)
on_exit fn ->
Application.put_env(:code_corps, :github, old_mock)
end
:ok
end
test "handles a 200..299 response" do
{:ok, response} = API.request(:get, "/200", %{}, [], [])
assert response == %{"bar" => "baz"}
end
test "handles a decode error for a 200..299 response" do
{:error, response} = API.request(:get, "/200-bad", %{}, [], [])
assert response == HTTPClientError.new([reason: :body_decoding_error])
end
test "handles a 404 response" do
{:error, response} = API.request(:get, "/404", %{}, [], [])
assert response ==
APIError.new({404, %{"message" => "{\"message\":\"Not Found\"}"}})
end
test "handles a 400 response" do
{:error, response} = API.request(:get, "/400", %{}, [], [])
assert response == APIError.new({400, %{"message" => "baz"}})
end
test "handles a decode error for a 400..599 response" do
{:error, response} = API.request(:get, "/400-bad", %{}, [], [])
assert response == HTTPClientError.new([reason: :body_decoding_error])
end
test "handles a client error" do
{:error, %HTTPClientError{reason: reason}} =
API.request(:get, "/error", %{}, [], [])
assert reason == "Intentional Test Error"
end
end
describe "get_all/3" do
defmodule MockPaginationAPI do
def request(:head, "/one-page", _body, _headers, _opts) do
{:ok, %HTTPoison.Response{body: "", headers: [], status_code: 200}}
end
def request(:get, "/one-page", _body, _headers, [params: [page: 1]]) do
body = [1] |> Poison.encode!
{:ok, %HTTPoison.Response{body: body, status_code: 200}}
end
def request(:head, "/two-pages", _body, _headers, _opts) do
next = '<two-pages?page=2>; rel="next"'
last = '<two-pages?page=2>; rel="last"'
headers = [{"Link", [next, last] |> Enum.join(", ")}]
{:ok, %HTTPoison.Response{body: "", headers: headers, status_code: 200}}
end
def request(:get, "/two-pages", _body, _headers, [params: [page: 1]]) do
body = [1, 2] |> Poison.encode!
{:ok, %HTTPoison.Response{body: body, status_code: 200}}
end
def request(:get, "/two-pages", _body, _headers, [params: [page: 2]]) do
body = [3] |> Poison.encode!
{:ok, %HTTPoison.Response{body: body, status_code: 200}}
end
def request(:head, "/pages-with-errors", _body, _headers, _opts) do
next = '<three-pages-with-errors?page=2>; rel="next"'
last = '<three-pages-with-errors?page=4>; rel="last"'
headers = [{"Link", [next, last] |> Enum.join(", ")}]
{:ok, %HTTPoison.Response{body: "", headers: headers, status_code: 200}}
end
def request(:get, "/pages-with-errors", _body, _headers, [params: [page: 1]]) do
body = [1, 2] |> Poison.encode!
{:ok, %HTTPoison.Response{body: body, status_code: 200}}
end
def request(:get, "/pages-with-errors", _body, _headers, [params: [page: 2]]) do
{:error, %HTTPoison.Error{reason: "Test Client Error"}}
end
def request(:get, "/pages-with-errors", _body, _headers, [params: [page: 3]]) do
body = %{"message" => "Test API Error"}
{:ok, %HTTPoison.Response{body: body |> Poison.encode!, status_code: 400}}
end
def request(:get, "/pages-with-errors", _body, _headers, [params: [page: 4]]) do
errors = [
%{"code" => 1, "field" => "foo", "resource" => "/foo"},
%{"code" => 2, "field" => "bar", "resource" => "/bar"}
]
body = %{"message" => "Test API Error", "errors" => errors}
{:ok, %HTTPoison.Response{body: body |> Poison.encode!, status_code: 400}}
end
def request(:head, "/head-client-error", _body, _headers, _opts) do
{:error, %HTTPoison.Error{reason: "Test Client Error"}}
end
def request(:head, "/head-api-error", _body, _headers, _opts) do
{:ok, %HTTPoison.Response{body: "", status_code: 400}}
end
end
setup do
old_mock = Application.get_env(:code_corps, :github)
Application.put_env(:code_corps, :github, MockPaginationAPI)
on_exit fn ->
Application.put_env(:code_corps, :github, old_mock)
end
:ok
end
test "works when there's just one page" do
assert {:ok, [1]} == API.get_all("/one-page", [], [])
end
test "works with multiple pages" do
assert {:ok, [1, 2, 3]} == API.get_all("/two-pages", [], [])
end
test "fails properly when pages respond in errors" do
{:error, %PaginationError{} = error} =
API.get_all("/pages-with-errors", [], [])
assert error.retrieved_pages |> Enum.count == 1
assert error.api_errors |> Enum.count == 2
assert error.client_errors |> Enum.count == 1
end
test "fails properly when initial head request fails with a client error" do
{:error, %HTTPClientError{} = error} = API.get_all("/head-client-error", [], [])
assert error
end
test "fails properly when initial head request fails with an api error" do
{:error, %APIError{} = error} = API.get_all("/head-api-error", [], [])
assert error
end
end
end
<|start_filename|>priv/repo/migrations/20171031234356_create_github_users.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.CreateGithubUsers do
use Ecto.Migration
def change do
create table(:github_users) do
add :avatar_url, :string
add :email, :string
add :github_id, :integer
add :username, :string
add :type, :string
timestamps()
end
create index(:github_users, [:github_id], unique: true)
alter table(:users) do
add :github_user_id, references(:github_users, on_delete: :nothing)
end
create index(:users, [:github_user_id], unique: true)
alter table(:github_issues) do
add :github_user_id, references(:github_users, on_delete: :nothing)
end
create index(:github_issues, [:github_user_id])
alter table(:github_pull_requests) do
add :github_user_id, references(:github_users, on_delete: :nothing)
end
create index(:github_pull_requests, [:github_user_id])
alter table(:github_comments) do
add :github_user_id, references(:github_users, on_delete: :nothing)
end
create index(:github_comments, [:github_user_id])
end
end
<|start_filename|>lib/code_corps/policy/stripe_connect_plan.ex<|end_filename|>
defmodule CodeCorps.Policy.StripeConnectPlan do
import CodeCorps.Policy.Helpers, only: [get_project: 1, owned_by?: 2]
alias CodeCorps.{StripeConnectPlan, User}
@spec show?(User.t, StripeConnectPlan.t) :: boolean
def show?(%User{} = user, %StripeConnectPlan{} = plan) do
plan |> get_project |> owned_by?(user)
end
@spec create?(User.t, map) :: boolean
def create?(%User{} = user, %{} = params) do
params |> get_project |> owned_by?(user)
end
end
<|start_filename|>test/support/api_case.ex<|end_filename|>
defmodule CodeCorpsWeb.ApiCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection, specifically
those working with the API endpoints.
It's basically a clone of CodeCorpsWeb.ConnCase, with some extras added,
mainly authentication and proper headers.
If provided with a :resource_name option, it dynamically
generates higher-level request helper methods.
## Examples
use ApiCase, resource_name: :task
use ApiCase, resource_name: :comment
"""
import CodeCorps.Factories
use ExUnit.CaseTemplate
use Phoenix.ConnTest
using(opts) do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias CodeCorps.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import CodeCorps.AuthenticationTestHelpers
import CodeCorpsWeb.Router.Helpers
import CodeCorps.Factories
import CodeCorps.TestHelpers
# The default endpoint for testing
@endpoint CodeCorpsWeb.Endpoint
CodeCorpsWeb.ApiCase.define_request_helper_methods(unquote(opts))
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(CodeCorps.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(CodeCorps.Repo, {:shared, self()})
end
conn = cond do
tags[:github_webhook] ->
%{build_conn() | host: "api."}
|> put_req_header("accept", "application/json")
|> put_req_header("content-type", "application/json")
true ->
%{build_conn() | host: "api."}
|> put_req_header("accept", "application/vnd.api+json")
|> put_req_header("content-type", "application/vnd.api+json")
end
{conn, current_user} = cond do
tags[:authenticated] ->
conn |> add_authentication_headers(tags[:authenticated])
true ->
{conn, nil}
end
{:ok, conn: conn, current_user: current_user}
end
defp add_authentication_headers(conn, true) do
user = insert(:user)
conn = conn |> CodeCorps.AuthenticationTestHelpers.authenticate(user)
{conn, user}
end
defp add_authentication_headers(conn, :admin) do
admin = insert(:user, admin: true)
conn = conn |> CodeCorps.AuthenticationTestHelpers.authenticate(admin)
{conn, admin}
end
defmacro define_request_helper_methods(resource_name: resource_name), do: do_add_request_helper_methods(resource_name)
defmacro define_request_helper_methods(_), do: nil
defp do_add_request_helper_methods(resource_name) do
quote do
defp factory_name, do: unquote(resource_name)
defp path_helper_method, do: "#{unquote(resource_name)}_path" |> String.to_atom
defp default_record, do: insert(unquote(resource_name))
defp path_for(conn, action, resource_or_id) do
apply(CodeCorpsWeb.Router.Helpers, path_helper_method(), [conn, action, resource_or_id])
end
defp path_for(conn, action) do
apply(CodeCorpsWeb.Router.Helpers, path_helper_method(), [conn, action])
end
def request_index(conn) do
path = conn |> path_for(:index)
conn |> get(path)
end
def request_show(conn, :not_found), do: conn |> request_show(-1)
def request_show(conn, resource_or_id) do
path = conn |> path_for(:show, resource_or_id)
conn |> get(path)
end
def request_create(conn, attrs \\ %{}) do
path = conn |> path_for(:create)
payload = CodeCorps.JsonAPIHelpers.build_json_payload(attrs)
conn |> post(path, payload)
end
def request_update(conn), do: request_update(conn, %{})
def request_update(conn, :not_found), do: request_update(conn, -1, %{})
def request_update(conn, attrs), do: request_update(conn, default_record(), attrs)
def request_update(conn, resource_or_id, attrs) do
payload = CodeCorps.JsonAPIHelpers.build_json_payload(attrs)
path = conn |> path_for(:update, resource_or_id)
conn |> put(path, payload)
end
def request_delete(conn), do: request_delete(conn, default_record())
def request_delete(conn, :not_found), do: request_delete(conn, -1)
def request_delete(conn, resource_or_id) do
path = conn |> path_for(:delete, resource_or_id)
conn |> delete(path)
end
end
end
end
<|start_filename|>lib/code_corps_web/controllers/stripe_connect_plan_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectPlanController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{StripeConnectPlan, User}
alias CodeCorps.StripeService.StripeConnectPlanService
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
%StripeConnectPlan{} = stripe_platform_plan <- StripeConnectPlan |> Repo.get(id),
{:ok, :authorized} <- current_user |> Policy.authorize(:show, stripe_platform_plan, params) do
conn |> render("show.json-api", data: stripe_platform_plan)
end
end
@spec create(Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %StripeConnectPlan{}, params),
{:ok, %StripeConnectPlan{} = stripe_platform_plan} <- StripeConnectPlanService.create(params) |> handle_create_result(conn) do
conn |> put_status(:created) |> render("show.json-api", data: stripe_platform_plan)
end
end
defp handle_create_result({:error, :project_not_ready}, conn) do
conn
|> put_status(422)
|> render(CodeCorpsWeb.ErrorView, "422.json-api", %{})
end
defp handle_create_result(other, _conn), do: other
end
<|start_filename|>test/lib/code_corps/github/sync/task/changeset_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Task.ChangesetTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.GitHub.Sync.Task
alias Ecto.Changeset
describe "create_changeset/4" do
test "assigns proper changes to the task" do
github_issue = insert(
:github_issue,
github_created_at: DateTime.utc_now |> Timex.shift(minutes: 1),
github_updated_at: DateTime.utc_now |> Timex.shift(hours: 1))
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, inbox: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:created_at) == github_issue.github_created_at
assert changeset |> Changeset.get_change(:markdown) == github_issue.body
assert changeset |> Changeset.get_change(:modified_at) == github_issue.github_updated_at
assert changeset |> Changeset.get_change(:title) == github_issue.title
assert changeset |> Changeset.get_field(:status) == github_issue.state
assert changeset |> Changeset.get_change(:created_from) == "github"
assert changeset |> Changeset.get_change(:modified_from) == "github"
assert changeset |> Changeset.get_change(:github_issue) |> Map.get(:data) == github_issue
assert changeset |> Changeset.get_change(:github_repo) |> Map.get(:data) == github_repo
assert changeset |> Changeset.get_change(:project_id) == github_repo.project_id
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
assert changeset |> Changeset.get_change(:user) |> Map.get(:data) == user
assert changeset |> Changeset.get_change(:position)
assert changeset |> Changeset.get_field(:archived) == false
expected_body =
github_issue.body
|> Earmark.as_html!(%Earmark.Options{code_class_prefix: "language-"})
assert Changeset.get_change(changeset, :body) == expected_body
assert changeset.valid?
end
test "assigns task to inbox list if github issue is open" do
github_issue = insert(:github_issue, state: "open")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, inbox: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to pull request list if github issue is associated with pull request" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request, state: "open")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, pull_requests: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to 'done' list if github issue is closed" do
github_issue = insert(:github_issue, state: "closed")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, done: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to 'done' list if github issue is closed and associated to pull request" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request, state: "closed")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, done: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "archives task and removes from task list if issue is closed and unmodified for over a month" do
over_a_month_ago = Timex.now |> Timex.shift(days: -35)
github_pull_request = insert(:github_pull_request)
github_issue = insert(
:github_issue,
github_pull_request: github_pull_request,
state: "closed",
github_updated_at: over_a_month_ago)
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
insert(:task_list, project: project, done: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_field(:archived) == true
assert changeset |> Changeset.get_field(:task_list_id) == nil
end
test "does not fail and instead returns invalid changeset if no task list matched" do
github_issue = insert(
:github_issue,
github_created_at: DateTime.utc_now |> Timex.shift(minutes: 1),
github_updated_at: DateTime.utc_now |> Timex.shift(hours: 1))
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
refute changeset.valid?
end
end
describe "update_changeset/3" do
test "assigns proper changes to the task" do
github_issue = insert(
:github_issue,
github_created_at: DateTime.utc_now |> Timex.shift(minutes: 1),
github_updated_at: DateTime.utc_now |> Timex.shift(hours: 1))
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, inbox: true)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:markdown) == github_issue.body
assert changeset |> Changeset.get_change(:modified_at) == github_issue.github_updated_at
assert changeset |> Changeset.get_change(:title) == github_issue.title
assert changeset |> Changeset.get_field(:status) == github_issue.state
refute changeset |> Changeset.get_change(:created_from)
assert changeset |> Changeset.get_change(:modified_from) == "github"
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
assert changeset |> Changeset.get_change(:position)
assert changeset |> Changeset.get_field(:archived) == false
expected_body =
github_issue.body
|> Earmark.as_html!(%Earmark.Options{code_class_prefix: "language-"})
assert Changeset.get_change(changeset, :body) == expected_body
assert changeset.valid?
end
test "validates that modified_at has not already happened" do
project = insert(:project)
github_issue = insert(:github_issue, github_updated_at: DateTime.utc_now |> Timex.shift(minutes: -1), state: "open")
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
insert(:task_list, project: project, inbox: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
refute changeset.valid?
assert changeset.errors[:modified_at] == {"cannot be before the last recorded time", []}
end
test "assigns task to inbox list if github issue is open" do
github_issue = insert(:github_issue, state: "open")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
task_list = insert(:task_list, project: project, inbox: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to pull request list if github issue is associated with pull request" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request, state: "open")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
task_list = insert(:task_list, project: project, pull_requests: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to 'done' list if github issue is closed" do
github_issue = insert(:github_issue, state: "closed")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
task_list = insert(:task_list, project: project, done: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to 'done' list if github issue is closed and associated to pull request" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request, state: "closed")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
task_list = insert(:task_list, project: project, done: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "archives task and removes from task list if issue is closed and unmodified for over a month" do
over_a_month_ago = Timex.now |> Timex.shift(days: -35)
github_pull_request = insert(:github_pull_request)
github_issue = insert(
:github_issue,
github_pull_request: github_pull_request,
state: "closed",
github_updated_at: over_a_month_ago)
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
insert(:task_list, project: project, done: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_field(:archived) == true
assert changeset |> Changeset.get_field(:task_list_id) == nil
end
test "does not fail and instead returns invalid changeset if no task list matched" do
github_issue = insert(
:github_issue,
github_created_at: DateTime.utc_now |> Timex.shift(minutes: 1),
github_updated_at: DateTime.utc_now |> Timex.shift(hours: 1))
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
refute changeset.valid?
end
end
end
<|start_filename|>lib/code_corps/guardian.ex<|end_filename|>
defmodule CodeCorps.Guardian do
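@moduledoc ~S"""
Implements the `Guardian` callbacks used by authentication: encoding a token
subject for `CodeCorps.User` and `CodeCorps.Project` records and resolving
that subject back into the corresponding resource.
"""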
use Guardian, otp_app: :code_corps
alias CodeCorps.{Project, Repo, User}
def subject_for_token(project = %Project{}, _claims) do
{:ok, "Project:#{project.id}"}
end
def subject_for_token(user = %User{}, _claims) do
{:ok, "User:#{user.id}"}
end
def subject_for_token(_, _) do
{:error, :unknown_resource_type}
end
def resource_from_claims(%{"sub" => sub}), do: resource_from_subject(sub)
def resource_from_claims(_), do: {:error, :missing_subject}
defp resource_from_subject("Project:" <> id), do: {:ok, Repo.get(Project, id)}
defp resource_from_subject("User:" <> id), do: {:ok, Repo.get(User, id)}
defp resource_from_subject(_), do: {:error, :unknown_resource_type}
end
<|start_filename|>lib/code_corps/github/sync/user/user.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.User do
@moduledoc ~S"""
In charge of finding or creating a `User` given a `GithubUser`.
"""
import Ecto.Query
alias CodeCorps.{
Accounts,
GithubComment,
GithubIssue,
GithubRepo,
GithubUser,
GitHub.Utils.ResultAggregator,
Repo,
User
}
def sync_github_repo(%GithubRepo{} = github_repo) do
%GithubRepo{
github_comments: github_comments,
github_issues: github_issues
} = github_repo
comment_users = find_users_for_comments(github_comments)
issue_users = find_users_for_issues(github_issues)
comment_users
|> Enum.concat(issue_users)
|> Enum.uniq()
|> Enum.map(&create_or_update_user/1)
|> ResultAggregator.aggregate
end
defp find_users_for_comments(github_comments) do
github_comment_ids = Enum.map(github_comments, fn c -> c.id end)
query = from gu in GithubUser,
distinct: gu.id,
join: gc in GithubComment, on: gu.id == gc.github_user_id, where: gc.id in ^github_comment_ids
query |> Repo.all
end
defp find_users_for_issues(github_issues) do
github_issue_ids = Enum.map(github_issues, fn i -> i.id end)
query = from gu in GithubUser,
distinct: gu.id,
join: gi in GithubIssue, on: gu.id == gi.github_user_id, where: gi.id in ^github_issue_ids
query |> Repo.all
end
@doc ~S"""
Creates or updates a `User` given a `GithubUser`.
"""
@spec create_or_update_user(GithubUser.t) :: {:ok, User.t}
def create_or_update_user(%GithubUser{github_id: github_id} = github_user) do
case User |> Repo.get_by(github_id: github_id) |> Repo.preload([:github_user]) do
nil -> Accounts.create_from_github_user(github_user)
%User{} = user -> user |> Accounts.update_with_github_user(github_user)
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/project_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ProjectControllerTest do
@moduledoc false
use CodeCorpsWeb.ApiCase, resource_name: :project
use Bamboo.Test
alias CodeCorps.{Analytics.SegmentTraitsBuilder, Emails, Project, Repo}
@valid_attrs %{
cloudinary_public_id: "foo123",
description: "Valid description",
title: "Valid project"
}
@invalid_attrs %{title: ""}
describe "index" do
test "filters on index", %{conn: conn} do
[project_1, project_2] = insert_pair(:project, approved: true)
project_3 = insert(:project, approved: false)
path = "/projects?approved=true"
returned_ids =
conn
|> get(path)
|> json_response(200)
|> ids_from_response
assert project_1.id in returned_ids
assert project_2.id in returned_ids
refute project_3.id in returned_ids
end
test "lists all entries for organization specified by slug", %{conn: conn} do
organization_slug = "test-organization"
organization = insert(:organization, name: "Test Organization", slug: organization_slug)
insert(:slugged_route, organization: organization, slug: organization_slug)
[project_1, project_2] = insert_pair(:project, organization: organization)
project_3 = insert(:project)
path = ("/#{organization_slug}/projects")
returned_ids =
conn
|> get(path)
|> json_response(200)
|> ids_from_response
assert project_1.id in returned_ids
assert project_2.id in returned_ids
refute project_3.id in returned_ids
end
test "listing by organization slug is case insensitive", %{conn: conn} do
organization = insert(:organization)
insert(:slugged_route, slug: "codecorps", organization: organization)
assert conn |> get("/codeCorps/projects") |> json_response(200)
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
project = insert(:project)
conn
|> request_show(project)
|> json_response(200)
|> assert_id_from_response(project.id)
end
test "shows chosen resource retrieved by slug", %{conn: conn} do
organization = insert(:organization)
project = insert(:project, organization: organization)
path = "#{organization.slug}/#{project.slug}"
conn
|> get(path)
|> json_response(200)
|> assert_id_from_response(project.id)
end
test "retrieval by slug is case insensitive", %{conn: conn} do
organization = insert(:organization, slug: "codecorps")
insert(:project, slug: "codecorpsproject", organization: organization)
assert conn |> get("codeCorps/codeCorpsProject") |> json_response(200)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when attributes are valid", %{conn: conn, current_user: current_user} do
category = insert(:category)
organization = insert(:organization, owner: current_user)
skill = insert(:skill)
params = %{
categories: [category],
organization: organization,
skills: [skill]
}
attrs = @valid_attrs |> Map.merge(params)
response = conn |> request_create(attrs)
assert %{assigns: %{data: %{task_lists: [_inbox, _backlog, _in_progress, _done]}}} = response
assert response |> json_response(201)
user_id = current_user.id
traits = Project |> Repo.one() |> SegmentTraitsBuilder.build
assert_received {:track, ^user_id, "Created Project", ^traits}
end
@tag :authenticated
test "renders 422 when attributes are invalid", %{conn: conn, current_user: current_user} do
organization = insert(:organization, owner: current_user)
attrs = @invalid_attrs |> Map.merge(%{organization: organization})
assert conn |> request_create(attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
# Need the organization in order to access membership in the project policy
attrs = %{organization: insert(:organization)}
assert conn |> request_create(attrs) |> json_response(403)
end
end
describe "update" do
@tag :authenticated
test "updates and renders resource when attributes are valid", %{conn: conn, current_user: current_user} do
project = insert(:project, approval_requested: false)
insert(:project_user, project: project, user: current_user, role: "owner")
insert(:user, admin: true)
attrs = @valid_attrs |> Map.merge(%{approval_requested: true})
assert conn |> request_update(project, attrs) |> json_response(200)
project =
Project
|> Repo.get_by(approved: true)
|> Repo.preload([:organization])
email =
project
|> Emails.ProjectApprovalRequestEmail.create()
assert_delivered_email(email)
user_id = current_user.id
traits = project |> SegmentTraitsBuilder.build
assert_received {:track, ^user_id, "Requested Project Approval", ^traits}
end
@tag authenticated: :admin
test "sends the approved email when approved", %{conn: conn, current_user: current_user} do
project = insert(:project, approved: false)
insert(:project_user, project: project, role: "owner")
attrs = @valid_attrs |> Map.merge(%{approved: true})
assert conn |> request_update(project, attrs) |> json_response(200)
project =
Project
|> Repo.get_by(approved: true)
|> Repo.preload([:organization])
email =
project
|> Emails.ProjectApprovedEmail.create()
assert_delivered_email(email)
user_id = current_user.id
traits = project |> SegmentTraitsBuilder.build
assert_received {:track, ^user_id, "Approved Project", ^traits}
end
@tag :authenticated
test "renders errors when attributes are invalid", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
assert conn |> request_update(project, @invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
# Need the organization in order to access membership in the project policy
attrs = %{organization: insert(:organization)}
assert conn |> request_update(attrs) |> json_response(403)
end
@tag authenticated: :admin
test "renders 404 when not found", %{conn: conn} do
assert conn |> request_update(:not_found) |> json_response(404)
end
end
end
<|start_filename|>priv/repo/migrations/20171027061833_add_more_indexes_again.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddMoreIndexesAgain do
use Ecto.Migration
def change do
create index(:auth_token, [:value])
create index(:github_events, [:github_delivery_id], unique: true)
create index(:github_events, [:status])
create index(:stripe_external_accounts, [:id_from_stripe], unique: true)
create index(:stripe_file_upload, [:id_from_stripe], unique: true)
create index(:tasks, [:number])
create index(:tasks, [:order])
create index(:task_lists, [:inbox])
create index(:task_lists, [:order])
end
end
<|start_filename|>test/lib/code_corps/model/message_test.exs<|end_filename|>
defmodule CodeCorps.MessageTest do
use CodeCorps.ModelCase
alias CodeCorps.Message
@valid_admin_initiated_attrs %{
body: "Test body.",
initiated_by: "admin",
subject: "Test subject"
}
@valid_user_initiated_attrs %{
body: "Test body.",
initiated_by: "user"
}
@invalid_attrs %{}
describe "changeset" do
test "when initiated by an admin with valid attributes" do
changeset = Message.changeset(%Message{}, @valid_admin_initiated_attrs)
assert changeset.valid?
end
test "when initiated by an admin with invalid attributes" do
changeset = Message.changeset(%Message{}, @invalid_attrs)
refute changeset.valid?
end
test "when initiated by a user with valid attributes" do
changeset = Message.changeset(%Message{}, @valid_user_initiated_attrs)
assert changeset.valid?
end
test "when initiated by a user with invalid attributes" do
changeset = Message.changeset(%Message{}, @invalid_attrs)
refute changeset.valid?
end
test "when initiated by an unknown source" do
attrs = %{body: "Test body.", initiated_by: "invalid"}
changeset = Message.changeset(%Message{}, attrs)
refute changeset.valid?
end
end
end
<|start_filename|>lib/code_corps/github/adapters/user.ex<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.User do
@moduledoc """
Used to adapt a GitHub user payload into attributes for creating or updating
a `CodeCorps.User` or a `CodeCorps.GithubUser`.
"""
alias CodeCorps.{
Adapter.MapTransformer,
GithubUser
}
@user_mapping [
{:github_avatar_url, ["avatar_url"]},
{:github_id, ["id"]},
{:github_username, ["login"]},
{:email, ["email"]},
{:type, ["type"]}
]
@doc ~S"""
Converts a GitHub user payload into a map of attributes suitable for creating
or updating a `CodeCorps.User`.
The `type` gets transformed to match our expected values for user type.
"""
@spec to_user(map) :: map
def to_user(%{} = payload) do
payload
|> CodeCorps.Adapter.MapTransformer.transform(@user_mapping)
|> transform_type
end
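# A minimal sketch of the mapping performed by to_user/1, assuming
# MapTransformer.transform/2 copies each payload path to the mapped atom key
# (values below are illustrative, not from a real GitHub response):
#
#   to_user(%{"avatar_url" => "https://example.com/a.png", "id" => 1,
#             "login" => "octocat", "email" => nil, "type" => "User"})
#   #=> %{github_avatar_url: "https://example.com/a.png", github_id: 1,
#   #     github_username: "octocat", email: nil, type: "user"}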
@github_user_mapping [
{:avatar_url, ["avatar_url"]},
{:github_id, ["id"]},
{:username, ["login"]},
{:email, ["email"]},
{:type, ["type"]}
]
@doc ~S"""
Converts a GitHub User payload into a set of attributes used to create or
update a `GithubUser` record.
"""
@spec to_github_user(map) :: map
def to_github_user(%{} = payload) do
payload |> CodeCorps.Adapter.MapTransformer.transform(@github_user_mapping)
end
@doc ~S"""
Converts a `GithubUser` into a set of attributes used to create or update a
GitHub User on the GitHub API.
"""
@spec to_user_attrs(GithubUser.t) :: map
def to_user_attrs(%GithubUser{} = github_user) do
github_user
|> Map.from_struct()
|> MapTransformer.transform_inverse(@github_user_mapping)
end
@spec transform_type(map) :: map
defp transform_type(%{:type => "Bot"} = map), do: Map.put(map, :type, "bot")
defp transform_type(%{:type => "Organization"} = map), do: Map.put(map, :type, "organization")
defp transform_type(%{:type => "User"} = map), do: Map.put(map, :type, "user")
defp transform_type(map), do: map
end
<|start_filename|>lib/code_corps_web/views/organization_view.ex<|end_filename|>
defmodule CodeCorpsWeb.OrganizationView do
@moduledoc false
alias CodeCorps.Presenters.ImagePresenter
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [
:approved, :cloudinary_public_id, :description, :icon_thumb_url,
:icon_large_url, :name, :slug, :inserted_at, :updated_at
]
has_one :owner, type: "user", field: :owner_id
has_one :slugged_route, serializer: CodeCorpsWeb.SluggedRouteView
has_one :stripe_connect_account, type: "stripe-connect-account", serializer: CodeCorpsWeb.StripeConnectAccountView
has_many :organization_github_app_installations, serializer: CodeCorpsWeb.OrganizationGithubAppInstallationView, identifiers: :always
has_many :projects, serializer: CodeCorpsWeb.ProjectView, identifiers: :always
def icon_large_url(organization, _conn), do: ImagePresenter.large(organization)
def icon_thumb_url(organization, _conn), do: ImagePresenter.thumbnail(organization)
end
<|start_filename|>test/lib/code_corps/cloudex/uploader_test.exs<|end_filename|>
defmodule CodeCorps.Cloudex.UploaderTest do
alias CodeCorps.Cloudex.Uploader
use ExUnit.Case, async: true
test "returns the public_id" do
{:ok, %Cloudex.UploadedImage{public_id: public_id}} =
"https://placehold.it/500x500"
|> Uploader.upload
assert public_id
end
end
<|start_filename|>test/lib/code_corps/model/stripe_connect_account_test.exs<|end_filename|>
defmodule CodeCorps.StripeConnectAccountTest do
@moduledoc false
use CodeCorps.ModelCase
alias CodeCorps.StripeConnectAccount
@valid_attrs %{
id_from_stripe: "abc123",
tos_acceptance_date: 1_234_567
}
@invalid_attrs %{}
describe "create_changeset/2" do
test "reports as valid when attributes are valid" do
organization_id = insert(:organization).id
changes = Map.merge(@valid_attrs, %{organization_id: organization_id})
changeset = StripeConnectAccount.create_changeset(%StripeConnectAccount{}, changes)
assert changeset.valid?
end
test "reports as invalid when attributes are invalid" do
changeset = StripeConnectAccount.create_changeset(%StripeConnectAccount{}, @invalid_attrs)
refute changeset.valid?
assert_validation_triggered(changeset, :id_from_stripe, :required)
assert_validation_triggered(changeset, :organization_id, :required)
assert_validation_triggered(changeset, :tos_acceptance_date, :required)
end
test "ensures associations link to records that exist" do
attrs = @valid_attrs |> Map.merge(%{organization_id: -1})
{:error, changeset} =
%StripeConnectAccount{}
|> StripeConnectAccount.create_changeset(attrs)
|> Repo.insert
refute changeset.valid?
assert_error_message(changeset, :organization, "does not exist")
end
test "accepts list of values as verification_fields_needed" do
organization_id = insert(:organization).id
list = ["legal_entity.first_name", "legal_entity.last_name"]
map = %{
organization_id: organization_id,
verification_fields_needed: list
}
attrs = @valid_attrs |> Map.merge(map)
{:ok, record} =
%StripeConnectAccount{}
|> StripeConnectAccount.create_changeset(attrs)
|> Repo.insert
assert record.verification_fields_needed == list
end
end
end
<|start_filename|>lib/code_corps/stripe_service/stripe_platform_card.ex<|end_filename|>
defmodule CodeCorps.StripeService.StripePlatformCardService do
@moduledoc """
Used to perform actions on StripePlatformCard records, while propagating to
and from associated Stripe.Card records
"""
alias CodeCorps.Repo
alias CodeCorps.StripeService.Adapters.StripePlatformCardAdapter
alias CodeCorps.StripeService.StripeConnectCardService
alias CodeCorps.{StripeConnectCard, StripePlatformCard, StripePlatformCustomer}
alias Ecto.Multi
@api Application.get_env(:code_corps, :stripe)
def create(%{"stripe_token" => stripe_token, "user_id" => user_id} = attributes) do
with %StripePlatformCustomer{} = customer <- StripePlatformCustomer |> CodeCorps.Repo.get_by(user_id: user_id),
{:ok, %Stripe.Card{} = card} <- @api.Card.create(%{customer: customer.id_from_stripe, source: stripe_token}),
{:ok, params} <- StripePlatformCardAdapter.to_params(card, attributes)
do
%StripePlatformCard{} |> StripePlatformCard.create_changeset(params) |> Repo.insert
else
nil -> {:error, :not_found}
failure -> failure
end
end
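# Hypothetical usage sketch (token and id values are illustrative):
#
#   StripePlatformCardService.create(%{"stripe_token" => "tok_123", "user_id" => 1})
#   #=> {:ok, %StripePlatformCard{}} on success, or
#   #   {:error, :not_found} when the user has no StripePlatformCustomer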
def update_from_stripe(card_id) do
with %StripePlatformCard{} = record <- Repo.get_by(StripePlatformCard, id_from_stripe: card_id),
{:ok, %Stripe.Card{} = stripe_card} <- @api.Card.retrieve(card_id, %{customer: record.customer_id_from_stripe}),
{:ok, params} <- StripePlatformCardAdapter.to_params(stripe_card, %{})
do
perform_update(record, params)
else
nil -> {:error, :not_found}
failure -> failure
end
end
defp perform_update(record, params) do
changeset = record |> StripePlatformCard.update_changeset(params)
multi =
Multi.new
|> Multi.update(:update_platform_card, changeset)
|> Multi.run(:update_connect_cards, &update_connect_cards/1)
case Repo.transaction(multi) do
{:ok, %{update_platform_card: platform_card_update, update_connect_cards: connect_card_updates}} ->
{:ok, platform_card_update, connect_card_updates}
{:error, :update_platform_card, %Ecto.Changeset{} = changeset, %{}} ->
{:error, changeset}
{:error, failed_operation, failed_value, _changes_so_far} ->
{:error, failed_operation, failed_value}
end
end
defp update_connect_cards(%{update_platform_card: %StripePlatformCard{} = stripe_platform_card}) do
attributes = connect_card_attributes(stripe_platform_card)
case do_update_connect_cards(stripe_platform_card, attributes) do
[_h | _t] = results -> {:ok, results}
[] -> {:ok, nil}
end
end
defp connect_card_attributes(stripe_platform_card) do
stripe_platform_card |> Map.take([:exp_month, :exp_year, :name])
end
defp do_update_connect_cards(_stripe_platform_card, attributes) when attributes == %{}, do: []
defp do_update_connect_cards(stripe_platform_card, attributes) do
stripe_platform_card
|> Repo.preload([stripe_connect_cards: [:stripe_connect_account, :stripe_platform_card]])
|> Map.get(:stripe_connect_cards)
|> Enum.map(&do_update_connect_card(&1, attributes))
end
defp do_update_connect_card(%StripeConnectCard{} = stripe_connect_card, attributes) do
stripe_connect_card |> StripeConnectCardService.update(attributes)
end
end
<|start_filename|>test/lib/code_corps/model/stripe_connect_charge_test.exs<|end_filename|>
defmodule CodeCorps.StripeConnectChargeTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeConnectCharge
test "requires :id_from_stripe, :stripe_connect_customer_id, :user_id" do
changeset = %StripeConnectCharge{} |> StripeConnectCharge.create_changeset(%{})
refute changeset.valid?
assert_validation_triggered(changeset, :id_from_stripe, :required)
assert_validation_triggered(changeset, :stripe_connect_customer_id, :required)
assert_validation_triggered(changeset, :user_id, :required)
end
test "ensures stripe_connect_account exists" do
attrs = %{
id_from_stripe: "test",
stripe_connect_account_id: -1,
stripe_connect_customer_id: -1,
user_id: -1
}
changeset = %StripeConnectCharge{} |> StripeConnectCharge.create_changeset(attrs)
{:error, changeset} = changeset |> Repo.insert
assert_error_message(changeset, :stripe_connect_account, "does not exist")
end
test "ensures stripe_connect_customer exists" do
account = insert(:stripe_connect_account)
attrs = %{
id_from_stripe: "test",
stripe_connect_account_id: account.id,
stripe_connect_customer_id: -1,
user_id: -1
}
changeset = %StripeConnectCharge{} |> StripeConnectCharge.create_changeset(attrs)
{:error, changeset} = changeset |> Repo.insert
assert_error_message(changeset, :stripe_connect_customer, "does not exist")
end
test "ensures user exists" do
account = insert(:stripe_connect_account)
customer = insert(:stripe_connect_customer)
attrs = %{
id_from_stripe: "test",
stripe_connect_account_id: account.id,
stripe_connect_customer_id: customer.id,
user_id: -1
}
changeset = %StripeConnectCharge{} |> StripeConnectCharge.create_changeset(attrs)
{:error, changeset} = changeset |> Repo.insert
assert_error_message(changeset, :user, "does not exist")
end
test "ensures uniqueness of :id_from_stripe" do
insert(:stripe_connect_charge, id_from_stripe: "exists")
account = insert(:stripe_connect_account)
customer = insert(:stripe_connect_customer)
user = insert(:user)
attrs = %{
id_from_stripe: "exists",
stripe_connect_account_id: account.id,
stripe_connect_customer_id: customer.id,
user_id: user.id
}
changeset = %StripeConnectCharge{} |> StripeConnectCharge.create_changeset(attrs)
{:error, changeset} = changeset |> Repo.insert
assert_error_message(changeset, :id_from_stripe, "has already been taken")
end
end
<|start_filename|>test/lib/code_corps/model/stripe_connect_customer_test.exs<|end_filename|>
defmodule CodeCorps.StripeConnectCustomerTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeConnectCustomer
@valid_attrs %{
id_from_stripe: "abc123"
}
@invalid_attrs %{}
describe "create_changeset/2" do
test "reports as valid when attributes are valid" do
ids = %{
stripe_connect_account_id: insert(:stripe_connect_account).id,
stripe_platform_customer_id: insert(:stripe_platform_customer).id,
user_id: insert(:user).id
}
changes =
@valid_attrs
|> Map.merge(ids)
changeset =
%StripeConnectCustomer{}
|> StripeConnectCustomer.create_changeset(changes)
assert changeset.valid?
end
test "reports as invalid when attributes are invalid" do
changeset = StripeConnectCustomer.create_changeset(%StripeConnectCustomer{}, @invalid_attrs)
refute changeset.valid?
assert_error_message(changeset, :id_from_stripe, "can't be blank")
assert_error_message(changeset, :stripe_connect_account_id, "can't be blank")
assert_error_message(changeset, :stripe_platform_customer_id, "can't be blank")
assert_error_message(changeset, :user_id, "can't be blank")
end
test "ensures associations to existing stripe_connect_account" do
ids = %{
stripe_connect_account_id: -1,
stripe_platform_customer_id: insert(:stripe_platform_customer).id,
user_id: insert(:user).id
}
attrs =
@valid_attrs
|> Map.merge(ids)
{result, changeset} =
%StripeConnectCustomer{}
|> StripeConnectCustomer.create_changeset(attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :stripe_connect_account, "does not exist")
end
test "ensures associations to existing stripe_platform_customer" do
ids = %{
stripe_connect_account_id: insert(:stripe_connect_account).id,
stripe_platform_customer_id: -1,
user_id: insert(:user).id
}
attrs =
@valid_attrs
|> Map.merge(ids)
{result, changeset} =
%StripeConnectCustomer{}
|> StripeConnectCustomer.create_changeset(attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :stripe_platform_customer, "does not exist")
end
test "ensures associations to existing user" do
ids = %{
stripe_connect_account_id: insert(:stripe_connect_account).id,
stripe_platform_customer_id: insert(:stripe_platform_customer).id,
user_id: -1
}
attrs =
@valid_attrs
|> Map.merge(ids)
{result, changeset} =
%StripeConnectCustomer{}
|> StripeConnectCustomer.create_changeset(attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :user, "does not exist")
end
end
end
<|start_filename|>test/lib/code_corps/policy/comment_test.exs<|end_filename|>
defmodule CodeCorps.Policy.CommentTest do
use CodeCorps.PolicyCase
alias CodeCorps.{Comment, Policy, User}
describe "create?" do
test "returns true if own record" do
user = insert(:user)
params = %{"user_id" => user.id}
assert Policy.Comment.create?(user, params)
end
test "returns false if someone else's record" do
[user, another_user] = insert_pair(:user)
params = %{"user_id" => another_user.id}
refute Policy.Comment.create?(user, params)
end
test "returns false by default" do
refute Policy.Comment.create?(%User{}, %{})
end
end
describe "update?" do
test "returns true if own record" do
user = insert(:user)
comment = insert(:comment, user: user)
assert Policy.Comment.update?(user, comment)
end
test "returns false if someone else's record" do
[user, another_user] = insert_pair(:user)
comment = insert(:comment, user: user)
refute Policy.Comment.update?(another_user, comment)
end
test "returns false by default" do
refute Policy.Comment.update?(%User{}, %Comment{})
end
end
end
<|start_filename|>test/lib/code_corps/model/project_user_test.exs<|end_filename|>
defmodule CodeCorps.ProjectUserTest do
use CodeCorps.ModelCase
alias CodeCorps.{ProjectUser, Repo}
describe "update_changeset role validation" do
test "includes pending" do
attrs = %{role: "pending"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
assert changeset.valid?
end
test "includes contributor" do
attrs = %{role: "contributor"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
assert changeset.valid?
end
test "includes admin" do
attrs = %{role: "admin"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
assert changeset.valid?
end
test "includes owner" do
attrs = %{role: "owner"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
assert changeset.valid?
end
test "does not include invalid values" do
attrs = %{role: "invalid"}
changeset = ProjectUser.update_changeset(%ProjectUser{}, attrs)
refute changeset.valid?
end
end
describe "create_owner_changeset/2" do
@attributes ~w(project_id user_id role)
test "casts #{@attributes}, with role cast to 'owner'" do
attrs = %{foo: "bar", project_id: 1, user_id: 2}
changeset = ProjectUser.create_owner_changeset(%ProjectUser{}, attrs)
assert changeset.changes == %{project_id: 1, user_id: 2, role: "owner"}
end
test "ensures user record exists" do
project = insert(:project)
attrs = %{project_id: project.id, user_id: -1}
changeset = ProjectUser.create_owner_changeset(%ProjectUser{}, attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :user)
end
test "ensures project record exists" do
user = insert(:user)
attrs = %{project_id: -1, user_id: user.id}
changeset = ProjectUser.create_owner_changeset(%ProjectUser{}, attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :project)
end
end
describe "create_changeset/2" do
@attributes ~w(project_id user_id role)
test "casts #{@attributes}, with role cast to 'pending'" do
attrs = %{foo: "bar", project_id: 1, user_id: 2}
changeset = ProjectUser.create_changeset(%ProjectUser{}, attrs)
assert changeset.changes == %{project_id: 1, user_id: 2, role: "pending"}
end
test "ensures user record exists" do
project = insert(:project)
attrs = %{project_id: project.id, user_id: -1}
changeset = ProjectUser.create_changeset(%ProjectUser{}, attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :user)
end
test "ensures project record exists" do
user = insert(:user)
attrs = %{project_id: -1, user_id: user.id}
changeset = ProjectUser.create_changeset(%ProjectUser{}, attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :project)
end
end
end
<|start_filename|>lib/code_corps/tasks/tasks.ex<|end_filename|>
defmodule CodeCorps.Tasks do
@moduledoc """
Handles special CRUD operations for `CodeCorps.Task`.
"""
alias CodeCorps.{GitHub, GitHub.Sync, GithubIssue, Repo, Task}
alias Ecto.{Changeset, Multi}
require Logger
# :user, :github_issue and :github_repo are required for connecting to github
# :project and :organization are required in order to add a header to the
# github issue body when the user themselves are not connected to github, but
# the task is
#
# Right now, all of these preloads are loaded at once. If there are
# performance issues, we can split them up according to the information
# provided here.
@preloads [
:github_issue,
[github_repo: :github_app_installation],
[project: :organization],
:user
]
@type result :: {:ok, Task.t} | {:error, Changeset.t} | {:error, :github} | {:error, :task_not_found}
@doc ~S"""
Performs all actions involved in creating a task on a project
"""
@spec create_task(map) :: result
def create_task(%{} = attributes) do
Multi.new
|> Multi.insert(:task, %Task{} |> Task.create_changeset(attributes))
|> Multi.run(:preload, fn %{task: %Task{} = task} ->
{:ok, task |> Repo.preload(@preloads)}
end)
|> Multi.run(:github, (fn %{preload: %Task{} = task} -> task |> create_on_github() end))
|> Repo.transaction
|> marshall_result()
end
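# Hypothetical usage sketch; the attribute keys shown are illustrative and the
# accepted set is defined by Task.create_changeset/2:
#
#   Tasks.create_task(%{"title" => "Fix bug", "markdown" => "Details",
#                       "project_id" => 1, "user_id" => 2})
#   #=> {:ok, %Task{}} | {:error, %Ecto.Changeset{}} | {:error, :github}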
@spec update_task(Task.t, map) :: result
def update_task(%Task{} = task, %{} = attributes) do
Multi.new
|> Multi.update(:task, task |> Task.update_changeset(attributes))
|> Multi.run(:preload, fn %{task: %Task{} = task} ->
{:ok, task |> Repo.preload(@preloads)}
end)
|> Multi.run(:github, (fn %{preload: %Task{} = task} -> task |> update_on_github() end))
|> Repo.transaction()
|> marshall_result()
end
@spec marshall_result(tuple) :: result
defp marshall_result({:ok, %{github: %Task{} = task}}), do: {:ok, task}
defp marshall_result({:ok, %{task: %Task{} = task}}), do: {:ok, task}
defp marshall_result({:error, :task, %Changeset{} = changeset, _steps}), do: {:error, changeset}
defp marshall_result({:error, :github, {:error, :task_not_found}, _steps}), do: {:error, :task_not_found}
defp marshall_result({:error, :github, result, _steps}) do
Logger.info "An error occurred when creating/updating the task with the GitHub API"
Logger.info "#{inspect result}"
{:error, :github}
end
@spec create_on_github(Task.t) :: {:ok, Task.t} | {:error, Changeset.t} | {:error, GitHub.api_error_struct}
defp create_on_github(%Task{github_repo_id: nil} = task) do
# Don't create: no GitHub repo was selected
{:ok, task}
end
defp create_on_github(%Task{github_repo: github_repo} = task) do
with {:ok, payload} <- GitHub.API.Issue.create(task),
{:ok, %GithubIssue{} = github_issue} <-
payload
|> Sync.GithubIssue.create_or_update_issue(github_repo) do
task |> link_with_github_changeset(github_issue) |> Repo.update()
else
{:error, error} -> {:error, error}
end
end
@spec link_with_github_changeset(Task.t, GithubIssue.t) :: Changeset.t
defp link_with_github_changeset(%Task{} = task, %GithubIssue{} = github_issue) do
task |> Changeset.change(%{github_issue: github_issue})
end
@spec update_on_github(Task.t) :: {:ok, Task.t} | {:error, Changeset.t} | {:error, GitHub.api_error_struct} | {:error, :task_not_found}
defp update_on_github(%Task{github_repo_id: nil, github_issue_id: nil} = task), do: {:ok, task}
defp update_on_github(%Task{github_repo_id: _, github_issue_id: nil} = task), do: task |> create_on_github()
defp update_on_github(%Task{github_repo: github_repo} = task) do
with {:ok, payload} <- GitHub.API.Issue.update(task),
{:ok, %GithubIssue{}} <- payload |> Sync.GithubIssue.create_or_update_issue(github_repo),
%Task{} = task <- Repo.get(Task, task.id) do
{:ok, task}
else
nil -> {:error, :task_not_found}
{:error, error} -> {:error, error}
end
end
end
<|start_filename|>lib/code_corps/cloudex/uploader.ex<|end_filename|>
defmodule CodeCorps.Cloudex.Uploader do
@cloudex Application.get_env(:code_corps, :cloudex)
@spec upload(list | String.t) :: Cloudex.upload_result()
def upload(list_or_url) do
@cloudex.upload(list_or_url)
end
end
<|start_filename|>lib/code_corps/organizations/organizations.ex<|end_filename|>
defmodule CodeCorps.Organizations do
@moduledoc ~S"""
Domain logic for creating `CodeCorps.Organization` records and fulfilling
their associated `CodeCorps.OrganizationInvite`s.
"""
alias CodeCorps.{Organization, OrganizationInvite, Repo}
alias Ecto.{Changeset, Multi}
@doc ~S"""
Creates a `CodeCorps.Organization` from a set of parameters,
fulfilling the associated `CodeCorps.OrganizationInvite`, if it exists, by
associating it with the created record.
"""
@spec create(map) :: {:ok, Organization.t} | {:error, Changeset.t}
def create(%{} = params) do
Multi.new()
|> Multi.insert(:organization, Organization.create_changeset(%Organization{}, params))
|> Multi.run(:organization_invite, fn %{organization: organization} -> organization |> fulfill_associated_invite(params) end)
|> Repo.transaction()
|> handle_result()
end
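# A minimal usage sketch (attribute keys here are hypothetical; the accepted
# fields are whatever `Organization.create_changeset/2` casts). If an
# "invite_code" is given, a matching `OrganizationInvite` is expected to exist;
# when the key is absent, the `:organization_invite` step returns `{:ok, nil}`:
#
#     {:ok, %Organization{} = organization} =
#       CodeCorps.Organizations.create(%{
#         "name" => "Example Org",
#         "invite_code" => "some-invite-code"
#       })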
@spec fulfill_associated_invite(Organization.t, map) :: {:ok, OrganizationInvite.t | nil} | {:error, Changeset.t}
defp fulfill_associated_invite(%Organization{id: organization_id}, %{"invite_code" => code}) do
OrganizationInvite
|> Repo.get_by(code: code)
|> OrganizationInvite.update_changeset(%{organization_id: organization_id})
|> Repo.update()
end
defp fulfill_associated_invite(%Organization{}, %{}), do: {:ok, nil}
@spec handle_result(tuple) :: tuple
defp handle_result({:ok, %{organization: %Organization{} = organization}}) do
{:ok, organization}
end
defp handle_result({:error, :organization, %Changeset{} = changeset, _steps}) do
{:error, changeset}
end
defp handle_result({:error, :organization_invite, %Changeset{} = changeset, _steps}) do
{:error, changeset}
end
end
<|start_filename|>test/support/model_case.ex<|end_filename|>
defmodule CodeCorps.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias CodeCorps.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import CodeCorps.Factories
import CodeCorps.ModelCase
end
end
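# A test module would typically pull these helpers in like so (module name
# here is hypothetical):
#
#     defmodule CodeCorps.SomeModelTest do
#       use CodeCorps.ModelCase
#
#       test "builds a valid changeset" do
#         # ...
#       end
#     end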
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(CodeCorps.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(CodeCorps.Repo, {:shared, self()})
end
:ok
end
@doc """
Helper for returning list of errors in a struct when given certain data.
## Examples
Given a User schema that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "<PASSWORD>", name: "is blank"]
You could then write your assertion like:
assert {:password, "<PASSWORD>"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "<PASSWORD>"} in changeset.errors
true
"""
def errors_on(struct, data) do
struct.__struct__.changeset(struct, data)
|> Ecto.Changeset.traverse_errors(&CodeCorpsWeb.ErrorHelpers.translate_error/1)
|> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
end
@doc """
Asserts if a specific error message has been added to a specific field on the
changeset. It is more flexible to use `error_message/2` directly instead of
this one.
```
assert_error_message(changeset, :foo, "bar")
```
Compared to
```
assert error_message(changeset, :foo) == "bar"
refute error_message(changeset, :foo) == "baz"
```
"""
def assert_error_message(changeset, field, expected_message) do
assert error_message(changeset, field) == expected_message
end
@doc """
Asserts if a specific validation type has been triggered on a specific field
on the changeset. It is more flexible to use `validation_triggered/2` directly
instead of this one.
```
assert_validation_triggered(changeset, :foo, "bar")
```
Compared to
```
assert validation_triggered(changeset, :foo) == :required
refute validation_triggered(changeset, :bar) == :required
```
"""
def assert_validation_triggered(changeset, field, type) do
assert validation_triggered(changeset, field) == type
end
@doc """
Returns an error message on a specific field on the specified changeset
"""
@spec error_message(Ecto.Changeset.t, Atom.t) :: String.t
def error_message(changeset, field) do
{message, _} = changeset.errors[field]
message
end
@doc """
Returns an atom indicating the type of validation that was triggered on a
field in a changeset.
"""
@spec validation_triggered(Ecto.Changeset.t, Atom.t) :: Atom.t
def validation_triggered(changeset, field) do
{_message, status} = changeset.errors[field]
status[:validation]
end
@doc """
Returns true or false depending on if an assoc_constraint validation has been
triggered in the provided changeset on the specified field.
"""
@spec assoc_constraint_triggered?(Ecto.Changeset.t, Atom.t) :: boolean
def assoc_constraint_triggered?(changeset, field) do
error_message(changeset, field) == "does not exist"
end
end
<|start_filename|>priv/repo/migrations/20171031232023_add_sync_state_to_project_github_repos.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddSyncStateToProjectGithubRepos do
use Ecto.Migration
def change do
alter table(:project_github_repos) do
add :sync_state, :string, default: "unsynced"
end
create index(:project_github_repos, [:sync_state])
end
end
<|start_filename|>test/lib/code_corps/policy/conversation_test.exs<|end_filename|>
defmodule CodeCorps.Policy.ConversationTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.Conversation, only: [scope: 2, show?: 2, update?: 2]
alias CodeCorps.{Conversation, Repo}
describe "scope" do
test "returns all records for admin user" do
insert_list(3, :conversation)
user = insert(:user, admin: true)
assert Conversation |> scope(user) |> Repo.all |> Enum.count == 3
end
test "returns records where user is the author or they administer the project" do
user = insert(:user, admin: false)
%{project: project_user_applied_to} =
insert(:project_user, user: user, role: "pending")
%{project: project_user_contributes_to} =
insert(:project_user, user: user, role: "contributor")
%{project: project_user_administers} =
insert(:project_user, user: user, role: "admin")
%{project: project_user_owns} =
insert(:project_user, user: user, role: "owner")
message_authored_by = insert(:message, author: user)
message_from_project_applied_to =
insert(:message, project: project_user_applied_to)
message_from_contributing_project =
insert(:message, project: project_user_contributes_to)
message_from_administered_project =
insert(:message, project: project_user_administers)
message_from_owned_project =
insert(:message, project: project_user_owns)
conversation_with = insert(:conversation, user: user)
conversation_authored_by =
insert(:conversation, message: message_authored_by)
some_other_conversation = insert(:conversation)
conversation_from_project_applied_to =
insert(:conversation, message: message_from_project_applied_to)
conversation_from_contributing_project =
insert(:conversation, message: message_from_contributing_project)
conversation_from_administered_project =
insert(:conversation, message: message_from_administered_project)
conversation_from_owned_project =
insert(:conversation, message: message_from_owned_project)
result_ids =
Conversation
|> scope(user)
|> Repo.all
|> Enum.map(&Map.get(&1, :id))
assert conversation_with.id in result_ids
assert conversation_authored_by.id in result_ids
refute conversation_from_project_applied_to.id in result_ids
refute conversation_from_contributing_project.id in result_ids
assert conversation_from_administered_project.id in result_ids
assert conversation_from_owned_project.id in result_ids
refute some_other_conversation.id in result_ids
end
end
describe "show?" do
test "returns true when user is the target" do
user = insert(:user)
message = insert(:message)
conversation = insert(:conversation, message: message, user: user)
assert show?(user, conversation)
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
refute show?(user, conversation)
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
refute show?(user, conversation)
end
test "returns true when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
assert show?(user, conversation)
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
assert show?(user, conversation)
end
end
describe "update?" do
test "returns true when user is admin" do
user = insert(:user, admin: true)
message = insert(:message)
conversation = insert(:conversation, message: message, user: user)
assert update?(user, conversation)
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
refute update?(user, conversation)
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
refute update?(user, conversation)
end
test "returns true when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
assert update?(user, conversation)
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
assert update?(user, conversation)
end
end
end
<|start_filename|>test/lib/code_corps_web/views/project_user_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ProjectUserViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
project = insert(:project)
user = insert(:user)
project_user = insert(:project_user, project: project, user: user)
rendered_json = render(CodeCorpsWeb.ProjectUserView, "show.json-api", data: project_user)
expected_json = %{
"data" => %{
"id" => project_user.id |> Integer.to_string,
"type" => "project-user",
"attributes" => %{
"inserted-at" => project_user.inserted_at,
"role" => project_user.role,
"updated-at" => project_user.updated_at
},
"relationships" => %{
"project" => %{
"data" => %{"id" => project_user.project_id |> Integer.to_string, "type" => "project"}
},
"user" => %{
"data" => %{"id" => project_user.user_id |> Integer.to_string, "type" => "user"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/support/test_helpers.ex<|end_filename|>
defmodule CodeCorps.TestHelpers do
use Phoenix.ConnTest
import ExUnit.Assertions
def ids_from_response(response) do
Enum.map response["data"], fn(attributes) ->
String.to_integer(attributes["id"])
end
end
def assert_ids_from_query(query, ids) do
assert query |> Enum.map(&Map.get(&1, :id)) |> Enum.sort == ids |> Enum.sort()
end
def assert_id_from_response(response, id) do
assert String.to_integer(response["data"]["id"]) == id
response
end
def assert_ids_from_response(response, ids) do
assert response |> ids_from_response() |> Enum.sort() == ids |> Enum.sort()
response
end
def assert_attributes(response, expected) do
assert response["attributes"] == expected
response
end
def assert_jsonapi_relationship(json = %{"relationships" => relationships}, relationship_name, id) do
assert relationships[relationship_name]["data"]["id"] == Integer.to_string(id)
json
end
def assert_jsonapi_relationship(json, relationship_name, id) do
assert json["data"]["relationships"][relationship_name]["data"]["id"] == Integer.to_string(id)
json
end
def put_id(payload, id), do: put_in(payload, ["data", "id"], id)
def put_attributes(payload, attributes), do: put_in(payload, ["data", "attributes"], attributes)
def put_relationships(payload, record_1, record_2), do: put_relationships(payload, [record_1, record_2])
def put_relationships(payload, records) do
relationships = build_relationships(%{}, records)
payload |> put_in(["data", "relationships"], relationships)
end
defp build_relationships(relationship_map, []), do: relationship_map
defp build_relationships(relationship_map, [head | tail]) do
relationship_map
|> Map.put(get_record_name(head), %{data: %{id: head.id}})
|> build_relationships(tail)
end
defp build_relationships(relationship_map, single_param) do
build_relationships(relationship_map, [single_param])
end
defp get_record_name(record) do
record.__struct__
|> Module.split
|> List.last
|> Macro.underscore
|> String.to_existing_atom
end
end
<|start_filename|>test/lib/code_corps/presenters/image_presenter_test.exs<|end_filename|>
defmodule CodeCorps.Presenters.ImagePresenterTest do
use ExUnit.Case, async: true
import CodeCorps.Factories
alias CodeCorps.Presenters.ImagePresenter
@organization build(:organization)
@project build(:project)
@user build(:user)
describe "large/1" do
test "returns proper large image defaults for organization" do
assert ImagePresenter.large(@organization) == "#{Application.get_env(:code_corps, :asset_host)}/icons/organization_default_large_.png"
end
test "returns proper large image defaults for project" do
assert ImagePresenter.large(@project) == "#{Application.get_env(:code_corps, :asset_host)}/icons/project_default_large_.png"
end
test "returns proper large image defaults for user" do
assert ImagePresenter.large(@user) == "#{Application.get_env(:code_corps, :asset_host)}/icons/user_default_large_.png"
end
end
describe "thumbnail/1" do
test "returns proper thumbnail image defaults for organization" do
assert ImagePresenter.thumbnail(@organization) == "#{Application.get_env(:code_corps, :asset_host)}/icons/organization_default_thumb_.png"
end
test "returns proper thumbnail image defaults for project" do
assert ImagePresenter.thumbnail(@project) == "#{Application.get_env(:code_corps, :asset_host)}/icons/project_default_thumb_.png"
end
test "returns proper thumbnail image defaults for user" do
assert ImagePresenter.thumbnail(@user) == "#{Application.get_env(:code_corps, :asset_host)}/icons/user_default_thumb_.png"
end
end
end
<|start_filename|>priv/repo/migrations/20171003134956_add_unique_constraint_to_users_github_id.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddUniqueConstraintToUsersGithubId do
use Ecto.Migration
def change do
create index(:users, [:github_id], unique: true)
end
end
<|start_filename|>lib/code_corps_web/controllers/message_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.MessageController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{
Message,
Messages,
User
}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes, [includes_many: ~w(conversation)]
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
messages <- Message |> Policy.scope(current_user) |> Messages.list(params) |> preload() do
conn |> render("index.json-api", data: messages)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
%Message{} = message <- Message |> Repo.get(id) |> preload(),
{:ok, :authorized} <- current_user |> Policy.authorize(:show, message, %{}) do
conn |> render("show.json-api", data: message)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %Message{}, params),
{:ok, %Message{} = message} <- Messages.create(params),
message <- preload(message)
do
conn |> put_status(:created) |> render("show.json-api", data: message)
end
end
@preloads [:author, :project, :conversations]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>test/lib/code_corps_web/controllers/user_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.UserControllerTest do
@moduledoc false
use CodeCorpsWeb.ApiCase, resource_name: :user
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{User, Repo}
@valid_attrs %{
email: "<EMAIL>",
username: "testuser",
first_name: "Test",
last_name: "User",
website: "http://www.example.com",
twitter: "testuser",
biography: "Just a test user"
}
@invalid_attrs %{
email: "",
username: "",
website: "---_<>-blank.com",
twitter: " @ testuser"
}
@relationships %{}
describe "index" do
test "lists all entries on index", %{conn: conn} do
[user_1, user_2] = insert_pair(:user)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([user_1.id, user_2.id])
end
test "filters resources on index", %{conn: conn} do
[user_1, user_2 | _] = insert_list(3, :user)
path = "users/?filter[id]=#{user_1.id},#{user_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([user_1.id, user_2.id])
end
test "returns search results on index", %{conn: conn} do
user_1 = insert(:user, first_name: "Joe")
user_2 = insert(:user, username: "joecoder")
user_3 = insert(:user, last_name: "Jacko")
insert(:user, first_name: "Max")
params = %{"query" => "j"}
path = conn |> user_path(:index, params)
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([user_1.id, user_2.id, user_3.id])
end
test "returns search result on project filter", %{conn: conn} do
user_1 = insert(:user)
user_2 = insert(:user)
project = insert(:project)
insert(:project_user, user: user_1, project: project)
insert(:project_user, user: user_2, project: project)
insert(:project_user)
params = %{"project_id" => project.id}
path = conn |> user_path(:index, params)
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([user_1.id, user_2.id])
end
test "limit filter limits results on index", %{conn: conn} do
insert_list(6, :user)
params = %{"limit" => 5}
path = conn |> user_path(:index, params)
json = conn |> get(path) |> json_response(200)
returned_users_length = json["data"] |> length
assert returned_users_length == 5
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
user = insert(:user)
conn
|> request_show(user)
|> json_response(200)
|> assert_id_from_response(user.id)
end
@tag :authenticated
test "renders email when authenticated", %{conn: conn, current_user: current_user} do
assert conn |> request_show(current_user) |> json_response(200)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
test "creates and renders resource when data is valid", %{conn: conn} do
attrs = Map.put(@valid_attrs, :password, "password")
conn = post conn, user_path(conn, :create), %{
"data" => %{
"attributes" => attrs
}
}
assert conn |> json_response(201)
end
test "calls segment tracking after user is created", %{conn: conn} do
conn = post conn, user_path(conn, :create), %{
"meta" => %{},
"data" => %{
"type" => "user",
"attributes" => Map.put(@valid_attrs, :password, "password"),
"relationships" => @relationships
}
}
id = json_response(conn, 201)["data"]["id"] |> String.to_integer
assert_received {:track, ^id, "Signed Up", %{}}
end
test "does not create resource and renders errors when data is invalid", %{conn: conn} do
attrs = Map.put(@invalid_attrs, :password, "password")
conn = post conn, user_path(conn, :create), %{
"data" => %{
"attributes" => attrs
}
}
assert conn |> json_response(422)
end
end
describe "update" do
@tag :authenticated
test "updates and renders chosen resource when data is valid", %{conn: conn} do
user = insert(:user)
attrs = Map.put(@valid_attrs, :password, "password")
params = %{
"meta" => %{},
"data" => %{
"type" => "user",
"id" => user.id,
"attributes" => attrs,
"relationships" => @relationships
}
}
path = user_path(conn, :update, user)
assert conn |> authenticate(user) |> put(path, params) |> json_response(200)
end
test "tracks authentication & update profile events in Segment", %{conn: conn} do
user = insert(:user, email: "<EMAIL>")
attrs = Map.put(@valid_attrs, :password, "password")
params = %{
"meta" => %{},
"data" => %{
"type" => "user",
"id" => user.id,
"attributes" => attrs,
"relationships" => @relationships
}
}
path = user_path(conn, :update, user)
conn =
conn
|> authenticate(user)
|> put(path, params)
id = json_response(conn, 200)["data"]["id"] |> String.to_integer
assert_received {:identify, ^id, %{email: "<EMAIL>"}}
assert_received {:track, ^id, "Updated Profile", %{}}
end
test "does not update when authorized as different user", %{conn: conn} do
[user, another_user] = insert_pair(:user)
attrs = Map.put(@valid_attrs, :password, "password")
path = user_path(conn, :update, user)
params = %{
"meta" => %{},
"data" => %{
"type" => "user",
"id" => user.id,
"attributes" => attrs,
"relationships" => @relationships
}
}
conn =
conn
|> authenticate(another_user)
|> put(path, params)
assert json_response(conn, 403)
end
test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
user = insert(:user)
path = user_path(conn, :update, user)
params = %{
"meta" => %{},
"data" => %{
"type" => "user",
"id" => user.id,
"attributes" => @invalid_attrs,
"relationships" => @relationships
}
}
conn =
conn
|> authenticate(user)
|> put(path, params)
json = json_response(conn, 422)
assert json["errors"] != %{}
end
test "transitions from one state to the next", %{conn: conn} do
user = insert(:user)
conn = put authenticate(conn, user), user_path(conn, :update, user), %{
"data" => %{
"type" => "user",
"id" => user.id,
"attributes" => %{"password" => "password", "state_transition" => "edit_profile"}
}
}
%{"data" => %{"id" => id}} = json_response(conn, 200)
user = Repo.get(User, id)
assert user.state == "edited_profile"
# Transition was successful, so we should unset it
assert user.state_transition == nil
end
end
describe "github_oauth" do
@attrs %{"code" => "foo", "state" => "bar"}
@tag :authenticated
test "return the user when current user connects successfully", %{conn: conn, current_user: current_user} do
path = user_path(conn, :github_oauth)
json = conn |> post(path, @attrs) |> json_response(200)
assert json["data"]["id"] |> String.to_integer == current_user.id
assert json["data"]["attributes"]["github-id"]
end
@tag :authenticated
test "tracks event on segment when current user connects successfully", %{conn: conn, current_user: %{id: id}} do
path = user_path(conn, :github_oauth)
assert conn |> post(path, @attrs) |> json_response(200)
expected_data =
User
|> Repo.get(id)
|> CodeCorps.Analytics.SegmentTraitsBuilder.build
assert_received {:track, ^id, "Connected to GitHub", ^expected_data}
end
test "requires authentication", %{conn: conn} do
path = user_path(conn, :github_oauth)
assert conn |> post(path, @attrs) |> json_response(401)
end
@tag :authenticated
test "renders 500 if there's a GitHub API error", %{conn: conn} do
path = user_path(conn, :github_oauth)
with_mock_api(CodeCorps.GitHub.FailureAPI) do
assert conn |> post(path, @attrs) |> json_response(500)
end
end
end
describe "email_available" do
test "returns valid and available when email is valid and available", %{conn: conn} do
resp = get conn, user_path(conn, :email_available, %{email: "<EMAIL>"})
json = json_response(resp, 200)
assert json["available"]
assert json["valid"]
end
test "returns valid but inavailable when email is valid but taken", %{conn: conn} do
insert(:user, email: "<EMAIL>")
resp = get conn, user_path(conn, :email_available, %{email: "<EMAIL>"})
json = json_response(resp, 200)
refute json["available"]
assert json["valid"]
end
test "returns as available but invalid when email is invalid", %{conn: conn} do
resp = get conn, user_path(conn, :email_available, %{email: "not_an_email"})
json = json_response(resp, 200)
assert json["available"]
refute json["valid"]
end
end
describe "username_available" do
test "returns as valid and available when username is valid and available", %{conn: conn} do
resp = get conn, user_path(conn, :username_available, %{username: "available"})
json = json_response(resp, 200)
assert json["available"]
assert json["valid"]
end
test "returns as valid, but inavailable when username is valid but taken", %{conn: conn} do
insert(:user, username: "used")
resp = get conn, user_path(conn, :username_available, %{username: "used"})
json = json_response(resp, 200)
refute json["available"]
assert json["valid"]
end
test "returns available but invalid when username is invalid", %{conn: conn} do
resp = get conn, user_path(conn, :username_available, %{username: ""})
json = json_response(resp, 200)
assert json["available"]
refute json["valid"]
end
end
end
<|start_filename|>test/lib/code_corps/model/user_test.exs<|end_filename|>
defmodule CodeCorps.UserTest do
use CodeCorps.ModelCase
alias CodeCorps.User
@valid_attrs %{email: "<EMAIL>", password: "<PASSWORD>", username: "testuser"}
@invalid_attrs %{}
describe "full_name/1" do
test "returns full name correctly" do
names = [
{"Josh", "Smith", "<NAME>"},
{"Josh", nil, "Josh"},
{nil, "Smith", "Smith"},
{nil, nil, ""},
{"", "", ""}
]
Enum.each names, fn({first_name, last_name, expected_full_name}) ->
user = build(:user, first_name: first_name, last_name: last_name)
assert User.full_name(user) == expected_full_name
end
end
end
describe "changeset/2" do
test "changeset with valid attributes" do
changeset = User.changeset(%User{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = User.changeset(%User{}, @invalid_attrs)
refute changeset.valid?
end
test "changeset with invalid email" do
attrs = Map.put(@valid_attrs, :email, "notanemail")
changeset = User.changeset(%User{}, attrs)
assert_error_message(changeset, :email, "has invalid format")
end
end
describe "registration_changeset" do
test "does not accept long usernames" do
attrs = Map.put(@valid_attrs, :username, String.duplicate("a", 40))
changeset = User.registration_changeset(%User{}, attrs)
assert {:username, {"should be at most %{count} character(s)", [count: 39, validation: :length, max: 39]}} in changeset.errors
end
test "password must be at least 6 chars long" do
attrs = Map.put(@valid_attrs, :password, "<PASSWORD>")
changeset = User.registration_changeset(%User{}, attrs)
assert {:password, {"should be at least %{count} character(s)", [count: 6, validation: :length, min: 6]}} in changeset.errors
end
test "with valid attributes hashes password" do
attrs = Map.put(@valid_attrs, :password, "<PASSWORD>")
changeset = User.registration_changeset(%User{}, attrs)
%{password: pass, encrypted_password: encrypted_password} = changeset.changes
assert changeset.valid?
assert encrypted_password
assert Comeonin.Bcrypt.checkpw(pass, encrypted_password)
end
test "does not allow duplicate emails" do
user_1_attrs = %{email: "<EMAIL>", password: "password", username: "user_1"}
user_2_attrs = %{email: "<EMAIL>", password: "password", username: "user_2"}
insert(:user, user_1_attrs)
changeset = User.registration_changeset(%User{}, user_2_attrs)
{:error, changeset} = Repo.insert(changeset)
refute changeset.valid?
assert_error_message(changeset, :email, "has already been taken")
end
test "does not allow duplicate usernames, regardless of case" do
user_1_attrs = %{email: "<EMAIL>", password: "password", username: "duplicate"}
user_2_attrs = %{email: "<EMAIL>", password: "password", username: "DUPLICATE"}
insert(:user, user_1_attrs)
changeset = User.registration_changeset(%User{}, user_2_attrs)
{:error, changeset} = Repo.insert(changeset)
refute changeset.valid?
assert_error_message(changeset, :username, "has already been taken")
end
test "allows setting sign_up_context to 'donation'" do
attrs = @valid_attrs |> Map.put(:sign_up_context, "donation")
changeset = User.registration_changeset(%User{}, attrs)
assert changeset.valid?
assert changeset |> Ecto.Changeset.get_change(:sign_up_context) == "donation"
end
end
describe "update_changeset" do
test "requires :twitter to be in proper format" do
user = %User{}
attrs = %{twitter: "bad @ twitter"}
changeset = User.update_changeset(user, attrs)
assert_error_message(changeset, :twitter, "has invalid format")
end
test "doesn't require :twitter to be part of the changes" do
user = %User{}
attrs = %{}
changeset = User.update_changeset(user, attrs)
refute Keyword.has_key?(changeset.errors, :twitter)
end
test "requires :website to be in proper format" do
user = %User{}
attrs = %{website: "bad <> website"}
changeset = User.update_changeset(user, attrs)
assert_error_message(changeset, :website, "has invalid format")
end
test "doesn't require :website to be part of the changes" do
user = %User{}
attrs = %{}
changeset = User.update_changeset(user, attrs)
refute Keyword.has_key?(changeset.errors, :website)
end
test "prefixes website with 'http://' if there is no prefix" do
user = %User{website: "https://first.com"}
attrs = %{website: "example.com"}
changeset = User.update_changeset(user, attrs)
assert changeset.changes.website == "http://example.com"
end
test "doesn't make a change to the url when there is no param for it" do
user = %User{website: "https://first.com"}
attrs = %{}
changeset = User.update_changeset(user, attrs)
refute Map.has_key?(changeset.changes, :website)
end
test "prevents an invalid transition" do
user = insert(:user, state: "signed_up")
changeset = user |> User.update_changeset(%{state_transition: "yehaww"})
refute changeset.valid?
[error | _] = changeset.errors
{attribute, {message, _}} = error
assert attribute == :state_transition
assert message == "invalid transition yehaww from signed_up"
end
end
test "reset_password_changeset with valid passwords" do
changeset = User.reset_password_changeset(%User{}, %{password: "<PASSWORD>", password_confirmation: "<PASSWORD>"})
assert changeset.valid?
assert changeset.changes.encrypted_password
end
test "reset_password_changeset with invalid passwords generates message" do
changeset = User.reset_password_changeset(%User{}, %{password: "<PASSWORD>", password_confirmation: "<PASSWORD>"})
refute changeset.valid?
assert error_message(changeset, :password_confirmation) == "passwords do not match"
end
end
<|start_filename|>test/lib/code_corps/services/donation_goals_test.exs<|end_filename|>
defmodule CodeCorps.Services.DonationGoalsServiceTest do
use CodeCorps.ModelCase
import CodeCorps.Project, only: [update_total_changeset: 2]
alias CodeCorps.DonationGoal
alias CodeCorps.Services.DonationGoalsService
defp assert_current_goal_id(goal_id) do
current_goal =
DonationGoal
|> Repo.get_by(current: true)
assert current_goal.id == goal_id
end
defp set_donated(project, amount) do
project |> update_total_changeset(%{total_monthly_donated: amount}) |> Repo.update
end
describe "create/1" do
test "inserts new goal, returns {:ok, record}" do
project = insert(:project)
insert(:stripe_connect_plan, project: project)
{:ok, %DonationGoal{} = donation_goal} = DonationGoalsService.create(%{amount: 10, description: "Test", project_id: project.id})
assert_current_goal_id(donation_goal.id)
end
test "returns {:error, changeset} if there are validation errors" do
{:error, %Ecto.Changeset{} = changeset} = DonationGoalsService.create(%{amount: 10})
refute changeset.valid?
end
test "sets current goal correctly when amount exists already" do
project = insert(:project, total_monthly_donated: 10)
{:ok, first_goal} = DonationGoalsService.create(%{amount: 20, description: "Test", project_id: project.id})
assert_current_goal_id(first_goal.id)
{:ok, second_goal} = DonationGoalsService.create(%{amount: 15, description: "Test", project_id: project.id})
assert_current_goal_id(second_goal.id)
end
test "sets current goal correctly" do
project = insert(:project, total_monthly_donated: 5)
{:ok, first_goal} = DonationGoalsService.create(%{amount: 10, description: "Test", project_id: project.id})
# total donated is 5,
# only goal inserted is the first goal
assert_current_goal_id(first_goal.id)
{:ok, second_goal} = DonationGoalsService.create(%{amount: 7, description: "Test", project_id: project.id})
assert_current_goal_id(second_goal.id)
{:ok, _} = DonationGoalsService.create(%{amount: 20, description: "Test", project_id: project.id})
# total donated is still 5
# first goal larger than 5 is the second goal
assert_current_goal_id(second_goal.id)
project |> set_donated(20)
{:ok, fourth_goal} = DonationGoalsService.create(%{amount: 30, description: "Test", project_id: project.id})
# total donated is 20.
# first applicable goal is fourth goal, with an amount of 30
assert_current_goal_id(fourth_goal.id)
project |> set_donated(45)
{:ok, fourth_goal} = DonationGoalsService.create(%{amount: 40, description: "Test", project_id: project.id})
# total donated is 45, which is more than any defined goal
# largest goal is now the fourth goal, with an amount of 40
assert_current_goal_id(fourth_goal.id)
end
end
describe "update/2" do
test "updates existing goal, returns {:ok, record}" do
project = insert(:project)
donation_goal = insert(:donation_goal, amount: 10, project: project)
{:ok, %DonationGoal{} = updated_goal} = DonationGoalsService.update(donation_goal, %{amount: 15})
assert_current_goal_id(updated_goal.id)
assert updated_goal.id == donation_goal.id
end
test "returns {:error, changeset} if there are validation errors" do
project = insert(:project)
donation_goal = insert(:donation_goal, amount: 10, project: project)
{:error, %Ecto.Changeset{} = changeset} = DonationGoalsService.update(donation_goal, %{amount: nil})
refute changeset.valid?
end
test "sets current goal correctly" do
project = insert(:project)
goal_1 = insert(:donation_goal, amount: 10, project: project)
goal_2 = insert(:donation_goal, amount: 15, project: project)
insert(:donation_goal, amount: 20, project: project)
DonationGoalsService.update(goal_1, %{amount: 11})
# amount donated is 0, first goal above that is still goal 1
assert_current_goal_id(goal_1.id)
DonationGoalsService.update(goal_1, %{amount: 21})
# amount donated is still 0, first goal above that is now goal 2
assert_current_goal_id(goal_2.id)
project |> set_donated(25)
DonationGoalsService.update(goal_1, %{amount: 21})
# amount donated is now 25
# this is more than any current goal
# largest goal is goal 1, with 21
assert_current_goal_id(goal_1.id)
DonationGoalsService.update(goal_2, %{amount: 22})
# amount donated is now 25
# this is more than any current goal
# largest goal is goal 2, with 22
assert_current_goal_id(goal_2.id)
DonationGoalsService.update(goal_1, %{amount: 27})
# amount donated is still 25
# first goal higher than that is goal 1, with 27
assert_current_goal_id(goal_1.id)
end
end
describe "set_current_goal_for_project/1" do
test "sets current goal correctly" do
project = insert(:project)
goal_1 = insert(:donation_goal, amount: 10, project: project)
goal_2 = insert(:donation_goal, amount: 15, project: project)
goal_3 = insert(:donation_goal, amount: 20, project: project)
project |> set_donated(5)
DonationGoalsService.update_related_goals(goal_1)
assert_current_goal_id(goal_1.id)
project |> set_donated(10) # total is now 10
DonationGoalsService.update_related_goals(goal_2)
assert_current_goal_id(goal_2.id)
project |> set_donated(15) # total is now 15
DonationGoalsService.update_related_goals(goal_3)
assert_current_goal_id(goal_3.id)
project |> set_donated(20) # total is now 20
DonationGoalsService.update_related_goals(goal_3)
assert_current_goal_id(goal_3.id)
project |> set_donated(25) # total is now 25
DonationGoalsService.update_related_goals(goal_3)
assert_current_goal_id(goal_3.id)
goal_4 = insert(:donation_goal, amount: 30, project: project) # 30 is more than the current 25 total
DonationGoalsService.update_related_goals(goal_4)
assert_current_goal_id(goal_4.id)
end
end
end
<|start_filename|>lib/code_corps_web/controllers/stripe_platform_card_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.StripePlatformCardController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.StripeService.StripePlatformCardService
alias CodeCorps.{StripePlatformCard, User}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
%StripePlatformCard{} = stripe_platform_card <- StripePlatformCard |> Repo.get(id),
{:ok, :authorized} <- current_user |> Policy.authorize(:show, stripe_platform_card, params) do
conn |> render("show.json-api", data: stripe_platform_card)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %StripePlatformCard{}, params),
{:ok, %StripePlatformCard{} = stripe_platform_card} <- StripePlatformCardService.create(params) do
conn |> put_status(:created) |> render("show.json-api", data: stripe_platform_card)
end
end
end
<|start_filename|>test/lib/code_corps/policy/role_skill_test.exs<|end_filename|>
defmodule CodeCorps.Policy.RoleSkillTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.RoleSkill, only: [create?: 1, delete?: 1]
describe "create?" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
assert create?(user)
end
test "returns false if user is not an admin" do
user = build(:user, admin: false)
refute create?(user)
end
end
describe "delete?" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
assert delete?(user)
end
test "returns false if user is not an admin" do
user = build(:user, admin: false)
refute delete?(user)
end
end
end
<|start_filename|>test/lib/code_corps/github/event/pull_request/validator_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.PullRequest.ValidatorTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Event.PullRequest.Validator
describe "valid?/1" do
test "returns true for any PullRequest event fixture" do
assert "pull_request_opened" |> load_event_fixture() |> Validator.valid?
assert "pull_request_closed" |> load_event_fixture() |> Validator.valid?
assert "pull_request_edited" |> load_event_fixture() |> Validator.valid?
assert "pull_request_reopened" |> load_event_fixture() |> Validator.valid?
end
test "returns false for an unsupported structure" do
refute Validator.valid?("foo")
refute Validator.valid?(%{"foo" => "bar"})
refute Validator.valid?(%{"issue" => %{"bar" => "baz"}})
end
end
end
<|start_filename|>test/lib/code_corps_web/plugs/current_user_test.exs<|end_filename|>
defmodule CodeCorpsWeb.Plug.CurrentUserTest do
use CodeCorpsWeb.ConnCase
test "sets conn.assigns[:current_user] if user is authenticated" do
user = build(:user, first_name: "John");
conn = CodeCorps.Guardian.Plug.put_current_resource(build_conn(), user)
result_conn = CodeCorpsWeb.Plug.CurrentUser.call(conn, [])
assert result_conn.assigns[:current_user] == user
end
test "simply returns conn if user is not authenticated" do
conn = build_conn()
result_conn = CodeCorpsWeb.Plug.CurrentUser.call(conn, [])
assert result_conn == conn
refute result_conn.assigns[:current_user]
end
end
<|start_filename|>test/lib/code_corps_web/views/donation_goal_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.DonationGoalViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
project = insert(:project)
plan = insert(:stripe_connect_plan, project: project)
insert(:stripe_connect_subscription, stripe_connect_plan: plan, quantity: 100)
donation_goal = insert(:donation_goal, project: project, amount: 500)
CodeCorps.Services.DonationGoalsService.update_related_goals(donation_goal)
donation_goal = CodeCorpsWeb.DonationGoalController.preload(donation_goal)
rendered_json = render(CodeCorpsWeb.DonationGoalView, "show.json-api", data: donation_goal)
expected_json = %{
"data" => %{
"id" => donation_goal.id |> Integer.to_string,
"type" => "donation-goal",
"attributes" => %{
"achieved" => false,
"amount" => donation_goal.amount,
"current" => donation_goal.current,
"description" => donation_goal.description
},
"relationships" => %{
"project" => %{
"data" => %{
"id" => donation_goal.project_id |> Integer.to_string,
"type" => "project"
}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert expected_json == rendered_json
end
end
<|start_filename|>lib/code_corps/emails/receipt_email.ex<|end_filename|>
defmodule CodeCorps.Emails.ReceiptEmail do
import Bamboo.Email, only: [to: 2]
import Bamboo.PostmarkHelper
alias CodeCorps.Emails.BaseEmail
alias CodeCorps.{DonationGoal, Project, Repo, StripeConnectCharge, StripeConnectSubscription, WebClient}
@spec create(StripeConnectCharge.t, Stripe.Invoice.t) :: Bamboo.Email.t
def create(%StripeConnectCharge{} = charge, %Stripe.Invoice{} = invoice) do
with %StripeConnectCharge{} = charge <- Repo.preload(charge, :user),
%Project{} = project <- get_project(invoice.subscription),
{:ok, %DonationGoal{} = current_donation_goal} <- get_current_donation_goal(project),
template_model <- build_model(charge, project, current_donation_goal)
do
BaseEmail.create
|> to(charge.user.email)
|> template(template_id(), template_model)
else
nil -> {:error, :project_not_found}
other -> other
end
end
@spec get_project(String.t) :: Project.t | {:error, :subscription_not_found}
defp get_project(subscription_id_from_stripe) do
with %StripeConnectSubscription{} = subscription <- get_subscription(subscription_id_from_stripe) do
subscription.stripe_connect_plan.project
else
nil -> {:error, :subscription_not_found}
end
end
@spec get_subscription(String.t) :: StripeConnectSubscription.t | nil
defp get_subscription(subscription_id_from_stripe) do
StripeConnectSubscription
|> Repo.get_by(id_from_stripe: subscription_id_from_stripe)
|> Repo.preload(stripe_connect_plan: [project: :organization])
end
@spec get_current_donation_goal(Project.t) :: DonationGoal.t | {:error, :donation_goal_not_found}
defp get_current_donation_goal(project) do
case Repo.get_by(DonationGoal, current: true, project_id: project.id) do
nil -> {:error, :donation_goal_not_found}
donation_goal -> {:ok, donation_goal}
end
end
@spec build_model(StripeConnectCharge.t, Project.t, DonationGoal.t) :: map
defp build_model(charge, project, current_donation_goal) do
%{
charge_amount: charge.amount |> format_amount(),
charge_statement_descriptor: charge.statement_descriptor,
high_five_image_url: high_five_image_url(),
name: BaseEmail.get_name(charge.user),
project_current_donation_goal_description: current_donation_goal.description,
project_title: project.title,
project_url: project |> url(),
subject: project |> build_subject_line()
}
end
@spec build_subject_line(Project.t) :: String.t
defp build_subject_line(project) do
"Your monthly donation to " <> project.title
end
@spec high_five_image_url :: String.t
defp high_five_image_url, do: Enum.random(high_five_image_urls())
@spec high_five_image_urls :: list(String.t)
defp high_five_image_urls, do: [
"https://d3pgew4wbk2vb1.cloudfront.net/emails/images/emoji-1f64c-1f3fb@2x.png",
"https://d3pgew4wbk2vb1.cloudfront.net/emails/images/emoji-1f64c-1f3fc@2x.png",
"https://d3pgew4wbk2vb1.cloudfront.net/emails/images/emoji-1f64c-1f3fd@2x.png",
"https://d3pgew4wbk2vb1.cloudfront.net/emails/images/emoji-1f64c-1f3fe@2x.png",
"https://d3pgew4wbk2vb1.cloudfront.net/emails/images/emoji-1f64c-1f3ff@2x.png"
]
@spec format_amount(integer) :: binary
defp format_amount(amount) do
amount |> Money.new(:USD) |> Money.to_string()
end
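# For example, `format_amount(1000)` renders 1000 cents as a dollar string
# (e.g. "$10.00", depending on the `Money` library's default formatting
# options).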
@spec url(Project.t) :: String.t
defp url(project) do
WebClient.url()
|> URI.merge(project.organization.slug <> "/" <> project.slug)
|> URI.to_string
end
@spec template_id :: String.t
defp template_id, do: Application.get_env(:code_corps, :postmark_receipt_template)
end
<|start_filename|>test/lib/code_corps/policy/github_app_installation_test.exs<|end_filename|>
defmodule CodeCorps.Policy.GithubAppInstallationTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.GithubAppInstallation, only: [create?: 2]
import CodeCorps.GithubAppInstallation, only: [create_changeset: 2]
alias CodeCorps.GithubAppInstallation
describe "create?/2" do
test "returns true when user is creating installation for project where they're an owner" do
project = insert(:project)
user = insert(:user)
insert(:project_user, project: project, user: user, role: "owner")
changeset = %GithubAppInstallation{} |> create_changeset(%{project_id: project.id, user_id: user.id})
assert create?(user, changeset)
end
test "returns false for normal user" do
project = insert(:project)
user = insert(:user)
changeset = %GithubAppInstallation{} |> create_changeset(%{project_id: project.id, user_id: user.id})
refute create?(user, changeset)
end
end
end
<|start_filename|>test/lib/code_corps/messages/conversation_query_test.exs<|end_filename|>
defmodule CodeCorps.Messages.ConversationQueryTest do
use CodeCorps.DbAccessCase
alias CodeCorps.{
Conversation,
Messages.ConversationQuery,
Repo
}
describe "status_filter/2" do
test "filters by status" do
open_conversation = insert(:conversation, status: "open")
_closed_conversation = insert(:conversation, status: "closed")
[result] =
Conversation
|> ConversationQuery.status_filter(%{"status" => "open"})
|> Repo.all()
assert result.id == open_conversation.id
end
end
end
<|start_filename|>lib/code_corps/github/sync/comment/changeset.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Comment.Changeset do
@moduledoc ~S"""
In charge of building a `Changeset` to update a `Comment` with, when handling
a GitHub Comment payload.
"""
alias CodeCorps.{
Comment,
GithubComment,
GitHub.Adapters,
Services.MarkdownRendererService,
Task,
User,
Validators.TimeValidator
}
alias Ecto.Changeset
@create_attrs ~w(created_at markdown modified_at)a
@doc ~S"""
Constructs a changeset for creating a `CodeCorps.Comment` when syncing from a
GitHub API Comment payload.
"""
@spec create_changeset(GithubComment.t(), Task.t(), User.t()) :: Changeset.t()
def create_changeset(
%GithubComment{} = github_comment,
%Task{} = task,
%User{} = user) do
%Comment{}
|> Changeset.cast(github_comment |> Adapters.Comment.to_comment, @create_attrs)
|> MarkdownRendererService.render_markdown_to_html(:markdown, :body)
|> Changeset.put_change(:created_from, "github")
|> Changeset.put_change(:modified_from, "github")
|> Changeset.put_assoc(:task, task)
|> Changeset.put_assoc(:github_comment, github_comment)
|> Changeset.put_change(:user, user)
|> Changeset.validate_required([:markdown, :body])
end
@update_attrs ~w(markdown modified_at)a
@doc ~S"""
Constructs a changeset for updating a `CodeCorps.Comment` when syncing from a
GitHub API Comment payload.
"""
@spec update_changeset(Comment.t(), GithubComment.t()) :: Changeset.t()
def update_changeset(
%Comment{} = comment,
%GithubComment{} = github_comment) do
comment
|> Changeset.cast(github_comment |> Adapters.Comment.to_comment, @update_attrs)
|> MarkdownRendererService.render_markdown_to_html(:markdown, :body)
|> Changeset.put_change(:modified_from, "github")
|> TimeValidator.validate_time_not_before(:modified_at)
|> Changeset.validate_required([:markdown, :body])
end
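# A minimal sync sketch (assumes the %GithubComment{}, %Task{} and %User{}
# records already exist locally):
#
#     github_comment
#     |> create_changeset(task, user)
#     |> CodeCorps.Repo.insert()
#
#     comment
#     |> update_changeset(github_comment)
#     |> CodeCorps.Repo.update()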
end
<|start_filename|>lib/code_corps/stripe_testing/helpers.ex<|end_filename|>
defmodule CodeCorps.StripeTesting.Helpers do
@moduledoc """
Used to load JSON fixtures which simulate Stripe API responses into
stripity_stripe structs
"""
@fixture_path "./lib/code_corps/stripe_testing/fixtures/"
@doc """
Load a stripe response fixture through stripity_stripe, into a
stripity_stripe struct
"""
@spec load_fixture(String.t) :: struct
def load_fixture(id) do
id
|> load_raw_fixture()
|> Stripe.Converter.convert_result
end
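# Example (assumes a fixture file such as "customer.json" exists under
# @fixture_path and describes a Stripe customer object):
#
#     %Stripe.Customer{} = load_fixture("customer")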
@spec load_raw_fixture(String.t) :: map
def load_raw_fixture(id) do
id
|> build_file_path
|> File.read!
|> Poison.decode!
end
defp build_file_path(id), do: id |> append_extension |> join_with_path
defp append_extension(id), do: id <> ".json"
defp join_with_path(filename), do: @fixture_path <> filename
end
<|start_filename|>lib/code_corps/emails/reply_to_conversation_email.ex<|end_filename|>
defmodule CodeCorps.Emails.ReplyToConversationEmail do
import Bamboo.Email, only: [to: 2]
import Bamboo.PostmarkHelper
alias CodeCorps.{
Conversation,
ConversationPart,
Emails.BaseEmail,
Message,
Organization,
Project,
User,
WebClient
}
@spec create(ConversationPart.t, User.t) :: Bamboo.Email.t
def create(
%ConversationPart{
author: %User{} = author,
conversation: %Conversation{
message: %Message{
project: %Project{} = project
}
} = conversation
},
%User{} = user) do
BaseEmail.create
|> to(user.email)
|> template(template_id(), %{
author_name: author.first_name,
conversation_url: project |> conversation_url(conversation),
name: user.first_name,
project_title: project.title,
subject: "#{author.first_name} replied to your conversation in #{project.title}"
})
end
@spec template_id :: String.t
defp template_id, do: Application.get_env(:code_corps, :postmark_reply_to_conversation_template)
@spec conversation_url(Project.t, Conversation.t) :: String.t
defp conversation_url(
%Project{organization: %Organization{slug: slug}, slug: project_slug},
%Conversation{id: id}) do
WebClient.url()
|> URI.merge("#{slug}/#{project_slug}/conversations/#{id}")
|> URI.to_string
end
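# For an organization slug of "acme", a project slug of "website" and a
# conversation id of 1, this should yield something like
# "https://<web-client-host>/acme/website/conversations/1" (hypothetical
# values; the host comes from `WebClient.url/0`).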
end
<|start_filename|>test/lib/code_corps_web/views/stripe_connect_plan_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectPlanViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
project = insert(:project)
plan = insert(:stripe_connect_plan, project: project)
rendered_json = render(CodeCorpsWeb.StripeConnectPlanView, "show.json-api", data: plan)
expected_json = %{
"data" => %{
"attributes" => %{
"amount" => plan.amount,
"created" => plan.created,
"id-from-stripe" => plan.id_from_stripe,
"inserted-at" => plan.inserted_at,
"name" => plan.name,
"updated-at" => plan.updated_at
},
"id" => plan.id |> Integer.to_string,
"relationships" => %{
"project" => %{
"data" => %{"id" => project.id |> Integer.to_string, "type" => "project"}
}
},
"type" => "stripe-connect-plan",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/support/json_api_helpers.ex<|end_filename|>
defmodule CodeCorps.JsonAPIHelpers do
@moduledoc ~S"""
Contains helpers used to build a Json API structured payload from a flat
attributes map.
"""
@spec build_json_payload(map) :: map
def build_json_payload(attrs = %{}, type \\ nil) do
%{
"data" => %{
"attributes" => attrs |> build_attributes(),
"relationships" => attrs |> build_relationships(),
"type" => type
}
}
end
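# A hypothetical example: flat values become attributes, while structs and
# lists of structs become relationships, so a call like
#
#     build_json_payload(%{title: "A task", project: project}, "task")
#
# should produce something like
#
#     %{"data" => %{
#         "attributes" => %{"title" => "A task"},
#         "relationships" => %{"project" => %{"data" => %{"id" => "1", "type" => "project"}}},
#         "type" => "task"}}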
@spec build_attributes(map) :: map
defp build_attributes(%{} = attrs) do
attrs
|> Enum.filter(&attribute?(&1))
|> Enum.reduce(%{}, &add_attribute(&1, &2))
end
@spec attribute?(tuple) :: boolean
defp attribute?({_key, %DateTime{} = _val}), do: true
defp attribute?({_key, val}) when is_list(val), do: false
defp attribute?({_key, val}) when is_map(val), do: false
defp attribute?({_key, _val}), do: true
@spec add_attribute(tuple, map) :: map
defp add_attribute({key, value}, %{} = attrs) do
attrs |> Map.put(key |> Atom.to_string, value)
end
@spec build_relationships(list | map) :: map
defp build_relationships(%{} = attrs) do
attrs
|> Enum.filter(&relationship?(&1))
|> Enum.reduce(%{}, &add_relationship(&1, &2))
end
@spec relationship?(any) :: boolean
defp relationship?(tuple), do: !attribute?(tuple)
@spec add_relationship(tuple, map) :: map
defp add_relationship({_, []}, %{} = rels), do: rels
defp add_relationship({atom_key, list}, %{} = rels) when is_list(list) do
string_key = atom_key |> Atom.to_string
items = list |> Enum.map(&resource_identifier_from_record(&1))
rels |> Map.put(string_key, %{"data" => items})
end
defp add_relationship({atom_key, record}, %{} = rels) do
with resource_identifier <- record |> resource_identifier_from_record(),
string_key = atom_key |> Atom.to_string
do
rels |> Map.put(string_key, %{"data" => resource_identifier})
end
end
@spec model_name_as_string(struct) :: String.t
defp model_name_as_string(record) do
record.__struct__
|> Module.split
|> List.last
|> String.downcase
end
@spec to_correct_type(any) :: any
defp to_correct_type(value) when is_integer(value), do: value |> Integer.to_string
defp to_correct_type(value), do: value
defp resource_identifier_from_record(record) do
%{
"id" => record.id |> to_correct_type(),
"type" => record |> model_name_as_string()
}
end
end
<|start_filename|>test/lib/code_corps_web/views/comment_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.CommentViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
task = insert(:task)
user = insert(:user)
comment = insert(:comment, user: user, task: task)
rendered_json = render(CodeCorpsWeb.CommentView, "show.json-api", data: comment)
expected_json = %{
"data" => %{
"id" => comment.id |> Integer.to_string,
"type" => "comment",
"attributes" => %{
"body" => comment.body,
"created-at" => comment.created_at,
"created-from" => comment.created_from,
"inserted-at" => comment.inserted_at,
"markdown" => comment.markdown,
"modified-at" => comment.modified_at,
"modified-from" => comment.modified_from,
"updated-at" => comment.updated_at
},
"relationships" => %{
"task" => %{
"data" => %{
"id" => comment.task_id |> Integer.to_string,
"type" => "task"
}
},
"user" => %{
"data" => %{
"id" => comment.user_id |> Integer.to_string,
"type" => "user"
}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert expected_json == rendered_json
end
end
<|start_filename|>test/lib/code_corps/github/adapters/app_installation_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.AppInstallationTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Adapters.AppInstallation
describe "from_installation_event/1" do
test "maps api payload correctly" do
payload = load_event_fixture("installation_created")
assert AppInstallation.from_installation_event(payload) == %{
github_id: payload["installation"]["id"],
github_account_id: payload["installation"]["account"]["id"],
github_account_login: payload["installation"]["account"]["login"],
github_account_avatar_url: payload["installation"]["account"]["avatar_url"],
github_account_type: payload["installation"]["account"]["type"],
sender_github_id: payload["sender"]["id"],
}
end
end
end
<|start_filename|>lib/code_corps/policy/donation_goal.ex<|end_filename|>
defmodule CodeCorps.Policy.DonationGoal do
import CodeCorps.Policy.Helpers, only: [get_project: 1, owned_by?: 2]
alias CodeCorps.{DonationGoal, User}
@spec create?(User.t, map) :: boolean
def create?(%User{} = user, %{} = params),
do: params |> get_project |> owned_by?(user)
@spec update?(User.t, DonationGoal.t) :: boolean
def update?(%User{} = user, %DonationGoal{} = donation_goal),
do: donation_goal |> get_project |> owned_by?(user)
@spec delete?(User.t, DonationGoal.t) :: boolean
def delete?(%User{} = user, %DonationGoal{} = donation_goal),
do: donation_goal |> get_project |> owned_by?(user)
end
<|start_filename|>lib/code_corps/policy/project.ex<|end_filename|>
defmodule CodeCorps.Policy.Project do
import CodeCorps.Policy.Helpers,
only: [get_organization: 1, owned_by?: 2, administered_by?: 2]
alias CodeCorps.{Project, User}
@spec create?(User.t, map) :: boolean
def create?(%User{} = user, params) do
params |> get_organization() |> owned_by?(user)
end
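# Rules for update?/3, sketched from the clauses below: site admins may update
# any project, non-admins may never set "approved" => true, and otherwise the
# user must administer the project.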
@spec update?(User.t, Project.t, map) :: boolean
def update?(%User{admin: true}, %Project{}, %{}), do: true
def update?(%User{}, %Project{}, %{"approved" => true}), do: false
def update?(%User{} = user, %Project{} = project, _), do: project |> administered_by?(user)
end
<|start_filename|>test/lib/code_corps_web/views/organization_github_app_installation_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.OrganizationGithubAppInstallationViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
github_app_installation = insert(:github_app_installation)
organization = insert(:organization)
organization_github_app_installation = insert(:organization_github_app_installation, github_app_installation: github_app_installation, organization: organization)
rendered_json = render(CodeCorpsWeb.OrganizationGithubAppInstallationView, "show.json-api", data: organization_github_app_installation)
expected_json = %{
"data" => %{
"id" => organization_github_app_installation.id |> Integer.to_string,
"type" => "organization-github-app-installation",
"attributes" => %{
"inserted-at" => organization_github_app_installation.inserted_at,
"updated-at" => organization_github_app_installation.updated_at
},
"relationships" => %{
"github-app-installation" => %{
"data" => %{"id" => organization_github_app_installation.github_app_installation_id |> Integer.to_string, "type" => "github-app-installation"}
},
"organization" => %{
"data" => %{"id" => organization_github_app_installation.organization_id |> Integer.to_string, "type" => "organization"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/lib/code_corps_web/controllers/project_skill_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ProjectSkillControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :project_skill
alias CodeCorps.{Analytics.SegmentTraitsBuilder, ProjectSkill, Repo}
describe "index" do
test "lists all entries on index", %{conn: conn} do
[project_skill_1, project_skill_2] = insert_pair(:project_skill)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([project_skill_1.id, project_skill_2.id])
end
test "filters resources on index", %{conn: conn} do
[project_skill_1, project_skill_2 | _] = insert_list(3, :project_skill)
path = "project-skills/?filter[id]=#{project_skill_1.id},#{project_skill_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([project_skill_1.id, project_skill_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
skill = insert(:skill)
project = insert(:project)
project_skill = insert(:project_skill, project: project, skill: skill)
conn
|> request_show(project_skill)
|> json_response(200)
|> assert_id_from_response(project_skill.id)
end
test "renders 404 error when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
skill = insert(:skill)
attrs = %{project: project, skill: skill}
assert conn |> request_create(attrs) |> json_response(201)
end
@tag :authenticated
test "tracks on segment", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
skill = insert(:skill)
attrs = %{project: project, skill: skill}
conn |> request_create(attrs)
user_id = current_user.id
traits = ProjectSkill |> Repo.one |> SegmentTraitsBuilder.build
assert_received({:track, ^user_id, "Added Project Skill", ^traits})
end
@tag :authenticated
test "renders 422 error when data is invalid", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
invalid_attrs = %{project: project}
assert conn |> request_create(invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "delete" do
@tag :authenticated
test "deletes chosen resource", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
project_skill = insert(:project_skill, project: project)
assert conn |> request_delete(project_skill) |> response(204)
end
@tag :authenticated
test "tracks on segment", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
project_skill = insert(:project_skill, project: project)
conn |> request_delete(project_skill)
user_id = current_user.id
traits = project_skill |> SegmentTraitsBuilder.build
assert_received({:track, ^user_id, "Removed Project Skill", ^traits})
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent on delete", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>priv/repo/migrations/20171115201624_drop_github_repos_project_id_unique_index_if_exists.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.DropGithubReposProjectIdUniqueIndexIfExists do
use Ecto.Migration
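# Replaces the unique index on `project_id` with a plain index, so a project
# can be linked to more than one GitHub repo.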
def up do
drop_if_exists index(:github_repos, [:project_id], unique: true)
create_if_not_exists index(:github_repos, [:project_id])
end
def down do
# no-op
end
end
<|start_filename|>test/lib/code_corps/policy/github_repo_test.exs<|end_filename|>
defmodule CodeCorps.Policy.GithubRepoTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.GithubRepo, only: [update?: 3]
alias CodeCorps.GithubRepo
describe "update?/3" do
test "returns true when user is adding project where they're an admin" do
project = insert(:project)
user = insert(:user)
insert(:project_user, project: project, user: user, role: "admin")
github_repo = %GithubRepo{project_id: project.id}
assert update?(user, github_repo, %{})
end
test "returns true when user is removing project where they're an admin" do
project = insert(:project)
user = insert(:user)
insert(:project_user, project: project, user: user, role: "admin")
assert update?(user, %GithubRepo{}, %{"project_id" => project.id})
end
test "returns false for normal user" do
project = insert(:project)
user = insert(:user)
github_repo = %GithubRepo{project_id: project.id}
refute update?(user, github_repo, %{})
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/stripe_platform_card_service_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.StripePlatformCardServiceTest do
use ExUnit.Case, async: true
use CodeCorps.ModelCase
alias CodeCorps.StripePlatformCard
alias CodeCorps.StripeService.StripePlatformCardService
describe "update_from_stripe/1" do
test "it just updates the platform card if there is nothing associated to update" do
platform_card = insert(:stripe_platform_card)
{:ok, %StripePlatformCard{} = platform_card, nil} =
StripePlatformCardService.update_from_stripe(platform_card.id_from_stripe)
assert platform_card.exp_year == 2020
end
# TODO: We can't really do this test until we are able to mock stripe API data
# test "it returns an {:error, changeset} if there are validation errors with the platform_card" do
# platform_card = insert(:stripe_platform_card)
# {:error, changeset} =
# StripePlatformCardService.update_from_stripe(platform_card.id_from_stripe)
# refute changeset.valid?
# end
test "it also updates the associated connect cards if there are any" do
platform_card = insert(:stripe_platform_card)
[connect_card_1, connect_card_2] = insert_pair(:stripe_connect_card, stripe_platform_card: platform_card)
{:ok, %StripePlatformCard{} = platform_card, connect_updates} =
StripePlatformCardService.update_from_stripe(platform_card.id_from_stripe)
assert platform_card.exp_year == 2020
platform_card = Repo.get(StripePlatformCard, platform_card.id)
assert platform_card.exp_year == 2020
[
{:ok, %Stripe.Card{} = stripe_record_1},
{:ok, %Stripe.Card{} = stripe_record_2}
] = connect_updates
assert stripe_record_1.id == connect_card_1.id_from_stripe
assert stripe_record_1.exp_year == 2020
assert stripe_record_2.id == connect_card_2.id_from_stripe
assert stripe_record_2.exp_year == 2020
end
end
end
<|start_filename|>test/lib/code_corps/emails/project_user_request_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.ProjectUserRequestEmailTest do
use CodeCorps.ModelCase
use Bamboo.Test
alias CodeCorps.Emails.ProjectUserRequestEmail
test "request email works" do
project = insert(:project)
%{user: requesting_user} = project_user = insert(:project_user, project: project)
%{user: owner1} = insert(:project_user, project: project, role: "owner")
%{user: owner2} = insert(:project_user, project: project, role: "owner")
email = ProjectUserRequestEmail.create(project_user)
assert email.from == "Code Corps<<EMAIL>>"
assert Enum.count(email.to) == 2
assert Enum.member?(email.to, owner1.email)
assert Enum.member?(email.to, owner2.email)
template_model = email.private.template_model
assert template_model == %{
contributors_url: "http://localhost:4200/#{project.organization.slug}/#{project.slug}/people",
project_title: project.title,
project_logo_url: "#{Application.get_env(:code_corps, :asset_host)}/icons/project_default_large_.png",
user_image_url: "#{Application.get_env(:code_corps, :asset_host)}/icons/user_default_large_.png",
user_first_name: requesting_user.first_name,
subject: "#{requesting_user.first_name} wants to join #{project.title}"
}
end
end
<|start_filename|>lib/code_corps_web/controllers/github_pull_request_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubPullRequestController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{GithubPullRequest, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with github_pull_requests <- GithubPullRequest |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: github_pull_requests)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %GithubPullRequest{} = github_pull_request <- GithubPullRequest |> Repo.get(id) do
conn |> render("show.json-api", data: github_pull_request)
end
end
end
<|start_filename|>test/lib/code_corps/github/sync/sync_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.SyncTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
import Ecto.Query, only: [where: 3]
alias CodeCorps.{
Comment,
GitHub.Adapters,
GitHub.Sync,
GithubAppInstallation,
GithubComment,
GithubIssue,
GithubPullRequest,
GithubRepo,
GithubUser,
Repo,
Task,
TaskList,
User
}
alias Ecto.Changeset
describe "pull_request_event" do
["pull_request_opened","pull_request_closed","pull_request_edited", "pull_request_opened_by_bot","pull_request_reopened"]
|> Enum.each(fn payload_name ->
@event payload_name
test "runs succesfully when " <> @event do
payload = load_event_fixture(@event)
project = insert(:project)
insert(:github_repo, github_id: payload["repository"]["id"], project: project)
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
insert(:task_list, project: project, pull_requests: true)
{:ok, _map} = Sync.pull_request_event(payload)
end
test "fails if repo not found when " <> @event do
payload = load_event_fixture(@event)
{:error, :repo_not_found} = Sync.pull_request_event(payload)
end
test "fails if api errors out when " <> @event do
payload = load_event_fixture(@event)
project = insert(:project)
insert(:github_repo, github_id: payload["repository"]["id"], project: project)
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
insert(:task_list, project: project, pull_requests: true)
with_mock_api(CodeCorps.GitHub.FailureAPI) do
assert {:error, :fetching_issue, _error} = Sync.pull_request_event(payload)
end
end
test "fails with validation error if pull request is invalid when " <> @event do
payload = load_event_fixture(@event)
project = insert(:project)
insert(:github_repo, github_id: payload["repository"]["id"], project: project)
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
insert(:task_list, project: project, pull_requests: true)
%{"pull_request" => pull} = payload
corrupt_pull = %{pull | "created_at" => nil, "updated_at" => nil, "html_url" => nil, "locked" => nil,
"number" => nil, "state" => nil, "title" => nil }
corrupt_pull_request = Map.put(payload, "pull_request", corrupt_pull)
{:error, :validating_github_pull_request, _changeset} = Sync.pull_request_event(corrupt_pull_request)
end
test "fails with validation error if task_list isn't found " <> @event do
payload = load_event_fixture(@event)
project = insert(:project)
insert(:github_repo, github_id: payload["repository"]["id"], project: project)
{:error, :validating_task, _changeset} = Sync.pull_request_event(payload)
end
end)
end
# Some clauses defined seem difficult or impossible to reach so their tests were omitted
# - {:error, :validation_error_on_syncing_installation, Changeset.t()}
# - {:error, :validation_error_on_marking_installation_processed, Changeset.t()}
# - {:error, :unexpected_transaction_outcome, any}
# However, if these clauses can be caused by some updates upstream we should cover them with tests
describe "installation_event" do
@payload load_event_fixture("installation_created")
test "syncs_correctly_with valid data" do
%{"installation" => %{"id" => installation_id}} = @payload
assert Repo.aggregate(GithubAppInstallation, :count, :id) == 0
{:ok, installation} = Sync.installation_event(@payload)
assert Repo.aggregate(GithubAppInstallation, :count, :id) == 1
assert installation.github_id == installation_id
end
test "fails if multiple installations are unprocessed" do
user = insert(:user, github_id: @payload["sender"]["id"])
project = insert(:project)
attrs = %{project: project, user: user, sender_github_id: user.id, github_id: nil}
insert(:github_app_installation, attrs)
insert(:github_app_installation, attrs)
{:error, :multiple_unprocessed_installations_found} = Sync.installation_event(@payload)
end
test "fails on syncing api error" do
with_mock_api(CodeCorps.GitHub.FailureAPI) do
assert {:error, :github_api_error_on_syncing_repos, _error} = Sync.installation_event(@payload)
end
end
end
describe "installation_repositories_event/1 added" do
@payload load_event_fixture("installation_repositories_added")
test "syncs_correctly when adding" do
%{"installation" => %{
"id" => installation_id
},
"repositories_added" => added_repos,
"sender" => %{"id" => _user_id}
} = @payload
project = insert(:project)
user = insert(:user)
insert(:github_app_installation, github_id: installation_id, project: project, user: user)
{:ok, _repos} = Sync.installation_repositories_event(@payload)
repo_ids = Enum.map(added_repos, &Map.get(&1, "id"))
for repo <- Repo.all(GithubRepo) do
assert repo.github_id in repo_ids
end
assert Repo.aggregate(GithubRepo, :count, :id) == 2
assert Repo.aggregate(GithubAppInstallation, :count, :id) == 1
end
test "can fail when installation not found" do
assert {:error, :unmatched_installation} == @payload |> Sync.installation_repositories_event()
end
test "fails with validation errors when syncing repos" do
%{"installation" => %{
"id" => installation_id
},
"repositories_added" => repos,
"sender" => %{"id" => _user_id}
} = @payload
project = insert(:project)
user = insert(:user)
insert(:github_app_installation, github_id: installation_id, project: project, user: user)
corrupt_repos = Enum.map(repos, &(Map.put(&1,"name", "")))
corrupted_payload = Map.put(@payload, "repositories_added", corrupt_repos)
assert {:error, :validation_error_on_syncing_repos, %{}} == corrupted_payload |> Sync.installation_repositories_event()
end
end
describe "installation_repositories_event/1 removed" do
@payload load_event_fixture("installation_repositories_removed")
test "syncs_correctly when removing" do
%{"installation" => %{
"id" => installation_id
},
"repositories_removed" => removed_repos
} = @payload
project = insert(:project)
user = insert(:user)
installation = insert(:github_app_installation, github_id: installation_id, project: project, user: user)
for repo <- removed_repos do
insert(:github_repo, github_id: repo["id"], github_app_installation: installation)
end
assert Repo.aggregate(GithubRepo, :count, :id) == 2
assert Repo.aggregate(GithubAppInstallation, :count, :id) == 1
{:ok, _repos} = Sync.installation_repositories_event(@payload)
assert Repo.aggregate(GithubRepo, :count, :id) == 0
end
end
describe "issue_comment_event/1 on comment created for pull request" do
@issue_comment_preloads [
:user,
[task: :user],
[github_comment: [github_issue: [:github_pull_request, :github_repo]]]
]
@payload load_event_fixture("issue_comment_created_on_pull_request")
test "syncs correctly" do
%{
"issue" => %{
"body" => issue_body,
"id" => issue_github_id,
"number" => issue_number,
"user" => %{
"id" => issue_user_github_id
}
},
"comment" => %{
"body" => comment_body,
"id" => comment_github_id,
"user" => %{
"id" => comment_user_github_id
}
},
"repository" => %{
"id" => repo_github_id
}
} = @payload
project = insert(:project)
github_repo = insert(:github_repo, github_id: repo_github_id, project: project)
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
insert(:task_list, project: project, pull_requests: true)
{:ok, comment} = Sync.issue_comment_event(@payload)
assert Repo.aggregate(GithubComment, :count, :id) == 1
assert Repo.aggregate(GithubIssue, :count, :id) == 1
assert Repo.aggregate(GithubPullRequest, :count, :id) == 1
assert Repo.aggregate(Comment, :count, :id) == 1
assert Repo.aggregate(Task, :count, :id) == 1
issue_user = Repo.get_by(User, github_id: issue_user_github_id)
assert issue_user
comment_user = Repo.get_by(User, github_id: comment_user_github_id)
assert comment_user
%{
github_comment: %{
github_issue: %{
github_pull_request: github_pull_request
} = github_issue
} = github_comment,
task: task
} = comment = comment |> Repo.preload(@issue_comment_preloads)
assert github_comment.github_id == comment_github_id
assert github_issue.github_id == issue_github_id
assert github_issue.body == issue_body
assert github_issue.number == issue_number
assert github_pull_request.number == issue_number
assert github_pull_request.github_repo_id == github_repo.id
assert task.markdown == issue_body
assert task.project_id == project.id
assert task.user.github_id == issue_user_github_id
assert task.user_id == issue_user.id
assert comment.markdown == comment_body
assert comment.user_id == comment_user.id
assert comment.user.github_id == comment_user_github_id
end
test "can fail when finding repo" do
assert {:error, :repo_not_found} == @payload |> Sync.issue_comment_event()
end
test "can fail when fetching pull request" do
insert(:github_repo, github_id: @payload["repository"]["id"])
with_mock_api(CodeCorps.GitHub.FailureAPI) do
assert {:error, :fetching_pull_request, %CodeCorps.GitHub.APIError{}} =
@payload |> Sync.issue_comment_event()
end
end
test "can fail on github pull request validation" do
defmodule InvalidPullRequestAPI do
@moduledoc false
def request(:get, "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1", _, _, _) do
{:ok, body} =
"pull_request"
|> load_endpoint_fixture()
|> Map.put("number", nil)
|> Poison.encode
{:ok, %HTTPoison.Response{status_code: 200, body: body}}
end
def request(method, endpoint, body, headers, options) do
CodeCorps.GitHub.SuccessAPI.request(method, endpoint, body, headers, options)
end
end
insert(:github_repo, github_id: @payload["repository"]["id"])
with_mock_api(InvalidPullRequestAPI) do
assert {:error, :validating_github_pull_request, %Changeset{} = changeset} =
@payload |> Sync.issue_comment_event()
refute changeset.valid?
end
end
test "can fail on github user validation for github pull request" do
defmodule InvalidUserAPI do
@moduledoc false
def request(:get, "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1", _, _, _) do
{:ok, body} =
"pull_request"
|> load_endpoint_fixture()
|> Kernel.put_in(["user", "login"], nil)
|> Poison.encode
{:ok, %HTTPoison.Response{status_code: 200, body: body}}
end
def request(method, endpoint, body, headers, options) do
CodeCorps.GitHub.SuccessAPI.request(method, endpoint, body, headers, options)
end
end
insert(:github_repo, github_id: @payload["repository"]["id"])
with_mock_api(InvalidUserAPI) do
assert {
:error,
:validating_github_user_on_github_pull_request,
%Changeset{} = changeset
} = @payload |> Sync.issue_comment_event()
refute changeset.valid?
end
end
test "can fail on github issue validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
assert {:error, :validating_github_issue, %Changeset{} = changeset} =
@payload
|> Kernel.put_in(["issue", "number"], nil)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on github user validation for github issue" do
insert(:github_repo, github_id: @payload["repository"]["id"])
assert {
:error,
:validating_github_user_on_github_issue,
%Changeset{} = changeset
} =
@payload
|> Kernel.put_in(["issue", "user", "login"], nil)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on task user validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
# setup data to trigger a unique constraint
email = "<EMAIL>"
insert(:user, email: email)
payload = @payload |> Kernel.put_in(["issue", "user", "email"], email)
assert {:error, :validating_task_user, %Changeset{} = changeset} =
payload |> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail if task matched with multiple users" do
github_repo =
insert(:github_repo, github_id: @payload["repository"]["id"])
attrs =
@payload["issue"]
|> Adapters.Issue.to_issue()
|> Map.put(:github_repo, github_repo)
github_issue = insert(:github_issue, attrs)
# creates a user for each task, which should never happen normally
insert_pair(:task, github_issue: github_issue)
assert {:error, :multiple_task_users_match} ==
@payload |> Sync.issue_comment_event()
end
test "can fail on task validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
# validation is triggered due to missing task list
assert {:error, :validating_task, %Changeset{} = changeset} =
@payload |> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on github comment validation" do
%{project: project} =
insert(:github_repo, github_id: @payload["repository"]["id"])
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, pull_requests: true)
assert {:error, :validating_github_comment, %Changeset{} = changeset} =
@payload
|> Kernel.put_in(["comment", "url"], nil)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on github user validation for github comment" do
%{project: project} =
insert(:github_repo, github_id: @payload["repository"]["id"])
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, pull_requests: true)
assert {
:error,
:validating_github_user_on_github_comment,
%Changeset{} = changeset
} =
@payload
|> Kernel.put_in(["comment", "user", "login"], nil)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on comment user validation" do
%{project: project} =
insert(:github_repo, github_id: @payload["repository"]["id"])
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, pull_requests: true)
# setup data to trigger a unique constraint
email = "<EMAIL>"
insert(:user, email: email)
assert {:error, :validating_comment_user, %Changeset{} = changeset} =
@payload
|> Kernel.put_in(["comment", "user", "email"], email)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail if commment matched with multiple users" do
%{project: project} = github_repo =
insert(:github_repo, github_id: @payload["repository"]["id"])
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, pull_requests: true)
attrs =
@payload["comment"]
|> Adapters.Comment.to_github_comment()
|> Map.put(:github_repo, github_repo)
github_comment = insert(:github_comment, attrs)
# creates a user for each comment, which should never happen normally
insert_pair(:comment, github_comment: github_comment)
assert {:error, :multiple_comment_users_match} ==
@payload |> Sync.issue_comment_event()
end
end
describe "issue_comment_event/1 on comment created for regular issue" do
@payload load_event_fixture("issue_comment_created")
test "syncs correctly" do
%{
"issue" => %{
"body" => issue_body,
"id" => issue_github_id,
"number" => issue_number,
"user" => %{
"id" => issue_user_github_id
}
},
"comment" => %{
"body" => comment_body,
"id" => comment_github_id,
"user" => %{
"id" => comment_user_github_id
}
},
"repository" => %{
"id" => repo_github_id
}
} = @payload
project = insert(:project)
insert(:github_repo, github_id: repo_github_id, project: project)
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
{:ok, comment} = Sync.issue_comment_event(@payload)
assert Repo.aggregate(GithubComment, :count, :id) == 1
assert Repo.aggregate(GithubIssue, :count, :id) == 1
assert Repo.aggregate(GithubPullRequest, :count, :id) == 0
assert Repo.aggregate(Comment, :count, :id) == 1
assert Repo.aggregate(Task, :count, :id) == 1
issue_user = Repo.get_by(User, github_id: issue_user_github_id)
assert issue_user
comment_user = Repo.get_by(User, github_id: comment_user_github_id)
assert comment_user
%{
github_comment: %{
github_issue: %{
github_pull_request: github_pull_request
} = github_issue
} = github_comment,
task: task
} = comment = comment |> Repo.preload(@issue_comment_preloads)
assert github_comment.github_id == comment_github_id
assert github_issue.github_id == issue_github_id
assert github_issue.body == issue_body
assert github_issue.number == issue_number
assert github_pull_request == nil
assert task.markdown == issue_body
assert task.project_id == project.id
assert task.user.github_id == issue_user_github_id
assert task.user_id == issue_user.id
assert comment.markdown == comment_body
assert comment.user_id == comment_user.id
assert comment.user.github_id == comment_user_github_id
end
test "can fail when finding repo" do
assert {:error, :repo_not_found} == @payload |> Sync.issue_comment_event()
end
test "can fail on github issue validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
assert {:error, :validating_github_issue, %Changeset{} = changeset} =
@payload
|> Kernel.put_in(["issue", "number"], nil)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on github user validation for github issue" do
insert(:github_repo, github_id: @payload["repository"]["id"])
assert {
:error,
:validating_github_user_on_github_issue,
%Changeset{} = changeset
} =
@payload
|> Kernel.put_in(["issue", "user", "login"], nil)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on task user validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
# setup data to trigger a unique constraint
email = "<EMAIL>"
insert(:user, email: email)
payload = @payload |> Kernel.put_in(["issue", "user", "email"], email)
assert {:error, :validating_task_user, %Changeset{} = changeset} =
payload |> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail if task matched with multiple users" do
github_repo =
insert(:github_repo, github_id: @payload["repository"]["id"])
attrs =
@payload["issue"]
|> Adapters.Issue.to_issue()
|> Map.put(:github_repo, github_repo)
github_issue = insert(:github_issue, attrs)
# creates a user for each task, which should never happen normally
insert_pair(:task, github_issue: github_issue)
assert {:error, :multiple_task_users_match} ==
@payload |> Sync.issue_comment_event()
end
test "can fail on task validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
# validation is triggered due to missing task list
assert {:error, :validating_task, %Changeset{} = changeset} =
@payload |> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on github comment validation" do
%{project: project} =
insert(:github_repo, github_id: @payload["repository"]["id"])
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
assert {:error, :validating_github_comment, %Changeset{} = changeset} =
@payload
|> Kernel.put_in(["comment", "url"], nil)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on github user validation for github comment" do
%{project: project} =
insert(:github_repo, github_id: @payload["repository"]["id"])
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
assert {
:error,
:validating_github_user_on_github_comment,
%Changeset{} = changeset
} =
@payload
|> Kernel.put_in(["comment", "user", "login"], nil)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail on comment user validation" do
%{project: project} =
insert(:github_repo, github_id: @payload["repository"]["id"])
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
# setup data to trigger a unique constraint
email = "<EMAIL>"
insert(:user, email: email)
assert {:error, :validating_comment_user, %Changeset{} = changeset} =
@payload
|> Kernel.put_in(["comment", "user", "email"], email)
|> Sync.issue_comment_event()
refute changeset.valid?
end
test "can fail if commment matched with multiple users" do
%{project: project} = github_repo =
insert(:github_repo, github_id: @payload["repository"]["id"])
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
attrs =
@payload["comment"]
|> Adapters.Comment.to_github_comment()
|> Map.put(:github_repo, github_repo)
github_comment = insert(:github_comment, attrs)
# creates a user for each comment, which should never happen normally
insert_pair(:comment, github_comment: github_comment)
assert {:error, :multiple_comment_users_match} ==
@payload |> Sync.issue_comment_event()
end
end
describe "issue_comment_event/1 on comment deleted" do
test "syncs correctly" do
%{"comment" => %{"id" => github_id}} = payload =
load_event_fixture("issue_comment_deleted")
github_comment = insert(:github_comment, github_id: github_id)
comment = insert(:comment, github_comment: github_comment)
{:ok, %{deleted_comments: [deleted_comment], deleted_github_comment: deleted_github_comment}} =
payload |> Sync.issue_comment_event()
assert deleted_comment.id == comment.id
assert deleted_github_comment.id == github_comment.id
assert Repo.aggregate(Comment, :count, :id) == 0
assert Repo.aggregate(GithubComment, :count, :id) == 0
end
end
describe "issue_event/1" do
@payload load_event_fixture("issues_opened")
test "with unmatched user, creates user, creates task for project associated to github repo" do
%{
"issue" => %{
"body" => markdown, "title" => title, "number" => number,
"user" => %{"id" => user_github_id}
},
"repository" => %{"id" => repo_github_id}
} = @payload
project = insert(:project)
github_repo = insert(:github_repo, github_id: repo_github_id, project: project)
insert(:task_list, project: project, inbox: true)
{:ok, %Task{} = task} = @payload |> Sync.issue_event()
assert Repo.aggregate(Task, :count, :id) == 1
user = Repo.get_by(User, github_id: user_github_id)
assert user
task = task |> Repo.preload(:github_issue)
assert task.user_id == user.id
assert task.github_issue_id
assert task.github_repo_id == github_repo.id
assert task.project_id == project.id
assert task.markdown == markdown
assert task.title == title
assert task.github_issue.number == number
assert task.status == "open"
assert task.order
end
test "with matched user, creates or updates task for project associated to github repo" do
%{
"issue" => %{
"id" => issue_github_id,
"body" => markdown,
"title" => title,
"number" => number,
"user" => %{"id" => user_github_id}
} ,
"repository" => %{"id" => repo_github_id}
} = @payload
user = insert(:user, github_id: user_github_id)
project = insert(:project)
github_repo = insert(:github_repo, github_id: repo_github_id, project: project)
github_issue = insert(:github_issue, github_id: issue_github_id, number: number, github_repo: github_repo)
insert(:task_list, project: project, inbox: true)
existing_task = insert(:task, project: project, user: user, github_repo: github_repo, github_issue: github_issue)
{:ok, %Task{} = task} = @payload |> Sync.issue_event()
assert Repo.aggregate(Task, :count, :id) == 1
task = task |> Repo.preload(:github_issue)
assert task.github_issue_id == github_issue.id
assert task.github_repo_id == github_repo.id
assert task.project_id == project.id
assert task.markdown == markdown
assert task.title == title
assert task.github_issue.number == number
assert task.status == "open"
assert task.order
assert existing_task.id == task.id
end
test "can fail when finding repo" do
assert {:error, :repo_not_found} == @payload |> Sync.issue_event()
end
test "can fail on github issue validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
assert {:error, :validating_github_issue, %Changeset{} = changeset} =
@payload
|> Kernel.put_in(["issue", "number"], nil)
|> Sync.issue_event()
refute changeset.valid?
end
test "can fail on github user validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
assert {:error, :validating_github_user, %Changeset{} = changeset} =
@payload
|> Kernel.put_in(["issue", "user", "login"], nil)
|> Sync.issue_event()
refute changeset.valid?
end
test "can fail on user validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
# setup data to trigger a unique constraint
email = "<EMAIL>"
insert(:user, email: email)
payload = @payload |> Kernel.put_in(["issue", "user", "email"], email)
assert {:error, :validating_user, %Changeset{} = changeset} =
payload |> Sync.issue_event()
refute changeset.valid?
end
test "can fail if matched by multiple users" do
github_repo =
insert(:github_repo, github_id: @payload["repository"]["id"])
attrs =
@payload["issue"]
|> Adapters.Issue.to_issue()
|> Map.put(:github_repo, github_repo)
github_issue = insert(:github_issue, attrs)
# creates a user for each task, which should never happen normally
insert_pair(:task, github_issue: github_issue)
assert {:error, :multiple_task_users_match} ==
@payload |> Sync.issue_event()
end
test "can fail on task validation" do
insert(:github_repo, github_id: @payload["repository"]["id"])
# validation is triggered due to missing task list
assert {:error, :validating_task, %Changeset{} = changeset} =
@payload |> Sync.issue_event()
refute changeset.valid?
end
end
describe "sync_repo/1" do
defp setup_test_repo do
project = insert(:project)
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
insert(:task_list, project: project, pull_requests: true)
owner = "baxterthehacker"
repo = "public-repo"
github_app_installation = insert(:github_app_installation, github_account_login: owner)
insert(
:github_repo,
github_app_installation: github_app_installation,
name: repo,
github_account_id: 6_752_317,
github_account_avatar_url: "https://avatars3.githubusercontent.com/u/6752317?v=4",
github_account_type: "User",
github_id: 35_129_377,
project: project)
end
test "syncs and resyncs with the project repo" do
github_repo = setup_test_repo()
# Sync the first time
Sync.sync_repo(github_repo)
repo = Repo.one(GithubRepo)
assert repo.syncing_pull_requests_count == 4
assert repo.syncing_issues_count == 8
assert repo.syncing_comments_count == 12
assert Repo.aggregate(GithubComment, :count, :id) == 12
assert Repo.aggregate(GithubIssue, :count, :id) == 8
assert Repo.aggregate(GithubPullRequest, :count, :id) == 4
assert Repo.aggregate(GithubUser, :count, :id) == 10
assert Repo.aggregate(Comment, :count, :id) == 12
assert Repo.aggregate(Task, :count, :id) == 8
assert Repo.aggregate(User, :count, :id) == 13
# Sync a second time – should run without trouble
Sync.sync_repo(github_repo)
repo = Repo.one(GithubRepo)
assert repo.syncing_pull_requests_count == 4
assert repo.syncing_issues_count == 8
assert repo.syncing_comments_count == 12
assert Repo.aggregate(GithubComment, :count, :id) == 12
assert Repo.aggregate(GithubIssue, :count, :id) == 8
assert Repo.aggregate(GithubPullRequest, :count, :id) == 4
assert Repo.aggregate(GithubUser, :count, :id) == 10
assert Repo.aggregate(Comment, :count, :id) == 12
assert Repo.aggregate(Task, :count, :id) == 8
assert Repo.aggregate(User, :count, :id) == 13
end
# coupled to fixtures. depends on
# - fixtures/github/endpoints/issues.json on having at least 4 issues
# linked to pull requests
# - fixtures/github/endpoints/pulls.json having payloads for those 4 pull
# requests (matched by "number")
test "matches github issue with github pull request correctly" do
{:ok, github_repo} = setup_test_repo() |> Sync.sync_repo
%GithubRepo{github_issues: github_issues} =
GithubRepo |> Repo.get(github_repo.id) |> Repo.preload(:github_issues)
linked_issues =
github_issues
|> Enum.reject(fn i -> is_nil(i.github_pull_request_id) end)
assert linked_issues |> Enum.count == 4
end
@tag acceptance: true
test "syncs with the project repo with the real API" do
github_repo = setup_coderly_repo()
with_real_api do
Sync.sync_repo(github_repo)
end
repo = Repo.one(GithubRepo)
assert repo.syncing_pull_requests_count == 1
assert repo.syncing_issues_count == 3
assert repo.syncing_comments_count == 2
assert Repo.aggregate(GithubComment, :count, :id) == 2
assert Repo.aggregate(GithubIssue, :count, :id) == 3
assert Repo.aggregate(GithubPullRequest, :count, :id) == 1
assert Repo.aggregate(GithubUser, :count, :id) == 2
assert Repo.aggregate(Comment, :count, :id) == 2
assert Repo.aggregate(Task, :count, :id) == 3
assert Repo.aggregate(User, :count, :id) == 2
# Tasks closed more than 30 days ago
archived_tasks =
Task
|> where([object], is_nil(object.task_list_id))
|> Repo.all()
%TaskList{tasks: inbox_tasks} =
TaskList |> Repo.get_by(inbox: true) |> Repo.preload(:tasks)
%TaskList{tasks: pull_requests_tasks} =
TaskList |> Repo.get_by(pull_requests: true) |> Repo.preload(:tasks)
assert Enum.count(archived_tasks) == 1
assert Enum.count(inbox_tasks) == 1
assert Enum.count(pull_requests_tasks) == 1
end
end
end
<|start_filename|>lib/code_corps/github/sync/github_comment/github_comment.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubComment do
@moduledoc ~S"""
In charge of finding a `CodeCorps.GithubComment` to link with a
`CodeCorps.Comment` when processing a GitHub Comment payload.
The only entry point is `create_or_update_comment/2`.
"""
alias CodeCorps.{
GitHub.Adapters,
GitHub.Sync,
GithubComment,
GithubIssue,
GithubRepo,
GithubUser,
Repo
}
alias Ecto.Changeset
@type result :: {:ok, GithubComment.t()} | {:error, Changeset.t()}
@doc ~S"""
Finds or creates a `CodeCorps.GithubComment` using the data in a GitHub
IssueComment payload.
The process is as follows:
- Search for the comment in our database with the payload data.
- If found, update it with payload data
- If not found, create it from payload data
`CodeCorps.GitHub.Adapters.Comment.to_github_comment/1` is used to adapt the
payload data.
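
A rough usage sketch (`github_issue` is a persisted `CodeCorps.GithubIssue` and
`attrs` stands in for the "comment" portion of an IssueComment webhook payload):

    {:ok, %CodeCorps.GithubComment{}} =
      CodeCorps.GitHub.Sync.GithubComment.create_or_update_comment(github_issue, attrs)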
"""
@spec create_or_update_comment(GithubIssue.t, map) :: result
def create_or_update_comment(%GithubIssue{} = github_issue, %{} = attrs) do
with {:ok, %GithubUser{} = github_user} <- Sync.GithubUser.create_or_update_github_user(attrs),
params <- attrs |> Adapters.Comment.to_github_comment()
do
case attrs |> find_comment() do
nil ->
params |> create_comment(github_issue |> find_repo(), github_issue, github_user)
%GithubComment{} = github_comment ->
github_comment |> update_comment(params)
end
else
{:error, error} -> {:error, error}
end
end
@doc ~S"""
Finds or creates a `CodeCorps.GithubComment` using the data in a
GitHubComment payload.
The comment is matched with an existing GithubIssue record using the
`issue_url` property of the payload.
"""
@spec create_or_update_comment(GithubRepo.t, map) :: result
def create_or_update_comment(%GithubRepo{} = github_repo, %{} = attrs) do
with {:ok, %GithubUser{} = github_user} <- Sync.GithubUser.create_or_update_github_user(attrs),
params <- attrs |> Adapters.Comment.to_github_comment()
do
case attrs |> find_comment() do
nil ->
params
|> create_comment(github_repo, attrs |> find_issue(), github_user)
%GithubComment{} = github_comment ->
github_comment |> update_comment(params)
end
else
{:error, error} -> {:error, error}
end
end
@spec find_comment(map) :: GithubComment.t() | nil
defp find_comment(%{"id" => github_id}) do
GithubComment |> Repo.get_by(github_id: github_id)
end
@spec find_issue(map) :: GithubIssue.t() | nil
defp find_issue(%{"issue_url" => issue_url}) do
GithubIssue |> Repo.get_by(url: issue_url)
end
@spec find_repo(GithubIssue.t()) :: GithubRepo.t() | nil
defp find_repo(%GithubIssue{github_repo_id: github_repo_id}) do
GithubRepo |> Repo.get(github_repo_id)
end
@spec create_comment(map, GithubRepo.t() | nil, GithubIssue.t() | nil, GithubUser.t() | nil) :: result()
defp create_comment(%{} = params, github_repo, github_issue, github_user) do
%GithubComment{}
|> GithubComment.create_changeset(params)
|> Changeset.put_assoc(:github_issue, github_issue)
|> Changeset.put_assoc(:github_repo, github_repo)
|> Changeset.put_assoc(:github_user, github_user)
|> Changeset.validate_required([:github_issue, :github_repo, :github_user])
|> Repo.insert()
end
@spec update_comment(GithubComment.t(), map) :: result()
defp update_comment(%GithubComment{} = github_comment, %{} = params) do
github_comment |> GithubComment.update_changeset(params) |> Repo.update()
end
@doc ~S"""
Deletes the `CodeCorps.GithubComment` record using the GitHub ID from a GitHub
API comment payload.
Returns the deleted `CodeCorps.GithubComment` record or an empty
`CodeCorps.GithubComment` record if no such record existed.
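
A rough usage sketch (`github_id` would come from the webhook payload):

    {:ok, %CodeCorps.GithubComment{}} =
      CodeCorps.GitHub.Sync.GithubComment.delete(github_id)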
"""
@spec delete(String.t) :: {:ok, GithubComment.t()}
def delete(github_id) do
comment = Repo.get_by(GithubComment, github_id: github_id)
case comment do
nil -> {:ok, %GithubComment{}}
_ -> Repo.delete(comment, returning: true)
end
end
end
<|start_filename|>lib/code_corps/model/organization_invite.ex<|end_filename|>
defmodule CodeCorps.OrganizationInvite do
@moduledoc """
Handles inviting organizations via email
"""
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "organization_invites" do
field :code, :string
field :email, :string
field :organization_name, :string
belongs_to :organization, CodeCorps.Organization
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:email, :organization_name])
|> validate_required([:email, :organization_name])
|> validate_format(:email, ~r/@/)
end
@doc """
Builds a changeset for creating an organization invite.
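When the base changeset is valid, a random 10-character `:code` (derived from
`:crypto.strong_rand_bytes/1`) is generated and a unique constraint is placed on it.
A rough sketch (attribute values are illustrative):

    %CodeCorps.OrganizationInvite{}
    |> CodeCorps.OrganizationInvite.create_changeset(%{email: "someone@example.com", organization_name: "Example Org"})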
"""
def create_changeset(struct, params) do
struct
|> changeset(params)
|> generate_code
|> unique_constraint(:code)
end
def update_changeset(struct, params) do
struct
|> changeset(params)
|> cast(params, [:organization_id])
|> assoc_constraint(:organization)
end
defp generate_code(changeset) do
case changeset do
%Ecto.Changeset{valid?: true} ->
code = do_generate_code(10)
put_change(changeset, :code, code)
_ -> changeset
end
end
defp do_generate_code(length) do
length
|> :crypto.strong_rand_bytes
|> Base.encode64
|> binary_part(0, length)
end
end
<|start_filename|>test/lib/code_corps/messages/conversations_test.exs<|end_filename|>
defmodule CodeCorps.Messages.ConversationsTest do
@moduledoc false
import DateTime, only: [compare: 2]
use CodeCorps.DbAccessCase
alias CodeCorps.{
Conversation, Messages
}
describe "part_added_changeset/1" do
test "sets the updated_at to the current time" do
old_updated_at = DateTime.utc_now() |> Timex.shift(days: -5)
conversation = %Conversation{updated_at: old_updated_at}
changeset = conversation |> Messages.Conversations.part_added_changeset()
assert compare(old_updated_at, changeset.changes[:updated_at]) == :lt
end
test "sets status to open" do
conversation = %Conversation{status: "closed"}
changeset = conversation |> Messages.Conversations.part_added_changeset()
assert changeset.changes[:status] == "open"
end
end
end
<|start_filename|>lib/code_corps_web/controllers/user_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.UserController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{
Analytics,
GitHub,
Helpers.Query,
Services.UserService,
User,
Accounts
}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
users =
User
|> Query.id_filter(params)
|> Query.limit_filter(params)
|> Query.user_filter(params)
|> Accounts.Users.project_filter(params)
|> Repo.all()
|> preload()
conn |> render("index.json-api", data: users)
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %User{} = user <- User |> Repo.get(id) |> preload() do
conn |> render("show.json-api", data: user)
end
end
@spec create(Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with {:ok, %User{} = user} <- %User{} |> User.registration_changeset(params) |> Repo.insert(),
user <- preload(user)
do
conn |> put_status(:created) |> render("show.json-api", data: user)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %User{} = user <- User |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, user),
{:ok, user, _, _} <- user |> UserService.update(params),
user <- preload(user)
do
conn |> render("show.json-api", data: user)
end
end
@doc """
Differs from other resources by path: `/oauth/github`
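Connects the currently authenticated user to GitHub using the `code` and `state`
params from GitHub's OAuth callback, then tracks the connection in Segment.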
"""
@spec github_oauth(Conn.t, map) :: Conn.t
def github_oauth(%Conn{} = conn, %{"code" => code, "state" => state}) do
current_user = Guardian.Plug.current_resource(conn)
with {:ok, user} <- GitHub.API.User.connect(current_user, code, state),
user <- preload(user)
do
Analytics.SegmentTracker.track(user.id, "Connected to GitHub", user)
conn |> render("show.json-api", data: user)
end
end
@spec email_available(Conn.t, map) :: Conn.t
def email_available(%Conn{} = conn, %{"email" => email}) do
hash = User.check_email_availability(email)
conn |> json(hash)
end
@spec username_available(Conn.t, map) :: Conn.t
def username_available(%Conn{} = conn, %{"username" => username}) do
hash = User.check_username_availability(username)
conn |> json(hash)
end
@preloads [
:categories, :github_app_installations, :organizations, :project_users,
:slugged_route, :stripe_connect_subscriptions, :stripe_platform_card,
:stripe_platform_customer, :user_categories, :user_roles, :user_skills
]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>test/lib/code_corps_web/controllers/password_reset_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.PasswordResetControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :password_reset
import CodeCorps.TestEnvironmentHelper, only: [modify_env: 2]
alias CodeCorps.{AuthToken, User}
test "updates user password when data is valid and deletes auth token model", %{conn: conn} do
current_user = insert(:user)
{:ok, auth_token} = AuthToken.changeset(%AuthToken{}, current_user) |> Repo.insert
attrs = %{"token" => auth_token.value, "password" => "<PASSWORD>", "password_confirmation" => "<PASSWORD>"}
conn = post conn, password_reset_path(conn, :reset_password), attrs
response = json_response(conn, 201)
assert response
encrypted_password = Repo.get(User, current_user.id).encrypted_password
assert Comeonin.Bcrypt.checkpw("<PASSWORD>", encrypted_password)
assert AuthToken |> Repo.get(auth_token.id) == nil
end
test "does not create resource and renders errors when password does not match", %{conn: conn} do
current_user = insert(:user)
{:ok, auth_token} = AuthToken.changeset(%AuthToken{}, current_user) |> Repo.insert()
attrs = %{"token" => auth_token.value, "password" => "<PASSWORD>", "password_confirmation" => "<PASSWORD>"}
conn = post conn, password_reset_path(conn, :reset_password), attrs
response = json_response(conn, 422)
assert %{"errors" => [%{"detail" => "Password confirmation passwords do not match"}]} = response
end
test "does not create resource and renders errors when token is invalid", %{conn: conn} do
current_user = insert(:user)
{:ok, _} = AuthToken.changeset(%AuthToken{}, current_user) |> Repo.insert()
attrs = %{"token" => "random token", "password" => "<PASSWORD>", "password_confirmation" => "<PASSWORD>"}
conn = post conn, password_reset_path(conn, :reset_password), attrs
assert json_response(conn, 404)
end
test "does not create resource and renders errors when error in token timeout occurs", %{conn: conn} do
modify_env(:code_corps, password_reset_timeout: 0)
current_user = insert(:user)
{:ok, auth_token} = AuthToken.changeset(%AuthToken{}, current_user) |> Repo.insert()
attrs = %{"token" => auth_token.value, "password" => "<PASSWORD>", "password_confirmation" => "<PASSWORD>"}
conn = post conn, password_reset_path(conn, :reset_password), attrs
assert json_response(conn, 404)
end
end
<|start_filename|>test/lib/code_corps/admin/github_event_query_test.exs<|end_filename|>
defmodule CodeCorps.Admin.GithubEventQueryTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.{
Admin.GithubEventQuery,
GithubEvent,
Repo
}
describe "action_filter/2" do
test "when action is present it filters" do
created_event = insert(:github_event, action: "created")
insert(:github_event, action: "opened")
[result] =
GithubEvent
|> GithubEventQuery.action_filter(%{"action" => "created"})
|> Repo.all()
assert created_event == result
end
test "when action is not present returns the queryable" do
created_event = insert(:github_event, action: "created")
opened_event = insert(:github_event, action: "opened")
[result1, result2] =
GithubEvent
|> GithubEventQuery.action_filter(%{})
|> Repo.all()
assert created_event == result1
assert opened_event == result2
end
end
describe "status_filter/2" do
test "when status is present it filters" do
processed_event = insert(:github_event, status: "processed")
insert(:github_event, status: "unprocessed")
[result] =
GithubEvent
|> GithubEventQuery.status_filter(%{"status" => "processed"})
|> Repo.all()
assert processed_event == result
end
test "when status is not present returns the queryable" do
processed_event = insert(:github_event, status: "processed")
unprocessed_event = insert(:github_event, status: "unprocessed")
[result1, result2] =
GithubEvent
|> GithubEventQuery.status_filter(%{})
|> Repo.all()
assert processed_event == result1
assert unprocessed_event == result2
end
end
describe "type_filter/2" do
test "when type is present it filters" do
created_event = insert(:github_event, type: "issues")
insert(:github_event, type: "installation")
[result] =
GithubEvent
|> GithubEventQuery.type_filter(%{"type" => "issues"})
|> Repo.all()
assert created_event == result
end
test "when type is not present returns the queryable" do
issues_event = insert(:github_event, type: "issues")
installation_event = insert(:github_event, type: "installation")
[result1, result2] =
GithubEvent
|> GithubEventQuery.type_filter(%{})
|> Repo.all()
assert issues_event == result1
assert installation_event == result2
end
end
end
<|start_filename|>priv/repo/migrations/20171016125516_change_github_comment_relationships.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.ChangeGithubCommentRelationships do
use Ecto.Migration
def up do
alter table(:comments) do
remove :github_id
add :github_comment_id, references(:github_comments)
end
end
def down do
alter table(:comments) do
add :github_id, :integer
remove :github_comment_id
end
end
end
<|start_filename|>test/lib/code_corps/github/event/installation/validator_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.Installation.ValidatorTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Event.Installation.Validator
describe "valid?/1" do
test "returns true for any Installation event fixture" do
assert "installation_created" |> load_event_fixture() |> Validator.valid?
end
test "returns false for an unsupported structure" do
refute Validator.valid?("foo")
refute Validator.valid?(%{"foo" => "bar"})
refute Validator.valid?(%{"installation" => %{"bar" => "baz"}})
refute Validator.valid?(%{"sender" => %{"bar" => "baz"}})
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/stripe_connect_charge_service_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.StripeConnectChargeServiceTest do
@moduledoc false
use CodeCorps.StripeCase
alias CodeCorps.{StripeConnectCharge, StripeTesting}
alias CodeCorps.StripeService.StripeConnectChargeService
describe "create" do
test "creates a StripeConnectCharge, with proper associations" do
# we load in the fixture we will be using, so we have access to the data it contains
fixture = StripeTesting.Helpers.load_fixture("charge")
customer = insert(:stripe_connect_customer, id_from_stripe: fixture.customer)
# service expects a Stripe.Charge id, so we pass in an id for a predefined fixture we have
{:ok, %StripeConnectCharge{} = charge} = StripeConnectChargeService.create("charge", customer.stripe_connect_account.id_from_stripe)
assert charge.id_from_stripe == "charge"
assert charge.stripe_connect_customer_id == customer.id
assert charge.user_id == customer.user_id
end
end
end
<|start_filename|>lib/code_corps/services/markdown_renderer_service.ex<|end_filename|>
defmodule CodeCorps.Services.MarkdownRendererService do
@moduledoc """
Used to render provided markdown into html using an external renderer package.
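
A minimal usage sketch (the `:markdown` and `:body` field names are illustrative):

    changeset
    |> CodeCorps.Services.MarkdownRendererService.render_markdown_to_html(:markdown, :body)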
"""
alias Ecto.Changeset
@spec render_markdown_to_html(Changeset.t(), atom, atom) :: Changeset.t()
def render_markdown_to_html(%Changeset{valid?: false} = changeset, _, _), do: changeset
def render_markdown_to_html(changeset, source_field, destination_field) do
change = changeset |> Changeset.get_change(source_field)
changeset |> handle_change(change, destination_field)
end
@spec handle_change(Changeset.t(), String.t() | nil, atom) :: Changeset.t()
defp handle_change(changeset, nil, _), do: changeset
defp handle_change(changeset, "", destination_field) do
Changeset.put_change(changeset, destination_field, nil)
end
defp handle_change(changeset, lines, destination_field) when is_binary(lines) do
lines
|> convert_into_html()
|> put_into(changeset, destination_field)
end
# Prism.js requires a `language-` prefix in code classes
# See: https://github.com/pragdave/earmark#syntax-highlightning
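# With this prefix, a fenced block tagged `elixir` should come out with a
# `language-elixir` class on the generated <code> tag.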
@spec convert_into_html(String.t()) :: String.t()
defp convert_into_html(lines) do
lines
|> Earmark.as_html!(%Earmark.Options{code_class_prefix: "language-"})
end
@spec put_into(String.t(), Changeset.t(), atom) :: Changeset.t()
defp put_into(html, changeset, destination_field) do
changeset |> Changeset.put_change(destination_field, html)
end
end
<|start_filename|>lib/code_corps/projects/projects.ex<|end_filename|>
defmodule CodeCorps.Projects do
@moduledoc """
Work with `CodeCorps.Projects`.
"""
import CodeCorpsWeb.ProjectController, only: [preload: 1]
alias CodeCorps.{
Analytics.SegmentTracker, Emails, Mailer, Project, Repo, User
}
alias Ecto.Changeset
@doc """
Create a project.
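Returns `{:ok, project}` and tracks a "Created Project" event for the given
user, or `{:error, changeset}` if the params are invalid.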
"""
@spec create(map, User.t) :: {:ok, Project.t} | {:error, Changeset.t}
def create(%{} = params, %User{} = user) do
with {:ok, %Project{} = project} <- %Project{} |> Project.create_changeset(params) |> Repo.insert(),
project <- preload(project) do
user |> track_created(project)
{:ok, project}
end
end
@doc """
Update a project.
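Returns `{:ok, project}` or `{:error, changeset}`. Approval emails and the
related Segment events are only sent when the `approval_requested` or
`approved` flag flips from `false` to `true`.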
"""
@spec update(Project.t, map, User.t) :: {:ok, Project.t} | {:error, Changeset.t}
def update(%Project{} = project, %{} = params, %User{} = user) do
with {:ok, %Project{} = updated_project} <- project |> Project.update_changeset(params) |> Repo.update(),
updated_project <- preload(updated_project) do
maybe_send_approval_request_email(updated_project, project)
maybe_send_approved_email(updated_project, project)
user |> track_updated(updated_project)
user |> maybe_track_approved(updated_project, project)
user |> maybe_track_approval_requested(updated_project, project)
{:ok, updated_project}
end
end
@spec track_created(User.t, Project.t) :: any
defp track_created(%User{id: user_id}, %Project{} = project) do
user_id |> SegmentTracker.track("Created Project", project)
end
@spec track_updated(User.t, Project.t) :: any
defp track_updated(%User{id: user_id}, %Project{} = project) do
user_id |> SegmentTracker.track("Updated Project", project)
end
@spec maybe_track_approval_requested(User.t, Project.t, Project.t) :: any
defp maybe_track_approval_requested(
%User{id: user_id},
%Project{approval_requested: true} = updated_project,
%Project{approval_requested: false}) do
user_id |> SegmentTracker.track("Requested Project Approval", updated_project)
end
defp maybe_track_approval_requested(%User{}, %Project{}, %Project{}), do: :nothing
@spec maybe_track_approved(User.t, Project.t, Project.t) :: any
defp maybe_track_approved(
%User{id: user_id},
%Project{approved: true} = updated_project,
%Project{approved: false}) do
user_id |> SegmentTracker.track("Approved Project", updated_project)
end
defp maybe_track_approved(%User{}, %Project{}, %Project{}), do: :nothing
@spec maybe_send_approval_request_email(Project.t, Project.t) :: any
defp maybe_send_approval_request_email(
%Project{approval_requested: true} = updated_project,
%Project{approval_requested: false}) do
send_approval_request_email(updated_project)
end
defp maybe_send_approval_request_email(%Project{}, %Project{}), do: :nothing
@spec send_approval_request_email(Project.t) :: Bamboo.Email.t
defp send_approval_request_email(project) do
project
|> preload()
|> Emails.ProjectApprovalRequestEmail.create()
|> Mailer.deliver_now()
end
@spec maybe_send_approved_email(Project.t, Project.t) :: any
defp maybe_send_approved_email(
%Project{approved: true} = updated_project,
%Project{approved: false}) do
send_approved_email(updated_project)
end
defp maybe_send_approved_email(%Project{}, %Project{}), do: :nothing
@spec send_approved_email(Project.t) :: Bamboo.Email.t
defp send_approved_email(project) do
project
|> preload()
|> Emails.ProjectApprovedEmail.create()
|> Mailer.deliver_now()
end
end
<|start_filename|>lib/code_corps_web/controllers/stripe_connect_subscription_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectSubscriptionController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{StripeConnectSubscription, User}
alias CodeCorps.StripeService.StripeConnectSubscriptionService
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
%StripeConnectSubscription{} = subscription <- StripeConnectSubscription |> Repo.get(id),
{:ok, :authorized} <- current_user |> Policy.authorize(:show, subscription, params)
do
subscription = preload(subscription)
conn |> render("show.json-api", data: subscription)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %StripeConnectSubscription{}, params),
{:ok, %StripeConnectSubscription{} = subscription} <- StripeConnectSubscriptionService.find_or_create(params),
subscription <- preload(subscription)
do
conn |> put_status(:created) |> render("show.json-api", data: subscription)
end
end
@preloads [:project]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>test/lib/code_corps/model/github_app_installation_test.exs<|end_filename|>
defmodule CodeCorps.GithubAppInstallationTest do
@moduledoc false
use CodeCorps.ModelCase
alias CodeCorps.{GithubAppInstallation, Repo}
describe "create_changeset/2" do
test "casts the changes appropriately" do
attrs = %{foo: "bar", project_id: 1, user_id: 2}
changeset =
%GithubAppInstallation{}
|> GithubAppInstallation.create_changeset(attrs)
assert changeset.valid?
assert changeset.changes == %{project_id: 1, user_id: 2}
assert changeset |> Ecto.Changeset.get_field(:origin) == "codecorps"
assert changeset |> Ecto.Changeset.get_field(:state) == "unprocessed"
end
test "ensures user record exists" do
project = insert(:project)
attrs = %{project_id: project.id, user_id: -1, state: "processed"}
changeset =
%GithubAppInstallation{}
|> GithubAppInstallation.create_changeset(attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :user)
end
test "ensures project record exists" do
user = insert(:user)
attrs = %{project_id: -1, user_id: user.id, state: "processed"}
changeset =
%GithubAppInstallation{}
|> GithubAppInstallation.create_changeset(attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :project)
end
end
describe "access_token_changeset/2" do
test "with valid attributes" do
expires_at = "2016-07-11T22:14:10Z"
attrs = %{access_token: "<KEY>", access_token_expires_at: expires_at}
changeset =
%GithubAppInstallation{}
|> GithubAppInstallation.access_token_changeset(attrs)
assert changeset.valid?
assert changeset |> get_change(:access_token_expires_at) |> DateTime.to_iso8601() == expires_at
end
test "with invalid attributes" do
changeset =
%GithubAppInstallation{}
|> GithubAppInstallation.access_token_changeset(%{})
refute changeset.valid?
assert_error_message(changeset, :access_token, "can't be blank")
assert_error_message(changeset, :access_token_expires_at, "can't be blank")
end
end
end
<|start_filename|>test/lib/code_corps/model/stripe_platform_customer_test.exs<|end_filename|>
defmodule CodeCorps.StripePlatformCustomerTest do
use CodeCorps.ModelCase
alias CodeCorps.StripePlatformCustomer
@valid_attrs %{
id_from_stripe: "abc123"
}
@invalid_attrs %{}
describe "create_changeset/2" do
test "reports as valid when attributes are valid" do
user_id = insert(:user).id
changes = Map.merge(@valid_attrs, %{user_id: user_id})
changeset = StripePlatformCustomer.create_changeset(%StripePlatformCustomer{}, changes)
assert changeset.valid?
end
test "reports as invalid when attributes are invalid" do
changeset = StripePlatformCustomer.create_changeset(%StripePlatformCustomer{}, @invalid_attrs)
refute changeset.valid?
changeset |> assert_validation_triggered(:id_from_stripe, :required)
changeset |> assert_validation_triggered(:user_id, :required)
end
test "ensures associations link to records that exist" do
attrs = @valid_attrs |> Map.merge(%{user_id: -1})
{result, changeset} =
%StripePlatformCustomer{}
|> StripePlatformCustomer.create_changeset(attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
changeset |> assert_error_message(:user, "does not exist")
end
end
describe "update_changeset/2" do
test "reports as valid when attributes are valid" do
platform_customer = insert(:stripe_platform_customer)
changeset = StripePlatformCustomer.update_changeset(platform_customer, %{email: "<EMAIL>"})
assert changeset.valid?
end
test "requires email" do
platform_customer = insert(:stripe_platform_customer)
changeset = StripePlatformCustomer.update_changeset(platform_customer, %{email: nil})
refute changeset.valid?
changeset |> assert_validation_triggered(:email, :required)
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/stripe_connect_card_service_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.StripeConnectCardServiceTest do
use ExUnit.Case, async: true
use CodeCorps.ModelCase
alias CodeCorps.StripeConnectCard
alias CodeCorps.StripeService.StripeConnectCardService
describe "update/1" do
@attributes %{name: "<NAME>", exp_month: 6, exp_year: 2030}
test "it just updates the connect card on Stripe API, not locally" do
connect_card = insert(:stripe_connect_card)
connect_card =
StripeConnectCard
|> Repo.get(connect_card.id)
|> Repo.preload([:stripe_platform_card, :stripe_connect_account])
updated_at = connect_card.updated_at
{:ok, %Stripe.Card{} = stripe_card} =
StripeConnectCardService.update(connect_card, @attributes)
assert stripe_card.id == connect_card.id_from_stripe
assert stripe_card.name == "<NAME>"
assert stripe_card.exp_year == 2030
assert stripe_card.exp_month == 6
connect_card = Repo.get(StripeConnectCard, connect_card.id)
assert connect_card.updated_at == updated_at
end
end
end
<|start_filename|>test/lib/code_corps/model/donation_goal_test.exs<|end_filename|>
defmodule CodeCorps.DonationGoalTest do
@moduledoc false
use CodeCorps.ModelCase
alias CodeCorps.DonationGoal
describe "%create_changeset/2" do
test "requires amount, description and project_id" do
changeset = DonationGoal.create_changeset(%DonationGoal{}, %{})
refute changeset.valid?
changeset |> assert_validation_triggered(:amount, :required)
changeset |> assert_validation_triggered(:description, :required)
changeset |> assert_validation_triggered(:project_id, :required)
end
test "ensures project with specified id actually exists" do
attrs = %{amount: 100, description: "Bar", project_id: -1}
{result, changeset} =
%DonationGoal{}
|> DonationGoal.create_changeset(attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
changeset |> assert_error_message(:project, "does not exist")
end
test "amount must not be negative" do
attrs = %{amount: -100, description: "Cashback for donators", project_id: 2}
donation_goal = insert(:donation_goal)
changeset = DonationGoal.create_changeset(donation_goal, attrs)
refute changeset.valid?
changeset |> assert_error_message(:amount, "must be greater than %{number}")
end
end
describe "&update_changeset/2" do
test "requires amount, description" do
attrs = %{amount: nil, description: nil}
donation_goal = insert(:donation_goal)
changeset = DonationGoal.update_changeset(donation_goal, attrs)
refute changeset.valid?
changeset |> assert_validation_triggered(:amount, :required)
changeset |> assert_validation_triggered(:description, :required)
end
end
describe "&set_current_changeset/2" do
test "requires current" do
attrs = %{current: nil}
donation_goal = insert(:donation_goal)
changeset = DonationGoal.set_current_changeset(donation_goal, attrs)
refute changeset.valid?
changeset |> assert_validation_triggered(:current, :required)
end
test "ensures only one donation goal per project can be current" do
project = insert(:project)
insert(:donation_goal, current: true, project: project)
donation_goal = insert(:donation_goal, project: project)
attrs = %{current: true}
changeset = DonationGoal.set_current_changeset(donation_goal, attrs)
assert changeset.valid?
{:error, errored_changeset} = Repo.update(changeset)
assert_error_message(errored_changeset, :current, "has already been taken")
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/github_app_installation_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.GithubAppInstallationControllerTest do
@moduledoc false
use CodeCorpsWeb.ApiCase, resource_name: :github_app_installation
alias CodeCorps.{Analytics.SegmentTraitsBuilder, GithubAppInstallation, Repo}
describe "index" do
test "lists all resources", %{conn: conn} do
[record_1, record_2] = insert_pair(:github_app_installation)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
test "filters resources by record id", %{conn: conn} do
[record_1, record_2 | _] = insert_list(3, :github_app_installation)
path = "github-app-installations/?filter[id]=#{record_1.id},#{record_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
record = insert(:github_app_installation)
conn
|> request_show(record)
|> json_response(200)
|> assert_id_from_response(record.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: user} do
project = insert(:project)
insert(:project_user, project: project, user: user, role: "owner")
attrs = %{project: project, user: user}
assert conn |> request_create(attrs) |> json_response(201)
end
@tag :authenticated
test "tracks creation", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
attrs = %{project: project, user: current_user}
conn |> request_create(attrs)
user_id = current_user.id
traits = GithubAppInstallation |> Repo.one |> SegmentTraitsBuilder.build
assert_receive({:track, ^user_id, "Created GitHub App Installation", ^traits})
end
@tag :authenticated
test "does not create resource and renders 422 when data is invalid", %{conn: conn, current_user: user} do
project = insert(:project)
insert(:project_user, project: project, user: user, role: "owner")
attrs = %{project: project, user: nil}
assert conn |> request_create(attrs) |> json_response(422)
end
test "does not create resource and renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
end
<|start_filename|>lib/code_corps_web/views/task_view.ex<|end_filename|>
defmodule CodeCorpsWeb.TaskView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [
:archived, :body, :created_at, :created_from, :has_github_pull_request,
:inserted_at, :markdown, :modified_at, :modified_from, :number, :order,
:overall_status, :status, :title, :updated_at
]
has_one :github_issue, type: "github-issue", field: :github_issue_id
has_one :github_pull_request, serializer: CodeCorpsWeb.GithubPullRequestView, identifiers: :always
has_one :github_repo, type: "github-repo", field: :github_repo_id
has_one :project, type: "project", field: :project_id
has_one :task_list, type: "task-list", field: :task_list_id
has_one :user, type: "user", field: :user_id
has_one :user_task, serializer: CodeCorpsWeb.UserTaskView, identifiers: :always
has_many :comments, serializer: CodeCorpsWeb.CommentView, identifiers: :always
has_many :task_skills, serializer: CodeCorpsWeb.TaskSkillView, identifiers: :always
def has_github_pull_request(%{
github_pull_request: %CodeCorps.GithubPullRequest{}
}), do: true
def has_github_pull_request(%{github_pull_request: nil}), do: false
def overall_status(%{
github_pull_request: %CodeCorps.GithubPullRequest{merged: merged, state: state}
}, _conn) do
case merged do
true -> "merged"
false -> state
end
end
def overall_status(%{github_pull_request: nil, status: status}, _conn) do
status
end
end
<|start_filename|>test/lib/code_corps/analytics/segment_event_name_builder_test.exs<|end_filename|>
defmodule CodeCorps.Analytics.SegmentEventNameBuilderTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.Factories
alias CodeCorps.Analytics.SegmentEventNameBuilder
describe "get_action_name/2" do
test "with comment" do
assert SegmentEventNameBuilder.build(:create, build(:comment)) == "Created Comment"
assert SegmentEventNameBuilder.build(:update, build(:comment)) == "Edited Comment"
end
test "with task" do
assert SegmentEventNameBuilder.build(:create, build(:task)) == "Created Task"
assert SegmentEventNameBuilder.build(:update, build(:task)) == "Edited Task"
end
test "with user" do
assert SegmentEventNameBuilder.build(:create, build(:user)) == "Signed Up"
end
test "with user category" do
assert SegmentEventNameBuilder.build(:create, build(:user_category)) == "Added User Category"
assert SegmentEventNameBuilder.build(:delete, build(:user_category)) == "Removed User Category"
end
test "with user role" do
assert SegmentEventNameBuilder.build(:create, build(:user_role)) == "Added User Role"
assert SegmentEventNameBuilder.build(:delete, build(:user_role)) == "Removed User Role"
end
test "with user skill" do
assert SegmentEventNameBuilder.build(:create, build(:user_skill)) == "Added User Skill"
assert SegmentEventNameBuilder.build(:delete, build(:user_skill)) == "Removed User Skill"
end
end
end
<|start_filename|>test/lib/code_corps_web/views/user_role_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.UserRoleViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
user_role = insert(:user_role)
rendered_json = render(CodeCorpsWeb.UserRoleView, "show.json-api", data: user_role)
expected_json = %{
"data" => %{
"id" => user_role.id |> Integer.to_string,
"type" => "user-role",
"attributes" => %{},
"relationships" => %{
"role" => %{
"data" => %{"id" => user_role.role_id |> Integer.to_string, "type" => "role"}
},
"user" => %{
"data" => %{"id" => user_role.user_id |> Integer.to_string, "type" => "user"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps/model/conversation.ex<|end_filename|>
defmodule CodeCorps.Conversation do
@moduledoc ~S"""
A header of a `CodeCorps.Message` thread, depicting the start of a conversation
with a specific `CodeCorps.User`.
When a project sends a `CodeCorps.Message` to one or more users, a
`CodeCorps.Conversation` needs to be created for each of those users, so
separate conversations can be held with different users starting from the same
original `CodeCorps.Message`.
Once replies start coming in, a `CodeCorps.ConversationPart` is created for
each of those replies.
"""
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "conversations" do
field :read_at, :utc_datetime, null: true
field :status, :string, null: false, default: "open"
belongs_to :message, CodeCorps.Message
belongs_to :user, CodeCorps.User
has_many :conversation_parts, CodeCorps.ConversationPart
timestamps()
end
def update_changeset(struct, %{} = params) do
struct
|> cast(params, [:status])
|> validate_inclusion(:status, statuses())
end
defp statuses do
~w{ open closed }
end
end
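# A brief, illustrative sketch (not part of the schema module above): closing a
# conversation goes through update_changeset/2, which only accepts the "open"
# and "closed" statuses, so any other value yields an invalid changeset.
defmodule CodeCorps.ConversationExample do
  @moduledoc false
  alias CodeCorps.{Conversation, Repo}

  # Marks a conversation as closed.
  def close(%Conversation{} = conversation) do
    conversation
    |> Conversation.update_changeset(%{status: "closed"})
    |> Repo.update()
  end
end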
<|start_filename|>test/lib/code_corps/model/task_test.exs<|end_filename|>
defmodule CodeCorps.TaskTest do
use CodeCorps.ModelCase
alias CodeCorps.Task
alias Ecto.Changeset
@valid_attrs %{
title: "Test task",
markdown: "A test task"
}
@invalid_attrs %{}
describe "changeset/2" do
test "is invalid with invalid attributes" do
changeset = Task.changeset(%Task{}, @invalid_attrs)
refute changeset.valid?
end
test "renders body html from markdown" do
changes = Map.merge(@valid_attrs, %{
markdown: "A **strong** body",
project_id: 1,
task_list_id: 1,
user_id: 1
})
changeset = Task.changeset(%Task{}, changes)
assert changeset.valid?
assert changeset |> get_change(:body) == "<p>A <strong>strong</strong> body</p>\n"
end
test "removes the order and task list when the task is archived" do
changes = Map.put(@valid_attrs, :archived, true)
changeset = Task.update_changeset(%Task{order: 1, task_list_id: 1}, changes)
%{archived: archived, order: order, task_list_id: task_list_id} = changeset.changes
assert changeset.valid?
assert archived
refute order
refute task_list_id
end
test "validates task list when the task is not archived and position is set" do
changes = Map.merge(@valid_attrs, %{
position: 1,
project_id: 1,
user_id: 1
})
changeset = Task.changeset(%Task{}, changes)
refute changeset.valid?
assert changeset.errors[:task_list_id]
end
end
describe "create_changeset/2" do
test "sets created_at and modified_at to the same time" do
project = insert(:project)
task_list = insert(:task_list)
user = insert(:user)
changes = Map.merge(@valid_attrs, %{
project_id: project.id,
task_list_id: task_list.id,
user_id: user.id
})
changeset = Task.create_changeset(%Task{}, changes)
assert changeset.valid?
{:ok, %Task{created_at: created_at, modified_at: modified_at}} = Repo.insert(changeset)
assert created_at == modified_at
end
test "sets modified_from to 'code_corps'" do
assert(
%Task{}
|> Task.create_changeset(%{})
|> Changeset.get_field(:modified_from) == "code_corps"
)
end
test "sets the order when the task is not archived and position is set" do
project = insert(:project)
task_list = insert(:task_list)
insert(:task, task_list: task_list, order: 1)
user = insert(:user)
changes = Map.merge(@valid_attrs, %{
position: 1,
project_id: project.id,
task_list_id: task_list.id,
user_id: user.id
})
changeset = Task.create_changeset(%Task{}, changes)
assert changeset.valid?
{:ok, %Task{order: order}} = Repo.insert(changeset)
# We really want to test that the order is set, but we have no good way to
# test this since the column default is `0`
assert order !== 0
end
end
describe "update_changeset/2" do
test "only allows specific values for status" do
changes = Map.put(@valid_attrs, :status, "nonexistent")
changeset = Task.update_changeset(%Task{task_list_id: 1}, changes)
refute changeset.valid?
end
test "closed_at is set when status changes to closed" do
changes = Map.put(@valid_attrs, :status, "closed")
changeset = Task.update_changeset(%Task{task_list_id: 1}, changes)
%{closed_at: closed_at} = changeset.changes
assert changeset.valid?
assert closed_at
end
test "closed_at is set to nil when status changes to open" do
changes = Map.put(@valid_attrs, :status, "open")
changeset = Task.update_changeset(%Task{task_list_id: 1, status: "closed", closed_at: DateTime.utc_now}, changes)
%{closed_at: closed_at} = changeset.changes
assert changeset.valid?
refute closed_at
end
test "archived field changes appropriately" do
changes = Map.put(@valid_attrs, :archived, true)
changeset = Task.update_changeset(%Task{task_list_id: 1}, changes)
%{archived: archived} = changeset.changes
assert changeset.valid?
assert archived
end
test "does not reset order when task was already archived" do
project = insert(:project)
user = insert(:user)
changes = Map.merge(@valid_attrs, %{
archived: true,
position: 1,
project_id: project.id,
user_id: user.id
})
changeset = Task.create_changeset(%Task{}, changes)
{:ok, %Task{order: order} = task} = Repo.insert(changeset)
refute order
changeset = Task.update_changeset(task, %{title: "New title"})
{:ok, %Task{order: order}} = Repo.update(changeset)
refute order
end
test "sets :modified_from to 'code_corps'" do
assert(
:task
|> insert(modified_from: "github")
|> Task.update_changeset(%{})
|> Changeset.get_field(:modified_from) == "code_corps"
)
end
end
end
<|start_filename|>lib/code_corps/processor/sync.ex<|end_filename|>
defmodule CodeCorps.Processor.Sync do
@behaviour CodeCorps.Processor
@spec process((() -> any)) :: any
def process(fun) do
apply(fun, [])
end
end
<|start_filename|>test/lib/code_corps_web/views/user_task_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.UserTaskViewTest do
@moduledoc false
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
user_task = insert(:user_task)
rendered_json = render(CodeCorpsWeb.UserTaskView, "show.json-api", data: user_task)
expected_json = %{
"data" => %{
"id" => user_task.id |> Integer.to_string,
"type" => "user-task",
"attributes" => %{},
"relationships" => %{
"task" => %{
"data" => %{"id" => user_task.task_id |> Integer.to_string, "type" => "task"}
},
"user" => %{
"data" => %{"id" => user_task.user_id |> Integer.to_string, "type" => "user"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps/stripe_service/adapters/stripe_external_account.ex<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripeExternalAccountAdapter do
alias CodeCorps.MapUtils
alias CodeCorps.StripeConnectAccount
@stripe_attributes [
:account_holder_name, :account_holder_type, :bank_name, :country,
:currency, :default_for_currency, :fingerprint, :id, :last4,
:routing_number, :status
]
def to_params(%Stripe.BankAccount{} = external_account, %StripeConnectAccount{} = connect_account) do
params =
external_account
|> Map.from_struct
|> Map.take(@stripe_attributes)
|> MapUtils.rename(:id, :id_from_stripe)
|> add_association_attributes(connect_account)
{:ok, params}
end
defp add_association_attributes(attributes, %StripeConnectAccount{} = connect_account) do
association_attributes = build_association_attributes(connect_account)
attributes |> Map.merge(association_attributes)
end
defp build_association_attributes(%StripeConnectAccount{id: id, id_from_stripe: id_from_stripe}) do
%{account_id_from_stripe: id_from_stripe, stripe_connect_account_id: id}
end
end
<|start_filename|>lib/code_corps/github/sync/github_app_installation/changeset.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubAppInstallation.Changeset do
@moduledoc ~S"""
In charge of managing changesets when creating or updating a
`GithubAppInstallation` in the process of handling an Installation event.
"""
alias CodeCorps.{
GitHub.Adapters,
GithubAppInstallation,
User
}
alias Ecto.Changeset
@doc ~S"""
Builds a changeset to create a `GithubAppInstallation` based on the payload
from an Installation event.
An optional user to associate the installation with can be provided.
"""
@spec create_changeset(map, User.t | nil) :: Changeset.t
def create_changeset(%{} = params, user \\ nil) do
attrs = params |> Adapters.AppInstallation.from_installation_event()
%GithubAppInstallation{}
|> Changeset.change(attrs)
|> Changeset.put_change(:installed, true)
|> Changeset.put_assoc(:user, user)
|> Changeset.put_change(:origin, "github")
|> Changeset.unique_constraint(
:github_id, name: :github_app_installations_github_id_index
)
end
@doc ~S"""
Builds a changeset to update a `GithubAppInstallation` based on the payload
from an Installation event.
"""
@spec update_changeset(GithubAppInstallation.t, map) :: Changeset.t
def update_changeset(%GithubAppInstallation{} = record, %{} = params) do
attrs = params |> Adapters.AppInstallation.from_installation_event()
record
|> Changeset.change(attrs)
|> Changeset.put_change(:installed, true)
|> Changeset.unique_constraint(
:github_id, name: :github_app_installations_github_id_index
)
end
end
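# An abbreviated usage sketch. The payload below only carries the keys the
# Installation event validator checks for; a real GitHub payload contains many
# more fields, and Adapters.AppInstallation decides which of them are kept, so
# it may well read installation fields that are not shown here.
defmodule CodeCorps.GitHub.Sync.GithubAppInstallation.ChangesetExample do
  @moduledoc false
  alias CodeCorps.GitHub.Sync.GithubAppInstallation.Changeset

  @payload %{
    "action" => "created",
    "installation" => %{"id" => 1234, "account" => %{"id" => 5678}},
    "sender" => %{"id" => 5678}
  }

  # Builds a creation changeset from the abbreviated payload. With no user
  # given, the installation is simply stored without a user association.
  def build, do: Changeset.create_changeset(@payload)
end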
<|start_filename|>lib/code_corps/model/role.ex<|end_filename|>
defmodule CodeCorps.Role do
@moduledoc """
This module defines a "role" on Code Corps.
Examples of roles are "Backend Developer" and "Front End Developer".
"""
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "roles" do
field :name, :string
field :ability, :string
field :kind, :string
has_many :role_skills, CodeCorps.RoleSkill
has_many :skills, through: [:role_skills, :skill]
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
@spec changeset(CodeCorps.Role.t, map) :: Ecto.Changeset.t
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:name, :ability, :kind])
|> validate_required([:name, :ability, :kind])
|> validate_inclusion(:kind, kinds())
end
defp kinds do
~w{ technology creative support }
end
end
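# For illustration only (the attribute values below are made up): :kind is
# restricted to ~w(technology creative support) by the inclusion validation
# above, so a changeset with any other kind is invalid.
defmodule CodeCorps.RoleExample do
  @moduledoc false

  # Builds a valid changeset for a "Backend Developer" role.
  def backend_developer_changeset do
    CodeCorps.Role.changeset(%CodeCorps.Role{}, %{
      name: "Backend Developer",
      ability: "Backend Development",
      kind: "technology"
    })
  end
end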
<|start_filename|>test/lib/code_corps/stripe_service/stripe_invoice_service_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.StripeInvoiceServiceTest do
use ExUnit.Case, async: true
use CodeCorps.ModelCase
alias CodeCorps.{StripeInvoice}
alias CodeCorps.StripeService.StripeInvoiceService
describe "create" do
test "creates a StripeInvoice" do
invoice_fixture = CodeCorps.StripeTesting.Helpers.load_fixture("invoice")
subscription = insert(:stripe_connect_subscription, id_from_stripe: invoice_fixture.subscription)
connect_customer = insert(:stripe_connect_customer, id_from_stripe: invoice_fixture.customer)
{:ok, %StripeInvoice{} = invoice} =
StripeInvoiceService.create(invoice_fixture.id, invoice_fixture.customer)
assert invoice.id_from_stripe == invoice_fixture.id
assert invoice.stripe_connect_subscription_id == subscription.id
assert invoice.user_id == connect_customer.user_id
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/conversation_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ConversationControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :conversation
describe "index" do
@tag :authenticated
test "lists all entries user is authorized to view", %{conn: conn, current_user: user} do
%{project: project} = insert(:project_user, role: "admin", user: user)
message_on_user_administered_project = insert(:message, project: project)
conversation_on_user_administered_project =
insert(:conversation, message: message_on_user_administered_project)
conversation_by_user = insert(:conversation, user: user)
_other_conversation = insert(:conversation)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([
conversation_on_user_administered_project.id,
conversation_by_user.id
])
end
@tag authenticated: :admin
test "lists all entries if user is admin", %{conn: conn} do
[conversation_1, conversation_2] = insert_pair(:conversation)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([conversation_1.id, conversation_2.id])
end
@tag authenticated: :admin
test "lists all entries by status", %{conn: conn} do
insert_pair(:conversation)
user = insert(:user)
conversation_other = insert(:conversation, user: user)
conn
|> get("conversations?user_id=#{user.id}")
|> json_response(200)
|> assert_ids_from_response([conversation_other.id])
end
end
describe "show" do
@tag :authenticated
test "shows chosen resource", %{conn: conn, current_user: user} do
conversation = insert(:conversation, user: user)
conn
|> request_show(conversation)
|> json_response(200)
|> assert_id_from_response(conversation.id)
end
test "renders 401 when unauthenticated", %{conn: conn} do
conversation = insert(:conversation)
assert conn |> request_show(conversation) |> json_response(401)
end
@tag :authenticated
test "renders 403 when unauthorized", %{conn: conn} do
conversation = insert(:conversation)
assert conn |> request_show(conversation) |> json_response(403)
end
end
describe "update" do
@tag authenticated: :admin
test "updates and renders chosen resource when data is valid", %{conn: conn, current_user: user} do
%{project: project} = insert(:project_user, role: "admin", user: user)
message_on_user_administered_project = insert(:message, project: project)
conversation_on_user_administered_project =
insert(:conversation, message: message_on_user_administered_project)
data =
conn
|> request_update(conversation_on_user_administered_project, %{status: "closed"})
|> json_response(200)
|> Map.get("data")
assert data["attributes"]["status"] == "closed"
end
@tag authenticated: :admin
test "renders 422 when data is invalid", %{conn: conn, current_user: current_user} do
conversation = insert(:conversation, user: current_user)
assert conn |> request_update(conversation, %{status: "wat"}) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "does not update resource and renders 403 when not authorized", %{conn: conn} do
user = insert(:user)
insert(:conversation, user: user)
assert conn |> request_update() |> json_response(403)
end
end
end
<|start_filename|>lib/code_corps/comment/service.ex<|end_filename|>
defmodule CodeCorps.Comment.Service do
@moduledoc ~S"""
In charge of performing CRUD operations on `Comment` records, as well as any
additional actions that need to be performed when such an operation happens.
"""
alias CodeCorps.{
Comment,
GitHub,
GitHub.Sync,
GithubComment,
GithubIssue,
Task,
Repo
}
alias Ecto.{Changeset, Multi}
require Logger
@type record_result ::
{:ok, Comment.t} | {:error, Changeset.t} | {:error, GitHub.api_error_struct()}
# :user, :github_issue and :github_repo are required for connecting to github
# :project and :organization are required in order to add a header to the
# github comment body when the user themselves are not connected to github,
# but the parent task is
#
# Right now, all of these preloads are loaded at once. If there are
# performance issues, we can split them up according to the information
# provided here.
@preloads [
:github_comment,
:user,
task: [
:github_issue,
[github_repo: :github_app_installation],
[project: :organization]
]
]
@doc ~S"""
Creates a `Comment` record using the provided parameters
Also creates comment on GitHub if associated `Task` is github-connected.
"""
@spec create(map) :: {:ok, Comment.t} | {:error, Changeset.t}
def create(%{} = attributes) do
Multi.new()
|> Multi.insert(:comment, %Comment{} |> Comment.create_changeset(attributes))
|> Multi.run(:preload, fn %{comment: %Comment{} = comment} ->
{:ok, comment |> Repo.preload(@preloads)}
end)
|> Multi.run(:github, fn %{preload: %Comment{} = comment} -> comment |> create_on_github() end)
|> Repo.transaction()
|> marshall_result
end
@doc ~S"""
Updates the provided `Comment` record using the provided parameters
"""
@spec update(Comment.t, map) :: {:ok, Comment.t} | {:error, Changeset.t}
def update(%Comment{} = comment, %{} = attributes) do
Multi.new()
|> Multi.update(:comment, comment |> Comment.update_changeset(attributes))
|> Multi.run(:preload, fn %{comment: %Comment{} = comment} ->
{:ok, comment |> Repo.preload(@preloads)}
end)
|> Multi.run(:github, fn %{preload: %Comment{} = comment} -> comment |> update_on_github() end)
|> Repo.transaction()
|> marshall_result()
end
@spec marshall_result(tuple) :: {:ok, Comment.t} | {:error, Changeset.t} | {:error, :github}
defp marshall_result({:ok, %{github: %Comment{} = comment}}), do: {:ok, comment}
defp marshall_result({:error, :comment, %Changeset{} = changeset, _steps}), do: {:error, changeset}
defp marshall_result({:error, :github, result, _steps}) do
Logger.info("An error occurred when creating/updating the comment with the GitHub API")
Logger.info("#{inspect(result)}")
{:error, :github}
end
@spec create_on_github(Comment.t) :: record_result
defp create_on_github(%Comment{task: %Task{github_issue_id: nil}} = comment), do: {:ok, comment}
defp create_on_github(%Comment{task: %Task{github_issue: github_issue}} = comment) do
with {:ok, payload} <- comment |> GitHub.API.Comment.create(),
{:ok, %GithubComment{} = github_comment} <-
Sync.GithubComment.create_or_update_comment(github_issue, payload) do
comment |> link_with_github_changeset(github_comment) |> Repo.update()
else
{:error, error} -> {:error, error}
end
end
@spec link_with_github_changeset(Comment.t, GithubComment.t) :: Changeset.t
defp link_with_github_changeset(%Comment{} = comment, %GithubComment{} = github_comment) do
comment |> Changeset.change(%{github_comment: github_comment})
end
@spec update_on_github(Comment.t) :: record_result
defp update_on_github(%Comment{github_comment_id: nil} = comment), do: {:ok, comment}
defp update_on_github(
%Comment{task: %Task{github_issue: %GithubIssue{} = github_issue}} = comment
) do
with {:ok, payload} <- comment |> GitHub.API.Comment.update(),
{:ok, %GithubComment{}} <-
Sync.GithubComment.create_or_update_comment(github_issue, payload) do
{:ok, comment}
else
{:error, error} -> {:error, error}
end
end
end
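# A minimal usage sketch. The attribute names below are illustrative;
# Comment.create_changeset/2 defines what is actually required. The whole
# operation runs inside an Ecto.Multi transaction: the local record is inserted
# first, then, if the parent task is linked to a GitHub issue, the comment is
# pushed to GitHub and linked to the resulting GithubComment. A GitHub failure
# rolls the transaction back and returns {:error, :github}.
defmodule CodeCorps.Comment.ServiceExample do
  @moduledoc false

  # Posts a comment on a task on behalf of a user.
  def post_comment(task, user, markdown) do
    CodeCorps.Comment.Service.create(%{
      "markdown" => markdown,
      "task_id" => task.id,
      "user_id" => user.id
    })
  end
end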
<|start_filename|>lib/code_corps/auth/ensure_auth_pipeline.ex<|end_filename|>
defmodule CodeCorps.Auth.EnsureAuthPipeline do
use Guardian.Plug.Pipeline, otp_app: :code_corps,
module: CodeCorps.Guardian,
error_handler: CodeCorps.Auth.ErrorHandler
plug Guardian.Plug.EnsureAuthenticated
end
<|start_filename|>lib/code_corps_web/controllers/page_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.PageController do
@moduledoc false
use CodeCorpsWeb, :controller
def index(conn, _params) do
redirect conn, external: "http://docs.codecorpsapi.apiary.io/"
end
end
<|start_filename|>lib/code_corps_web/controllers/organization_github_app_installation_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.OrganizationGithubAppInstallationController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{OrganizationGithubAppInstallation, User, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with organization_installations <- OrganizationGithubAppInstallation |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: organization_installations)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %OrganizationGithubAppInstallation{} = organization_installation <- OrganizationGithubAppInstallation |> Repo.get(id) do
conn |> render("show.json-api", data: organization_installation)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %OrganizationGithubAppInstallation{}, params),
{:ok, %OrganizationGithubAppInstallation{} = organization_installation} <- %OrganizationGithubAppInstallation{} |> OrganizationGithubAppInstallation.create_changeset(params) |> Repo.insert do
conn |> put_status(:created) |> render("show.json-api", data: organization_installation)
end
end
@spec delete(Plug.Conn.t, map) :: Conn.t
def delete(%Conn{} = conn, %{"id" => id} = params) do
with %OrganizationGithubAppInstallation{} = organization_github_installation <- OrganizationGithubAppInstallation |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:delete, organization_github_installation, params),
{:ok, _organization_github_installation} <-
organization_github_installation
|> Repo.delete do
conn |> send_resp(:no_content, "")
end
end
end
<|start_filename|>test/lib/code_corps/tasks/query_test.exs<|end_filename|>
defmodule CodeCorps.Tasks.QueryTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.Tasks
describe "filter/2" do
defp get_sorted_ids(tasks) do
tasks |> Enum.map(&Map.get(&1, :id)) |> Enum.sort
end
defp list_sorted_ids(params) do
params |> Tasks.Query.list |> get_sorted_ids()
end
defp find_with_query(params) do
params |> Tasks.Query.find
end
test "filters by project_id" do
project_1 = insert(:project)
project_1_tasks = insert_list(3, :task, project: project_1)
project_1_task_ids = project_1_tasks |> get_sorted_ids()
project_2 = insert(:project)
project_2_tasks = insert_list(3, :task, project: project_2)
project_2_task_ids = project_2_tasks |> get_sorted_ids()
assert project_1_task_ids ==
list_sorted_ids(%{"project_id" => project_1.id})
assert project_2_task_ids ==
list_sorted_ids(%{"project_id" => project_2.id})
end
test "filters by coalesced task_list_ids" do
task_list_1 = insert(:task_list)
list_1_tasks = insert_list(3, :task, task_list: task_list_1)
list_1_task_ids = list_1_tasks |> get_sorted_ids()
task_list_2 = insert(:task_list)
list_2_tasks = insert_list(3, :task, task_list: task_list_2)
list_2_task_ids = list_2_tasks |> get_sorted_ids()
task_list_3 = insert(:task_list)
list_3_tasks = insert_list(3, :task, task_list: task_list_3)
list_3_task_ids = list_3_tasks |> get_sorted_ids()
assert list_1_task_ids ==
list_sorted_ids(%{"task_list_ids" => "#{task_list_1.id}"})
assert list_2_task_ids ==
list_sorted_ids(%{"task_list_ids" => "#{task_list_2.id}"})
assert list_3_task_ids ==
list_sorted_ids(%{"task_list_ids" => "#{task_list_3.id}"})
assert (list_1_task_ids ++ list_2_task_ids) |> Enum.sort ==
list_sorted_ids(%{"task_list_ids" => "#{task_list_1.id},#{task_list_2.id}"})
assert (list_2_task_ids ++ list_3_task_ids) |> Enum.sort ==
list_sorted_ids(%{"task_list_ids" => "#{task_list_2.id},#{task_list_3.id}"})
assert (list_1_task_ids ++ list_3_task_ids) |> Enum.sort ==
list_sorted_ids(%{"task_list_ids" => "#{task_list_1.id},#{task_list_3.id}"})
end
test "filters by status" do
open_tasks = insert_list(3, :task, status: "open")
open_task_ids = open_tasks |> get_sorted_ids()
closed_tasks = insert_list(3, :task, status: "closed")
closed_task_ids = closed_tasks |> get_sorted_ids()
assert open_task_ids ==
list_sorted_ids(%{"status" => "open"})
assert closed_task_ids ==
list_sorted_ids(%{"status" => "closed"})
end
test "filter by archived" do
tasks = insert_list(3, :task)
task_ids = tasks |> get_sorted_ids()
archived_tasks = insert_list(3, :task, archived: true)
archived_task_ids = archived_tasks |> get_sorted_ids()
assert task_ids ==
list_sorted_ids(%{})
assert task_ids ==
list_sorted_ids(%{"archived" => false})
assert archived_task_ids ==
list_sorted_ids(%{"archived" => true})
end
test "works with multiple filters" do
project_1 = insert(:project)
project_2 = insert(:project)
list_1 = insert(:task_list)
list_2 = insert(:task_list)
task_1 = insert(:task, status: "open", project: project_1, task_list: list_1)
task_2 = insert(:task, status: "closed", project: project_1, task_list: list_1)
task_3 = insert(:task, status: "open", project: project_1, task_list: list_2)
task_4 = insert(:task, status: "closed", project: project_1, task_list: list_2)
task_5 = insert(:task, status: "open", project: project_2, task_list: list_1)
task_6 = insert(:task, status: "closed", project: project_2, task_list: list_1)
task_7 = insert(:task, status: "open", project: project_2, task_list: list_2)
task_8 = insert(:task, status: "closed", project: project_2, task_list: list_2)
task_9 = insert(:task, status: "open", project: project_1, task_list: list_2, archived: true)
task_10 = insert(:task, status: "closed", project: project_1, task_list: list_1, archived: true)
task_11 = insert(:task, status: "open", project: project_2, task_list: list_1, archived: true)
assert [task_1.id] ==
list_sorted_ids(%{"status" => "open", "project_id" => project_1.id, "task_list_ids" => "#{list_1.id}"})
assert [task_2.id] ==
list_sorted_ids(%{"status" => "closed", "project_id" => project_1.id, "task_list_ids" => "#{list_1.id}"})
assert [task_1, task_2] |> get_sorted_ids() ==
list_sorted_ids(%{"project_id" => project_1.id, "task_list_ids" => "#{list_1.id}"})
assert [task_1, task_5] |> get_sorted_ids() ==
list_sorted_ids(%{"status" => "open", "task_list_ids" => "#{list_1.id}"})
assert [task_1, task_3, task_5, task_7] |> get_sorted_ids() ==
list_sorted_ids(%{"status" => "open", "task_list_ids" => "#{list_1.id},#{list_2.id}"})
assert [task_2, task_4, task_6, task_8] |> get_sorted_ids() ==
list_sorted_ids(%{"status" => "closed", "task_list_ids" => "#{list_1.id},#{list_2.id}"})
assert [task_1, task_3] |> get_sorted_ids() ==
list_sorted_ids(%{"status" => "open", "project_id" => project_1.id})
assert [task_9, task_10] |> get_sorted_ids() ==
list_sorted_ids(%{"archived" => true, "project_id" => project_1.id})
assert [task_10, task_11] |> get_sorted_ids() ==
list_sorted_ids(%{"archived" => true, "task_list_ids" => "#{list_1.id}"})
assert [task_9, task_11] |> get_sorted_ids() ==
list_sorted_ids(%{"archived" => true, "status" => "open"})
assert [task_11] |> get_sorted_ids() ==
list_sorted_ids(%{"archived" => true,
"project_id" => project_2.id,
"status" => "open",
"task_list_ids" => "#{list_1.id}"})
end
end
describe "query/2" do
test "queries by project_id and number" do
[task, _] = insert_pair(:task)
retrieved_task =
find_with_query(%{"number" => task.number, "project_id" => task.project_id})
assert retrieved_task.id == task.id
end
test "queries by task_list_id and number" do
[task, _] = insert_pair(:task)
retrieved_task =
find_with_query(%{"number" => task.number, "task_list_id" => task.task_list_id})
assert retrieved_task.id == task.id
end
test "queries by id" do
[task, _] = insert_pair(:task)
retrieved_task = find_with_query(%{"id" => task.id})
assert retrieved_task.id == task.id
end
end
end
<|start_filename|>test/lib/code_corps/github/api/gateway_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.API.GatewayTest do
@moduledoc false
use ExUnit.Case
alias CodeCorps.GitHub.API.Gateway
alias Plug.Conn
alias HTTPoison.Response
@port 12345
@endpoint "http://localhost" |> URI.merge("") |> Map.put(:port, @port) |> URI.to_string
@body %{"bar" => "baz"} |> Poison.encode!
@url @endpoint |> URI.merge("/foo") |> URI.to_string
setup do
bypass = Bypass.open(port: @port)
{:ok, bypass: bypass}
end
describe "request/5" do
[200, 201, 302, 401, 404, 500] |> Enum.each(fn code ->
@code code
test "returns a HTTPoison.Response in case of #{code}", %{bypass: bypass} do
Bypass.expect(bypass, "GET", "/foo", fn %Conn{req_headers: req_headers} = conn ->
assert {"foo", "bar"} in req_headers
conn |> Conn.resp(@code, @body)
end)
{:ok, %Response{} = response} =
Gateway.request(:get, @url, @body, [{"foo", "bar"}], [])
assert response.body == @body
assert response.status_code == @code
assert response.request_url == @url
end
end)
end
end
<|start_filename|>test/lib/code_corps/stripe_service/adapters/stripe_invoice_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripeInvoiceTest do
use CodeCorps.ModelCase
import CodeCorps.StripeService.Adapters.StripeInvoiceAdapter, only: [to_params: 1]
@stripe_invoice %Stripe.Invoice{
amount_due: 1000,
application_fee: 50,
attempt_count: 1,
attempted: true,
charge: "ch_123",
closed: true,
currency: "usd",
customer: "cus_123",
date: 1_483_553_506,
description: nil,
discount: nil,
ending_balance: 0,
forgiven: false,
id: "in_123",
livemode: false,
metadata: %{},
next_payment_attempt: nil,
paid: true,
period_end: 1_483_553_506,
period_start: 1_483_553_506,
receipt_number: nil,
starting_balance: 0,
statement_descriptor: nil,
subscription: "sub_123",
subscription_proration_date: nil,
subtotal: 1000,
tax: nil,
tax_percent: nil,
total: 1000,
webhooks_delivered_at: 1_483_553_511
}
@local_map %{
"amount_due" => 1000,
"application_fee" => 50,
"attempt_count" => 1,
"attempted" => true,
"charge_id_from_stripe" => "ch_123",
"closed" => true,
"currency" => "usd",
"customer_id_from_stripe" => "cus_123",
"date" => 1_483_553_506,
"description" => nil,
"ending_balance" => 0,
"forgiven" => false,
"id_from_stripe" => "in_123",
"next_payment_attempt" => nil,
"paid" => true,
"period_end" => 1_483_553_506,
"period_start" => 1_483_553_506,
"receipt_number" => nil,
"starting_balance" => 0,
"statement_descriptor" => nil,
"subscription_id_from_stripe" => "sub_123",
"subscription_proration_date" => nil,
"subtotal" => 1000,
"tax" => nil,
"tax_percent" => nil,
"total" => 1000,
"webhooks_delivered_at" => 1_483_553_511
}
describe "to_params/2" do
test "converts from stripe map to local properly" do
user = insert(:user)
stripe_platform_customer = insert(:stripe_platform_customer, user: user)
insert(:stripe_connect_customer, id_from_stripe: "cus_123", stripe_platform_customer: stripe_platform_customer, user: user).id
stripe_connect_subscription_id = insert(:stripe_connect_subscription, id_from_stripe: "sub_123", user: user).id
relationships = %{
"stripe_connect_subscription_id" => stripe_connect_subscription_id,
"user_id" => user.id
}
local_map = Map.merge(@local_map, relationships)
{:ok, result} = to_params(@stripe_invoice)
assert result == local_map
end
end
end
<|start_filename|>lib/code_corps/github/event/installation_repositories/validator.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.InstallationRepositories.Validator do
@moduledoc ~S"""
In charge of validating a GitHub.API.InstallationRepositories webhook payload.
[https://developer.github.com/v3/activity/events/types/#installationrepositoriesevent](https://developer.github.com/v3/activity/events/types/#installationrepositoriesevent)
"""
@behaviour CodeCorps.GitHub.Event.Validator
@doc ~S"""
Returns `true` if all keys required to properly handle an
InstallationRepositories webhook are present in the provided payload.
"""
@impl CodeCorps.GitHub.Event.Validator
@spec valid?(map) :: boolean
def valid?(%{
"action" => _, "installation" => %{"id" => _},
"repositories_added" => added, "repositories_removed" => removed})
when is_list(added) and is_list(removed) do
(added ++ removed) |> Enum.all?(&repository_valid?/1)
end
def valid?(%{
"action" => _, "installation" => %{"id" => _},
"repositories_added" => added}) when is_list(added) do
added |> Enum.all?(&repository_valid?/1)
end
def valid?(%{
"action" => _, "installation" => %{"id" => _},
"repositories_removed" => removed}) when is_list(removed) do
removed |> Enum.all?(&repository_valid?/1)
end
def valid?(_), do: false
@spec repository_valid?(any) :: boolean
defp repository_valid?(%{"id" => _, "name" => _}), do: true
defp repository_valid?(_), do: false
end
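# A quick illustration of what the validator accepts. Every repository entry
# must carry both an "id" and a "name"; a single entry missing either key makes
# the whole payload invalid.
defmodule CodeCorps.GitHub.Event.InstallationRepositories.ValidatorExample do
  @moduledoc false
  alias CodeCorps.GitHub.Event.InstallationRepositories.Validator

  # Returns true: both lists are present and every entry is fully specified.
  def valid_payload? do
    Validator.valid?(%{
      "action" => "added",
      "installation" => %{"id" => 1234},
      "repositories_added" => [%{"id" => 1, "name" => "code-corps-api"}],
      "repositories_removed" => []
    })
  end

  # Returns false: the removed entry is missing its "name" key.
  def invalid_payload? do
    Validator.valid?(%{
      "action" => "removed",
      "installation" => %{"id" => 1234},
      "repositories_removed" => [%{"id" => 1}]
    })
  end
end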
<|start_filename|>lib/code_corps/github/event/installation/validator.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.Installation.Validator do
@moduledoc ~S"""
In charge of validating a GitHub.API.Installation webhook payload.
https://developer.github.com/v3/activity/events/types/#installationevent
"""
@behaviour CodeCorps.GitHub.Event.Validator
@doc ~S"""
Returns `true` if all keys required to properly handle an Installation webhook
are present in the provided payload.
"""
@impl CodeCorps.GitHub.Event.Validator
@spec valid?(map) :: boolean
def valid?(%{
"action" => _,
"installation" => %{
"id" => _,
"account" => %{
"id" => _
}
},
"sender" => %{
"id" => _
}
}), do: true
def valid?(_), do: false
end
<|start_filename|>mix.exs<|end_filename|>
defmodule CodeCorps.Mixfile do
@moduledoc false
alias CodeCorps.{
Analytics, GitHub, Policy, StripeService, StripeTesting
}
use Mix.Project
def project do
[app: :code_corps,
version: "0.0.1",
elixir: "~> 1.6",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
dialyzer: [ignore_warnings: "dialyzer.ignore-warnings", plt_add_apps: [:kernel, :stdlib], plt_add_deps: :transitive],
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps(),
docs: docs(),
test_coverage: [tool: ExCoveralls]]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {CodeCorps, []},
extra_applications: [:sentry, :logger, :scout_apm, :timex, :tzdata]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:bamboo, "~> 0.7"}, # emails
{:bamboo_postmark, "~> 0.4.1"}, # postmark adapter for emails
{:dialyxir, "~> 0.5", only: [:dev, :test], runtime: false},
{:phoenix, "~> 1.3"},
{:phoenix_pubsub, "~> 1.0.2"},
{:phoenix_ecto, "~> 3.3.0"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.10.3"},
{:phoenix_live_reload, "~> 1.1", only: :dev},
{:gettext, "~> 0.13"},
{:cowboy, "~> 1.0"},
{:bcrypt_elixir, "~> 1.0"},
{:benchfella, "~> 0.3.0", only: :dev},
{:bypass, "~> 0.8.1", only: :test},
{:cloudex, "~> 1.0"},
{:comeonin, "~> 4.0"},
{:corsica, "~> 1.0"}, # CORS
{:credo, "~> 0.8", only: [:dev, :test]}, # Code style suggestions
{:earmark, "~> 1.2"}, # Markdown rendering
{:ecto_ordered, "0.2.0-beta1"},
{:ex_aws, "~> 1.1"}, # Amazon AWS
{:excoveralls, "~> 0.7", only: :test}, # Test coverage
{:ex_doc, "~> 0.17", only: [:dev, :test]},
{:ex_machina, "~> 2.0", only: :test}, # test factories
{:guardian, "~> 1.0"}, # Authentication (JWT)
{:hackney, ">= 1.4.4"},
{:httpoison, "~> 0.13"},
{:inch_ex, "~> 0.5", only: [:dev, :test]}, # Inch CI
{:inflex, "~> 1.9"},
{:ja_serializer, "~> 0.12"}, # JSON API
{:joken, "~> 1.5"}, # JWT encoding
{:mix_test_watch, "~> 0.5", only: :dev, runtime: false},
{:money, "~> 1.2.1"},
{:poison, "~> 3.0", override: true},
{:scout_apm, "~> 0.0"},
{:scrivener_ecto, "~> 1.2"}, # DB query pagination
{:segment, "~> 0.1"}, # Segment analytics
{:sentry, "~> 6.0"}, # Sentry error tracking
{:stripity_stripe, git: "https://github.com/code-corps/stripity_stripe.git", branch: "2.0-beta"}, # Stripe
{:sweet_xml, "~> 0.5"},
{:timber, "~> 2.0"}, # Logging
{:timex, "~> 3.0"},
{:timex_ecto, "~> 3.0"}
]
end
defp docs do
[
main: "README",
source_url: "https://github.com/code-corps/code-corps-api",
groups_for_modules: groups_for_modules(),
extras: [
"README.md": [title: "README"],
"LICENSE.md": [title: "LICENSE"]
]
]
end
defp groups_for_modules do
[
"Models": [
CodeCorps.Accounts,
CodeCorps.Accounts.Changesets,
CodeCorps.AuthToken,
CodeCorps.Category,
CodeCorps.Comment,
CodeCorps.DonationGoal,
CodeCorps.GithubAppInstallation,
CodeCorps.GithubComment,
CodeCorps.GithubEvent,
CodeCorps.GithubIssue,
CodeCorps.GithubPullRequest,
CodeCorps.GithubRepo,
CodeCorps.MapUtils,
CodeCorps.Model,
CodeCorps.Organization,
CodeCorps.OrganizationGithubAppInstallation,
CodeCorps.OrganizationInvite,
CodeCorps.Preview,
CodeCorps.Project,
CodeCorps.Project.Query,
CodeCorps.ProjectCategory,
CodeCorps.ProjectSkill,
CodeCorps.ProjectUser,
CodeCorps.Repo,
CodeCorps.Role,
CodeCorps.RoleSkill,
CodeCorps.Skill,
CodeCorps.SluggedRoute,
CodeCorps.StripeConnectAccount,
CodeCorps.StripeConnectCard,
CodeCorps.StripeConnectCharge,
CodeCorps.StripeConnectCustomer,
CodeCorps.StripeConnectPlan,
CodeCorps.StripeConnectSubscription,
CodeCorps.StripeEvent,
CodeCorps.StripeExternalAccount,
CodeCorps.StripeFileUpload,
CodeCorps.StripeInvoice,
CodeCorps.StripePlatformCard,
CodeCorps.StripePlatformCustomer,
CodeCorps.TaskList,
CodeCorps.TaskSkill,
CodeCorps.Tasks,
CodeCorps.Tasks.Query,
CodeCorps.Transition.UserState,
CodeCorps.User,
CodeCorps.UserCategory,
CodeCorps.UserRole,
CodeCorps.UserSkill,
CodeCorps.UserTask,
CodeCorps.Validators.SlugValidator,
CodeCorps.Validators.TimeValidator
],
"Services": [
CodeCorps.Comment.Service,
CodeCorps.Services.DonationGoalsService,
CodeCorps.Services.ForgotPasswordService,
CodeCorps.Services.MarkdownRendererService,
CodeCorps.Services.ProjectService,
CodeCorps.Services.UserService,
CodeCorps.Tasks
],
"Policies": [
Policy,
Policy.Category,
Policy.Comment,
Policy.DonationGoal,
Policy.GithubAppInstallation,
Policy.Helpers,
Policy.Organization,
Policy.OrganizationGithubAppInstallation,
Policy.OrganizationInvite,
Policy.Preview,
Policy.Project,
Policy.ProjectCategory,
Policy.ProjectSkill,
Policy.ProjectUser,
Policy.Role,
Policy.RoleSkill,
Policy.Skill,
Policy.StripeConnectAccount,
Policy.StripeConnectPlan,
Policy.StripeConnectSubscription,
Policy.StripePlatformCard,
Policy.StripePlatformCustomer,
Policy.Task,
Policy.TaskSkill,
Policy.User,
Policy.UserCategory,
Policy.UserRole,
Policy.UserSkill,
Policy.UserTask
],
"Helpers": [
CodeCorps.Helpers.Query,
CodeCorps.Helpers.RandomIconColor,
CodeCorps.Helpers.Slug,
CodeCorps.Helpers.String,
CodeCorps.Helpers.URL,
CodeCorps.RandomIconColor.Generator,
CodeCorps.RandomIconColor.TestGenerator
],
"Emails": [
CodeCorps.Mailer,
CodeCorps.Emails.BaseEmail,
CodeCorps.Emails.ForgotPasswordEmail,
CodeCorps.Emails.OrganizationInviteEmail,
CodeCorps.Emails.ProjectUserAcceptanceEmail,
CodeCorps.Emails.ReceiptEmail
],
"Web": [
CodeCorpsWeb,
CodeCorpsWeb.Endpoint,
CodeCorpsWeb.ErrorHelpers,
CodeCorpsWeb.Gettext,
CodeCorpsWeb.Router,
CodeCorpsWeb.Router.Helpers,
CodeCorpsWeb.UserSocket
],
"Web – Plugs": [
CodeCorpsWeb.Plug.AnalyticsIdentify,
CodeCorpsWeb.Plug.CurrentUser,
CodeCorpsWeb.Plug.DataToAttributes,
CodeCorpsWeb.Plug.IdsToIntegers,
CodeCorpsWeb.Plug.Segment,
CodeCorpsWeb.Plug.SetSentryUserContext,
CodeCorpsWeb.Plug.SetTimberUserContext
],
"Miscellaneous": [
CodeCorps.Adapter.MapTransformer,
CodeCorps.ConnUtils,
CodeCorps.Presenters.ImagePresenter,
CodeCorps.WebClient
],
"GitHub – API": [
GitHub,
GitHub.HTTPClientError,
GitHub.Utils.ResultAggregator,
GitHub.API,
GitHub.API.Comment,
GitHub.API.Headers,
GitHub.API.Installation,
GitHub.API.Issue,
GitHub.API.JWT,
GitHub.API.PullRequest,
GitHub.API.User,
GitHub.APIError,
GitHub.APIErrorObject
],
"GitHub – Sync": [
GitHub.Sync,
GitHub.Sync.Comment,
GitHub.Sync.Comment.Comment,
GitHub.Sync.Comment.Comment.Changeset,
GitHub.Sync.Comment.GithubComment,
GitHub.Sync.Installation.Changeset,
GitHub.Sync.Issue,
GitHub.Sync.Issue.GithubIssue,
GitHub.Sync.Issue.Task,
GitHub.Sync.Issue.Task.Changeset,
GitHub.Sync.PullRequest,
GitHub.Sync.PullRequest.BodyParser,
GitHub.Sync.PullRequest.GithubPullRequest,
GitHub.Sync.User.RecordLinker,
GitHub.Sync.Utils.RepoFinder
],
"Github – Webhooks": [
GitHub.Webhook.EventSupport,
GitHub.Webhook.Handler,
GitHub.Webhook.Processor,
GitHub.Event,
GitHub.Event.Handler,
GitHub.Event.Installation,
GitHub.Event.Installation.MatchedUser,
GitHub.Event.Installation.Repos,
GitHub.Event.Installation.UnmatchedUser,
GitHub.Event.Installation.Validator,
GitHub.Event.InstallationRepositories,
GitHub.Event.InstallationRepositories.Validator,
GitHub.Event.IssueComment,
GitHub.Event.IssueComment.CommentDeleter,
GitHub.Event.IssueComment.Validator,
GitHub.Event.Issues,
GitHub.Event.Issues.Validator,
GitHub.Event.PullRequest,
GitHub.Event.PullRequest.Validator
],
"GitHub – Adapters": [
GitHub.Adapters.AppInstallation,
GitHub.Adapters.Comment,
GitHub.Adapters.Issue,
GitHub.Adapters.PullRequest,
GitHub.Adapters.Repo,
GitHub.Adapters.User,
GitHub.Adapters.Utils.BodyDecorator
],
"Stripe – Services": [
StripeService.StripeConnectAccountService,
StripeService.StripeConnectCardService,
StripeService.StripeConnectChargeService,
StripeService.StripeConnectCustomerService,
StripeService.StripeConnectExternalAccountService,
StripeService.StripeConnectPlanService,
StripeService.StripeConnectSubscriptionService,
StripeService.StripeInvoiceService,
StripeService.StripePlatformCardService,
StripeService.StripePlatformCustomerService
],
"Stripe – Webhooks": [
StripeService.WebhookProcessing.ConnectEventHandler,
StripeService.WebhookProcessing.EnvironmentFilter,
StripeService.WebhookProcessing.EventHandler,
StripeService.WebhookProcessing.IgnoredEventHandler,
StripeService.WebhookProcessing.PlatformEventHandler,
StripeService.WebhookProcessing.WebhookProcessor,
StripeService.Events.AccountUpdated,
StripeService.Events.ConnectChargeSucceeded,
StripeService.Events.ConnectExternalAccountCreated,
StripeService.Events.CustomerSourceUpdated,
StripeService.Events.CustomerSubscriptionDeleted,
StripeService.Events.CustomerSubscriptionUpdated,
StripeService.Events.CustomerUpdated,
StripeService.Events.InvoicePaymentSucceeded
],
"Stripe – Adapters": [
StripeService.Adapters.StripeConnectAccountAdapter,
StripeService.Adapters.StripeConnectCardAdapter,
StripeService.Adapters.StripeConnectChargeAdapter,
StripeService.Adapters.StripeConnectCustomerAdapter,
StripeService.Adapters.StripeConnectPlanAdapter,
StripeService.Adapters.StripeConnectSubscriptionAdapter,
StripeService.Adapters.StripeEventAdapter,
StripeService.Adapters.StripeExternalAccountAdapter,
StripeService.Adapters.StripeFileUploadAdapter,
StripeService.Adapters.StripeInvoiceAdapter,
StripeService.Adapters.StripePlatformCardAdapter,
StripeService.Adapters.StripePlatformCustomerAdapter
],
"Stripe – Validators": [
StripeService.Validators.ProjectCanEnableDonations,
StripeService.Validators.ProjectSubscribable,
StripeService.Validators.UserCanSubscribe
],
"Stripe – Testing": [
StripeTesting.Account,
StripeTesting.Card,
StripeTesting.Charge,
StripeTesting.Customer,
StripeTesting.Event,
StripeTesting.ExternalAccount,
StripeTesting.Helpers,
StripeTesting.Invoice,
StripeTesting.Plan,
StripeTesting.Subscription,
StripeTesting.Token
],
"Analytics": [
Analytics.InMemoryAPI,
Analytics.SegmentAPI,
Analytics.SegmentDataExtractor,
Analytics.SegmentEventNameBuilder,
Analytics.SegmentPlugTracker,
Analytics.SegmentTracker,
Analytics.SegmentTrackingSupport,
Analytics.SegmentTraitsBuilder,
Analytics.TestAPI
],
"Cloudinary": [
CodeCorps.Cloudex.CloudinaryUrl,
CodeCorps.Cloudex.Uploader,
CloudexTest,
CloudexTest.Url
]
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"ecto.migrate": ["ecto.migrate", "ecto.dump"],
"ecto.rollback": ["ecto.rollback", "ecto.dump"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"],
"test.acceptance": ["ecto.create --quiet", "ecto.migrate", "test --include acceptance:true"]]
end
end
<|start_filename|>lib/code_corps/policy/github_app_installation.ex<|end_filename|>
defmodule CodeCorps.Policy.GithubAppInstallation do
@moduledoc """
Handles `User` authorization of actions on `GithubAppInstallation` records
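
A minimal sketch of the policy check (assumes the request `params` carry a
`"project_id"` the policy helpers can resolve to a project):

    CodeCorps.Policy.GithubAppInstallation.create?(user, %{"project_id" => project.id})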
"""
import CodeCorps.Policy.Helpers, only: [get_project: 1, owned_by?: 2]
alias CodeCorps.User
@spec create?(User.t, map) :: boolean
def create?(%User{} = user, params), do: params |> get_project |> owned_by?(user)
end
<|start_filename|>test/lib/code_corps/helpers/random_icon_color_test.exs<|end_filename|>
defmodule CodeCorps.RandomIconColor.RandomIconColorTest do
use ExUnit.Case, async: true
import CodeCorps.Helpers.RandomIconColor
import Ecto.Changeset
test "inserts color into changeset" do
changeset = generate_icon_color(cast({%{}, %{}}, %{}, []), :color_key)
assert get_field(changeset, :color_key) == "blue"
end
test "ignores invalid changeset" do
changeset = {%{}, %{color_key: :required}}
|> cast(%{}, [])
|> validate_required(:color_key)
assert generate_icon_color(changeset, :color_key) == changeset
end
end
<|start_filename|>lib/code_corps/policy/user.ex<|end_filename|>
defmodule CodeCorps.Policy.User do
@moduledoc ~S"""
Contains authorization policies for performing actions on a `User` record.
Used to authorize controller actions.
"""
alias CodeCorps.User
@spec update?(User.t, User.t) :: boolean
def update?(%User{id: current_user_id}, %User{id: user_id})
when current_user_id == user_id, do: true
def update?(%User{}, %User{}), do: false
end
<|start_filename|>lib/code_corps/analytics/segment_plug_tracker.ex<|end_filename|>
defmodule CodeCorps.Analytics.SegmentPlugTracker do
@moduledoc """
Segment tracking
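
`maybe_track/1` reads the response status off the `%Plug.Conn{}`, so it should
run after the response is set; a minimal sketch, assuming it is invoked from a
plug (in this project presumably `CodeCorpsWeb.Plug.Segment`):

    conn |> CodeCorps.Analytics.SegmentPlugTracker.maybe_track()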
"""
alias CodeCorps.Analytics.{
SegmentDataExtractor,
SegmentTracker,
SegmentTrackingSupport
}
@spec maybe_track(Plug.Conn.t) :: Plug.Conn.t
def maybe_track(conn) do
successful? = successful?(conn)
action = SegmentDataExtractor.get_action(conn)
resource = SegmentDataExtractor.get_resource(conn)
if successful? && SegmentTrackingSupport.includes?(action, resource) do
user_id = SegmentDataExtractor.get_user_id(conn, resource)
SegmentTracker.track(user_id, action, resource)
mark_tracked(conn)
else
mark_untracked(conn)
end
end
@spec successful?(Plug.Conn.t) :: boolean
defp successful?(%Plug.Conn{status: status}) when status in [200, 201, 204], do: true
defp successful?(_), do: false
@spec mark_untracked(Plug.Conn.t) :: Plug.Conn.t
defp mark_untracked(conn), do: conn |> Plug.Conn.assign(:segment_tracked, false)
@spec mark_tracked(Plug.Conn.t) :: Plug.Conn.t
defp mark_tracked(conn), do: conn |> Plug.Conn.assign(:segment_tracked, true)
end
<|start_filename|>test/lib/code_corps/github/adapters/repo_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.RepoTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Adapters.Repo
describe "from_api/1" do
test "maps api payload correctly" do
%{"repositories" => [repo]} = load_event_fixture("user_repositories")
assert Repo.from_api(repo) == %{
github_id: repo |> get_in(["id"]),
name: repo |> get_in(["name"]),
github_account_id: repo |> get_in(["owner", "id"]),
github_account_login: repo |> get_in(["owner", "login"]),
github_account_avatar_url: repo |> get_in(["owner", "avatar_url"]),
github_account_type: repo |> get_in(["owner", "type"])
}
end
end
end
<|start_filename|>test/lib/code_corps/github/sync/utils/finder_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Utils.FinderTest do
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Sync.Utils.Finder
@supported_repo_fixtures ~w(issue_comment_created issue_comment_edited issue_comment_deleted issues_closed issues_opened issues_edited issues_reopened)
@supported_repo_fixtures |> Enum.each(fn repo_fixture ->
@repo_fixture repo_fixture
describe "find_repo for #{@repo_fixture}" do
test "returns error if no matched repository" do
payload = load_event_fixture(@repo_fixture)
assert Finder.find_repo(payload) == {:error, :unmatched_repository}
end
test "returns repository if matched, preloads github repos" do
payload = load_event_fixture(@repo_fixture)
github_repo = insert(:github_repo, github_id: payload["repository"]["id"])
{:ok, %{id: found_repo_id}} = Finder.find_repo(payload)
assert found_repo_id == github_repo.id
end
end
end)
@supported_installation_fixtures ~w(installation_repositories_added installation_repositories_removed)
@supported_installation_fixtures |> Enum.each(fn installation_fixture ->
@installation_fixture installation_fixture
setup do
{:ok, %{payload: load_event_fixture(@installation_fixture)}}
end
describe "find_installation for #{@installation_fixture}" do
test "returns error if no matched repository" do
payload = load_event_fixture(@installation_fixture)
assert Finder.find_installation(payload) == {:error, :unmatched_installation}
end
test "returns repository if matched, preloads github repos" do
payload = load_event_fixture(@installation_fixture)
installation = insert(:github_app_installation, github_id: payload["installation"]["id"])
{:ok, %{id: installation_id}} = Finder.find_installation(payload)
assert installation_id == installation.id
end
end
end)
end
<|start_filename|>lib/code_corps/model/task_skill.ex<|end_filename|>
defmodule CodeCorps.TaskSkill do
@moduledoc """
Represents a link record between a task and a skill, indicating that
for a user to be suitable to work on a task, they need to possess skills
associated with that task.
"""
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "task_skills" do
belongs_to :skill, CodeCorps.Skill
belongs_to :task, CodeCorps.Task
timestamps()
end
@permitted_attrs [:skill_id, :task_id]
@required_attrs @permitted_attrs
@doc """
Builds a changeset used to insert a record into the database
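
A minimal usage sketch, assuming existing `task` and `skill` records:

    %CodeCorps.TaskSkill{}
    |> CodeCorps.TaskSkill.create_changeset(%{task_id: task.id, skill_id: skill.id})
    |> CodeCorps.Repo.insert()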
"""
@spec create_changeset(CodeCorps.TaskSkill.t, map) :: Ecto.Changeset.t
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, @permitted_attrs)
|> validate_required(@required_attrs)
|> assoc_constraint(:task)
|> assoc_constraint(:skill)
|> unique_constraint(:skill, name: :task_skills_task_id_skill_id_index)
end
end
<|start_filename|>test/lib/code_corps/github/api/comment_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.API.CommentTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub.API.Comment,
GitHub.Adapters
}
describe "create/1" do
test "calls github API to create a github comment for assigned comment, makes user request if user is connected, returns response" do
github_issue = insert(:github_issue, number: 5)
github_repo = insert(:github_repo, github_account_login: "foo", name: "bar")
user = insert(:user, github_auth_token: "baz")
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
comment = insert(:comment, task: task, user: user)
assert Comment.create(comment)
assert_received({
:post,
"https://api.github.com/repos/foo/bar/issues/5/comments",
body,
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token baz"}
],
_options
})
assert body == Adapters.Comment.to_api(comment) |> Poison.encode!
end
test "calls github API to create a github comment for assigned comment, makes integration request if user is not connected, returns response" do
github_issue = insert(:github_issue, number: 5)
github_repo = insert(:github_repo, github_account_login: "foo", name: "bar")
user = insert(:user, github_auth_token: nil)
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
comment = insert(:comment, task: task, user: user)
assert Comment.create(comment)
assert_received({
:post,
"https://api.github.com/repos/foo/bar/issues/5/comments",
body,
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token" <> _tok}
],
_options
})
assert body == Adapters.Comment.to_api(comment) |> Poison.encode!
end
test "returns error response if there was trouble" do
github_issue = insert(:github_issue, number: 5)
github_repo = insert(:github_repo, github_account_login: "foo", name: "bar")
user = insert(:user, github_auth_token: nil)
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
comment = insert(:comment, task: task, user: user)
with_mock_api CodeCorps.GitHub.FailureAPI do
assert Comment.create(comment)
end
assert_received({
:post,
"https://api.github.com/repos/foo/bar/issues/5/comments",
body,
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token" <> _tok}
],
_options
})
assert body == Adapters.Comment.to_api(comment) |> Poison.encode!
end
end
describe "update/1" do
test "calls github API to update a github comment for assigned comment, makes user request if user is connected, returns response" do
github_repo = insert(:github_repo, github_account_login: "foo", name: "bar")
github_issue = insert(:github_issue, number: 5, github_repo: github_repo)
user = insert(:user, github_auth_token: "baz")
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
github_comment = insert(:github_comment, github_id: 6, github_issue: github_issue)
comment = insert(:comment, task: task, user: user, github_comment: github_comment)
assert Comment.update(comment)
assert_received({
:patch,
"https://api.github.com/repos/foo/bar/issues/comments/6",
body,
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token" <> _tok}
],
_options
})
assert body == Adapters.Comment.to_api(comment) |> Poison.encode!
end
test "calls github API to update a github comment for assigned comment, makes integration request if user is not connected, returns response" do
github_issue = insert(:github_issue, number: 5)
github_repo = insert(:github_repo, github_account_login: "foo", name: "bar")
user = insert(:user, github_auth_token: nil)
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
github_comment = insert(:github_comment, github_id: 6, github_issue: github_issue)
comment = insert(:comment, task: task, user: user, github_comment: github_comment)
assert Comment.update(comment)
assert_received({
:patch,
"https://api.github.com/repos/foo/bar/issues/comments/6",
body,
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token" <> _tok}
],
_options
})
assert body == Adapters.Comment.to_api(comment) |> Poison.encode!
end
test "returns error response if there was trouble" do
github_issue = insert(:github_issue, number: 5)
github_repo = insert(:github_repo, github_account_login: "foo", name: "bar")
user = insert(:user, github_auth_token: nil)
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
github_comment = insert(:github_comment, github_id: 6, github_issue: github_issue)
comment = insert(:comment, task: task, user: user, github_comment: github_comment)
with_mock_api CodeCorps.GitHub.FailureAPI do
assert Comment.update(comment)
end
assert_received({
:patch,
"https://api.github.com/repos/foo/bar/issues/comments/6",
body,
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token" <> _tok}
],
_options
})
assert body == Adapters.Comment.to_api(comment) |> Poison.encode!
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/stripe_connect_subscription_service_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.StripeConnectSubscriptionServiceTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeService.StripeConnectSubscriptionService
setup do
organization = insert(:organization)
insert(:stripe_connect_account, organization: organization)
stripe_connect_plan = insert(:stripe_connect_plan)
project = insert(:project, stripe_connect_plan: stripe_connect_plan, organization: organization)
user = insert(:user)
insert(:stripe_platform_customer, user: user)
insert(:stripe_platform_card, user: user)
{:ok, project: project, user: user}
end
describe "find_or_create/1" do
test "retrieves and returns a subscription if one is already present", %{project: project, user: user} do
insert(:stripe_connect_subscription, user: user, stripe_connect_plan: project.stripe_connect_plan, quantity: 300)
{:ok, subscription} =
StripeConnectSubscriptionService.find_or_create(%{"project_id" => project.id, "user_id" => user.id, "quantity" => 200})
assert subscription.quantity == 300
end
test "creates and returns a subscription if none is present", %{project: project, user: user} do
{:ok, subscription} =
StripeConnectSubscriptionService.find_or_create(%{"project_id" => project.id, "user_id" => user.id, "quantity" => 200})
assert subscription.quantity == 200
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/message_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.MessageControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :message
alias CodeCorps.{Conversation, Message, Repo}
@valid_attrs %{
body: "Test body.",
initiated_by: "admin",
subject: "A test subject"
}
@invalid_attrs %{
body: nil,
initiated_by: "admin",
subject: nil
}
describe "index" do
@tag :authenticated
test "lists all entries user is authorized to view", %{conn: conn, current_user: user} do
[message_1, message_2] = insert_pair(:message, initiated_by: "user", author: user)
_message_3 = insert(:message)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([message_1.id, message_2.id])
end
@tag authenticated: :admin
test "lists all entries if user is admin", %{conn: conn} do
[message_1, message_2] = insert_pair(:message)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([message_1.id, message_2.id])
end
end
describe "show" do
@tag :authenticated
test "shows chosen resource", %{conn: conn, current_user: user} do
message = insert(:message, initiated_by: "user", author: user)
conn
|> request_show(message)
|> json_response(200)
|> assert_id_from_response(message.id)
end
test "renders 401 when unauthenticated", %{conn: conn} do
message = insert(:message)
assert conn |> request_show(message) |> json_response(401)
end
@tag :authenticated
test "renders 403 when unauthorized", %{conn: conn} do
message = insert(:message)
assert conn |> request_show(message) |> json_response(403)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: user} do
project = insert(:project)
insert(:project_user, project: project, user: user, role: "owner")
attrs = @valid_attrs |> Map.merge(%{author_id: user.id, project_id: project.id})
assert conn |> request_create(attrs) |> json_response(201)
assert Repo.get_by(Message, project_id: project.id, author_id: user.id)
end
@tag :authenticated
test "creates child conversation if attributes for it are provided", %{conn: conn, current_user: user} do
project = insert(:project)
insert(:project_user, project: project, user: user, role: "owner")
recipient = insert(:user)
conversation_payload =
%{user_id: recipient.id}
|> CodeCorps.JsonAPIHelpers.build_json_payload("conversation")
payload =
@valid_attrs
|> Map.merge(%{author_id: user.id, project_id: project.id})
|> CodeCorps.JsonAPIHelpers.build_json_payload
|> Map.put("included", [conversation_payload])
path = conn |> message_path(:create)
assert conn |> post(path, payload) |> json_response(201)
message = Repo.get_by(Message, project_id: project.id, author_id: user.id)
assert message
assert Repo.get_by(Conversation, user_id: recipient.id, message_id: message.id)
end
@tag :authenticated
test "does not create resource and renders 422 when data is invalid", %{
conn: conn,
current_user: user
} do
project = insert(:project)
insert(:project_user, project: project, user: user, role: "owner")
attrs = @invalid_attrs |> Map.merge(%{author_id: user.id, project_id: project.id})
assert conn |> request_create(attrs) |> json_response(422)
refute Repo.one(Message)
end
test "does not create resource and renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
refute Repo.one(Message)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
refute Repo.one(Message)
end
@tag :authenticated
test "renders 403 when initiated by admin and not authorized", %{conn: conn, current_user: user} do
project = insert(:project)
insert(:project_user, project: project, user: user, role: "contributor")
params = %{
author_id: user.id,
initiated_by: "admin",
project_id: project.id
}
attrs = @valid_attrs |> Map.merge(params)
assert conn |> request_create(attrs) |> json_response(403)
end
end
end
<|start_filename|>test/lib/code_corps/accounts/accounts_test.exs<|end_filename|>
defmodule CodeCorps.AccountsTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.{Accounts, Comment, Task, User, GitHub.TestHelpers}
alias Ecto.Changeset
describe "create_from_github/1" do
test "creates proper user from provided payload" do
{:ok, %User{} = user} =
"user"
|> TestHelpers.load_endpoint_fixture
|> Accounts.create_from_github
assert user.id
assert user.default_color
assert user.sign_up_context == "github"
assert user.type == "user"
end
test "validates the uniqueness of email" do
%{"email" => email} = payload = TestHelpers.load_endpoint_fixture("user")
# Ensure a user exists so there's a duplicate email
insert(:user, email: email)
{:error, %Changeset{} = changeset} =
payload
|> Accounts.create_from_github
assert changeset.errors[:email] == {"has already been taken", []}
end
test "validates the uniqueness of the github_id" do
%{"id" => github_id} = payload = TestHelpers.load_endpoint_fixture("user")
# Ensure a user exists so there's a duplicate github_id
insert(:user, github_id: github_id)
{:error, %Changeset{} = changeset} =
payload
|> Accounts.create_from_github
assert changeset.errors[:github_id] == {"account is already connected to someone else", []}
end
test "uploads photo from GitHub avatar" do
{:ok, %User{} = user} =
"user"
|> TestHelpers.load_endpoint_fixture
|> Accounts.create_from_github
user = Repo.get(User, user.id)
assert user.cloudinary_public_id
end
end
describe "update_from_github_oauth/3" do
test "updates proper user and associations given GitHub payload" do
user = insert(:user)
%{"id" => github_id} = params = TestHelpers.load_endpoint_fixture("user")
token = "random_token"
{:ok, %User{} = user_for_github_user} =
params
|> Accounts.create_from_github()
comment = insert(:comment, user: user_for_github_user)
task = insert(:task, user: user_for_github_user)
{:ok, %User{} = user} =
user
|> Accounts.update_from_github_oauth(params, token)
user_for_github_user = Repo.get(User, user_for_github_user.id)
comment = Repo.get(Comment, comment.id)
task = Repo.get(Task, task.id)
# Unsets the old user's github_id
assert user_for_github_user.sign_up_context == "github"
assert user_for_github_user.github_id_was == github_id
refute user_for_github_user.github_id
# Sets the new user data
assert user.id
assert user.github_auth_token == token
assert user.github_id == github_id
assert user.sign_up_context == "default"
assert user.type == "user"
# Changes associations
assert comment.user_id == user.id
assert task.user_id == user.id
end
test "does not update their image if it already exists" do
user = insert(:user, cloudinary_public_id: "123")
params = TestHelpers.load_endpoint_fixture("user")
{:ok, %User{} = user} =
user
|> Accounts.update_from_github_oauth(params, "random_token")
user = Repo.get(User, user.id)
assert user.cloudinary_public_id === "123"
end
test "updates their image if does not exist" do
user = insert(:user, cloudinary_public_id: nil)
params = TestHelpers.load_endpoint_fixture("user")
{:ok, %User{} = user} =
user
|> Accounts.update_from_github_oauth(params, "random_token")
user = Repo.get(User, user.id)
assert user.cloudinary_public_id
end
end
end
<|start_filename|>lib/code_corps_web/controllers/fallback_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.FallbackController do
@moduledoc false
use CodeCorpsWeb, :controller
alias Ecto.Changeset
require Logger
@type supported_fallbacks :: {:error, Changeset.t} |
{:error, :not_authorized} |
{:error, :expired} |
{:error, :github} |
{:error, %Stripe.Error{}} |
{:error, %CodeCorps.GitHub.APIError{}} |
nil
@doc ~S"""
Default fallback for different `with` clause errors in controllers across the
application.
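
Controllers opt into this module through Phoenix's `action_fallback`; a minimal
sketch (the controller and its `do_update/1` helper are hypothetical):

    defmodule CodeCorpsWeb.ExampleController do
      use CodeCorpsWeb, :controller

      action_fallback CodeCorpsWeb.FallbackController

      def update(conn, params) do
        # any non-matching `with` result falls through to FallbackController.call/2
        with {:ok, record} <- do_update(params) do
          conn |> render("show.json-api", data: record)
        end
      end
    end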
"""
@spec call(Conn.t, supported_fallbacks) :: Conn.t
def call(%Conn{} = conn, {:error, %Changeset{} = changeset}) do
conn
|> put_status(:unprocessable_entity)
|> render(CodeCorpsWeb.ChangesetView, "422.json", changeset: changeset)
end
def call(%Conn{} = conn, {:error, :not_authorized}) do
conn
|> put_status(403)
|> render(CodeCorpsWeb.TokenView, "403.json", message: "You are not authorized to perform this action.")
end
def call(%Conn{} = conn, {:error, :expired}) do
conn
|> put_status(:not_found)
|> render(CodeCorpsWeb.ErrorView, "404.json", %{})
end
def call(%Conn{} = conn, nil) do
conn
|> put_status(:not_found)
|> render(CodeCorpsWeb.ErrorView, "404.json", %{})
end
def call(%Conn{} = conn, {:error, :github}) do
conn
|> put_status(500)
|> render(CodeCorpsWeb.ErrorView, "500.json", message: "An unknown error occurred with GitHub's API.")
end
def call(%Conn{} = conn, {:error, %Stripe.Error{message: message}}) do
Logger.info message
conn
|> put_status(500)
|> render(CodeCorpsWeb.ErrorView, "500.json", message: message)
end
def call(%Conn{} = conn, {:error, %CodeCorps.GitHub.APIError{message: message}}) do
Logger.info message
conn
|> put_status(500)
|> render(CodeCorpsWeb.ErrorView, "github-error.json", message: message)
end
end
<|start_filename|>test/lib/code_corps/github/sync/github_issue/github_issue_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubIssueTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub.Adapters,
GitHub.Sync,
GithubIssue,
GithubUser,
Repo
}
alias Ecto.Changeset
@issue_event_payload load_event_fixture("issues_opened")
describe "create_or_update_issue/2+3" do
test "creates issue if none exists" do
%{"issue" => attrs} = @issue_event_payload
github_repo = insert(:github_repo)
{:ok, %GithubIssue{} = created_issue} =
attrs |> Sync.GithubIssue.create_or_update_issue(github_repo)
assert Repo.one(GithubIssue)
created_attributes =
attrs
|> Adapters.Issue.to_issue
|> Map.delete(:closed_at)
|> Map.delete(:repository_url)
returned_issue = Repo.get_by(GithubIssue, created_attributes)
assert returned_issue.id == created_issue.id
assert returned_issue.github_repo_id == github_repo.id
end
test "updates issue if it already exists" do
%{"issue" => %{"id" => issue_id} = attrs} = @issue_event_payload
github_repo = insert(:github_repo)
issue =
insert(:github_issue, github_id: issue_id, github_repo: github_repo)
{:ok, %GithubIssue{} = updated_issue} =
attrs |> Sync.GithubIssue.create_or_update_issue(github_repo)
assert updated_issue.id == issue.id
assert updated_issue.github_repo_id == github_repo.id
end
test "creates new issue linked to pull request if specified" do
%{"issue" => attrs} = @issue_event_payload
github_repo = insert(:github_repo)
github_pull_request = insert(:github_pull_request, github_repo: github_repo)
{:ok, %GithubIssue{} = created_issue} =
attrs
|> Sync.GithubIssue.create_or_update_issue(github_repo, github_pull_request)
assert created_issue.github_pull_request_id == github_pull_request.id
end
test "updates issue linked to pull request if specified" do
%{"issue" => %{"id" => issue_id} = attrs} = @issue_event_payload
github_repo = insert(:github_repo)
github_pull_request = insert(:github_pull_request, github_repo: github_repo)
issue = insert(:github_issue, github_id: issue_id, github_repo: github_repo)
{:ok, %GithubIssue{} = updated_issue} =
attrs
|> Sync.GithubIssue.create_or_update_issue(github_repo, github_pull_request)
assert updated_issue.id == issue.id
assert updated_issue.github_pull_request_id == github_pull_request.id
end
test "returns changeset if payload is somehow not as expected" do
bad_payload = @issue_event_payload |> put_in(["issue", "number"], nil)
%{"issue" => attrs} = bad_payload
github_repo = insert(:github_repo)
{:error, changeset} = attrs |> Sync.GithubIssue.create_or_update_issue(github_repo)
refute changeset.valid?
end
test "returns github user changeset if insert of github user fails" do
%{"issue" => attrs} = @issue_event_payload
github_repo = insert(:github_repo)
assert {:error, %Changeset{data: %GithubUser{}} = changeset} =
attrs
|> Kernel.put_in(["user", "login"], nil)
|> Sync.GithubIssue.create_or_update_issue(github_repo)
refute changeset.valid?
end
end
end
<|start_filename|>lib/code_corps/policy/conversation.ex<|end_filename|>
defmodule CodeCorps.Policy.Conversation do
@moduledoc ~S"""
Handles `CodeCorps.User` authorization of actions on `CodeCorps.Conversation`
records.
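
A sketch of scoping the conversations a user may list (assumes a `%User{}`
struct is in scope):

    CodeCorps.Conversation
    |> CodeCorps.Policy.Conversation.scope(user)
    |> CodeCorps.Repo.all()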
"""
import CodeCorps.Policy.Helpers,
only: [administered_by?: 2, get_message: 1, get_project: 1]
import Ecto.Query
alias CodeCorps.{Conversation, Message, Project, ProjectUser, Repo, User}
@spec scope(Ecto.Queryable.t, User.t) :: Ecto.Queryable.t
def scope(queryable, %User{admin: true}), do: queryable
def scope(queryable, %User{id: id}) do
projects_administered_by_user_ids =
Project
|> join(:inner, [p], pu in ProjectUser, pu.project_id == p.id)
|> where([_p, pu], pu.user_id == ^id)
|> where([_p, pu], pu.role in ~w(admin owner))
|> select([p], p.id)
|> Repo.all
scoped_message_ids =
Message
|> where([m], m.author_id == ^id)
|> or_where([m], m.project_id in ^projects_administered_by_user_ids)
|> select([m], m.id)
|> Repo.all
queryable
|> where(user_id: ^id)
|> or_where([c], c.message_id in ^scoped_message_ids)
end
def show?(%User{id: user_id}, %Conversation{user_id: target_user_id})
when user_id == target_user_id do
true
end
def show?(%User{} = user, %Conversation{} = conversation) do
conversation |> get_message() |> get_project() |> administered_by?(user)
end
def show?(_, _), do: false
def update?(%User{admin: true}, _conversation), do: true
def update?(%User{} = user, %Conversation{} = conversation) do
conversation |> get_message() |> get_project() |> administered_by?(user)
end
def update?(_, _), do: false
end
<|start_filename|>priv/repo/migrations/20171114033357_add_unique_constraint_for_project_github_repo_and_project.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddUniqueConstraintForProjectGithubRepoAndProject do
use Ecto.Migration
def up do
drop_if_exists index(:project_github_repos, [:project_id, :github_repo_id], unique: true)
drop_if_exists index(:project_github_repos, [:github_repo_id], unique: true)
create unique_index(:project_github_repos, [:github_repo_id])
end
def down do
drop_if_exists index(:project_github_repos, [:github_repo_id], unique: true)
create unique_index(:project_github_repos, [:project_id, :github_repo_id])
end
end
<|start_filename|>test/lib/code_corps/model/github_event_test.exs<|end_filename|>
defmodule CodeCorps.GithubEventTest do
use CodeCorps.ModelCase
alias CodeCorps.GithubEvent
@valid_attrs %{
action: "some content",
github_delivery_id: "71aeab80-9e59-11e7-81ac-198364bececc",
payload: %{"key" => "value"},
status: "processing",
type: "some content"
}
@invalid_attrs %{}
describe "changeset/2" do
test "with valid attributes" do
changeset = GithubEvent.changeset(%GithubEvent{}, @valid_attrs)
assert changeset.valid?
end
test "with invalid attributes" do
changeset = GithubEvent.changeset(%GithubEvent{}, @invalid_attrs)
refute changeset.valid?
end
test "validates inclusion of status" do
attrs = @valid_attrs |> Map.put(:status, "foo")
changeset = GithubEvent.changeset(%GithubEvent{}, attrs)
refute changeset.valid?
assert changeset.errors[:status] == {"is invalid", [validation: :inclusion]}
end
end
describe "update_changeset/2" do
test "with retry true and status errored" do
attrs = @valid_attrs |> Map.merge(%{retry: true, status: "errored"})
changeset = GithubEvent.update_changeset(%GithubEvent{status: "errored"}, attrs)
assert changeset.valid?
assert changeset.changes[:status] == "reprocessing"
end
test "with retry true and status not errored" do
attrs = @valid_attrs |> Map.put(:retry, true)
changeset = GithubEvent.update_changeset(%GithubEvent{status: "foo"}, attrs)
refute changeset.valid?
assert_error_message(changeset, :retry, "only possible when status is errored")
end
test "with retry false" do
attrs = @valid_attrs |> Map.put(:retry, false)
changeset = GithubEvent.update_changeset(%GithubEvent{}, attrs)
refute changeset.valid?
refute changeset.changes[:status] == "reprocessing"
end
end
end
<|start_filename|>lib/code_corps/model/skill.ex<|end_filename|>
defmodule CodeCorps.Skill do
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "skills" do
field :description, :string
field :original_row, :integer
field :title, :string
has_many :project_skills, CodeCorps.ProjectSkill
has_many :projects, through: [:project_skills, :project]
has_many :role_skills, CodeCorps.RoleSkill
has_many :roles, through: [:role_skills, :role]
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
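
A minimal sketch (attribute values are illustrative):

    %CodeCorps.Skill{}
    |> CodeCorps.Skill.changeset(%{title: "Elixir", description: "A functional language"})
    |> CodeCorps.Repo.insert()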
"""
@spec changeset(CodeCorps.Skill.t, map) :: Ecto.Changeset.t
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:description, :original_row, :title])
|> validate_required([:title])
|> unique_constraint(:title)
end
end
<|start_filename|>lib/code_corps/stripe_service/webhook_processing/webhook_processor.ex<|end_filename|>
defmodule CodeCorps.StripeService.WebhookProcessing.WebhookProcessor do
@moduledoc """
Used to process a Stripe webhook request.
"""
alias CodeCorps.{Processor, StripeService.WebhookProcessing.EventHandler}
@api Application.get_env(:code_corps, :stripe)
@doc """
Used to process a Stripe webhook event in an async manner.
Receives the event JSON as the first parameter.
Since a webhook can be a platform or a connect webhook, the function requires
the handler module as the second parameter.
Returns `{:ok, pid}`
"""
@spec process_async(map, module) :: Processor.result
def process_async(event_params, handler) do
Processor.process(fn -> process(event_params, handler) end)
end
@doc """
Used to process a Stripe webhook event.
Receives the event JSON as the first parameter.
Since a webhook can be a platform or a connect webhook, the function requires
the handler module as the second parameter.
# Returns
- `{:ok, %CodeCorps.StripeEvent{}}` if the event was processed in some way. This includes
events that were already processed, that errored out, or that are simply not handled at the moment.
- `{:error, :already_processing}` if the event already exists locally and is in the process of
being handled.
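
A minimal sketch, assuming `event_params` is the decoded webhook body and the
event arrived on the platform (not Connect) endpoint:

    alias CodeCorps.StripeService.WebhookProcessing.{PlatformEventHandler, WebhookProcessor}

    WebhookProcessor.process(event_params, PlatformEventHandler)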
"""
def process(%{"id" => id} = event_params, handler) do
with account <- event_params |> Map.get("account"),
{:ok, %Stripe.Event{} = api_event} <- retrieve_event_from_api(id, account)
do
EventHandler.handle(api_event, handler, account)
end
end
defp retrieve_event_from_api(id, nil), do: @api.Event.retrieve(id)
defp retrieve_event_from_api(id, account), do: @api.Event.retrieve(id, connect_account: account)
end
<|start_filename|>lib/code_corps_web/views/project_category_view.ex<|end_filename|>
defmodule CodeCorpsWeb.ProjectCategoryView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
has_one :project, type: "project", field: :project_id
has_one :category, type: "category", field: :category_id
end
<|start_filename|>test/lib/code_corps/github/sync/user/record_linker_test.exs<|end_filename|>
defmodule CodeCorps.Sync.User.RecordLinkerTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub.Sync.User.RecordLinker,
Repo,
User
}
alias CodeCorps.GitHub.Adapters.User, as: UserAdapter
def remove_nils(payload) do
payload
|> Enum.reject(fn {_, v} -> is_nil(v) end)
|> Map.new()
end
describe "link_to/2 for comments" do
@payload load_event_fixture("issue_comment_created")
@bot_payload load_event_fixture("issue_comment_created_by_bot")
@user_payload @payload["comment"]["user"]
@bot_user_payload @bot_payload["comment"]["user"]
test "finds user by comment association" do
%{"comment" => %{"id" => github_id} = comment} = @payload
user = insert(:user)
# multiple comments, but with same user is ok
github_comment = insert(:github_comment, github_id: github_id)
insert_pair(:comment, github_comment: github_comment, user: user)
{:ok, %User{} = returned_user} = RecordLinker.link_to(github_comment, comment)
assert user.id == returned_user.id
end
test "returns error if multiple users by comment association found" do
%{"comment" => %{"id" => github_id} = comment} = @payload
# multiple matched comments each with different user is not ok
github_comment = insert(:github_comment, github_id: github_id)
insert_pair(:comment, github_comment: github_comment)
assert {:error, :multiple_users} ==
RecordLinker.link_to(github_comment, comment)
end
test "finds user by github id if none is found by comment association" do
%{"comment" => %{"id" => github_id} = comment} = @payload
attributes = @user_payload |> UserAdapter.to_user() |> remove_nils()
preinserted_user = insert(:user, attributes)
github_comment = insert(:github_comment, github_id: github_id)
{:ok, %User{} = returned_user} = RecordLinker.link_to(github_comment, comment)
assert preinserted_user.id == returned_user.id
assert Repo.get_by(User, attributes)
end
test "creates user if none is by comment or id association" do
%{"comment" => %{"id" => github_id} = comment} = @payload
github_comment = insert(:github_comment, github_id: github_id)
{:ok, %User{} = returned_user} = RecordLinker.link_to(github_comment, comment)
created_attributes = @user_payload |> UserAdapter.to_user() |> remove_nils()
created_user = Repo.get_by(User, created_attributes)
assert created_user.id == returned_user.id
end
test "if comment created by bot, finds user by comment association" do
%{"comment" => %{
"id" => github_id,
"user" => %{"id" => bot_user_github_id}
} = comment
} = @bot_payload
github_comment = insert(:github_comment, github_id: github_id)
%{user: preinserted_user} = insert(:comment, github_comment: github_comment)
{:ok, %User{} = returned_user} =
RecordLinker.link_to(github_comment, comment)
assert preinserted_user.id == returned_user.id
refute Repo.get_by(User, github_id: bot_user_github_id)
end
test "if issue opened by bot, and no user by comment association, creates a bot user" do
%{"comment" => %{"id" => github_id} = comment} = @bot_payload
github_comment = insert(:github_comment, github_id: github_id)
{:ok, %User{} = returned_user} = RecordLinker.link_to(github_comment, comment)
created_attributes = @bot_user_payload |> UserAdapter.to_user() |> remove_nils()
created_user = Repo.get_by(User, created_attributes)
assert created_user.id == returned_user.id
end
test "returns changeset if payload is somehow not as expected" do
bad_payload = @payload |> put_in(["comment", "user", "type"], "Organization")
%{"comment" => %{"id" => github_id} = comment} = bad_payload
github_comment = insert(:github_comment, github_id: github_id)
{:error, changeset} = RecordLinker.link_to(github_comment, comment)
refute changeset.valid?
end
end
describe "link_to/2 for issues" do
@payload load_event_fixture("issues_opened")
@bot_payload load_event_fixture("issues_opened_by_bot")
@user_payload @payload["issue"]["user"]
@bot_user_payload @bot_payload["issue"]["user"]
test "finds user by task association" do
%{
"issue" => %{"number" => number} = issue,
"repository" => %{"id" => github_repo_id}
} = @payload
user = insert(:user)
github_repo = insert(:github_repo, github_id: github_repo_id)
github_issue = insert(:github_issue, number: number, github_repo: github_repo)
# multiple tasks, all with same user is ok
insert_pair(
:task, user: user, github_repo: github_repo, github_issue: github_issue)
{:ok, %User{} = returned_user} = RecordLinker.link_to(github_issue, issue)
assert user.id == returned_user.id
end
test "returns error if multiple users by task association found" do
%{
"issue" => %{"number" => number} = issue,
"repository" => %{"id" => github_repo_id}
} = @payload
github_repo = insert(:github_repo, github_id: github_repo_id)
github_issue = insert(:github_issue, number: number, github_repo: github_repo)
# multiple tasks, each with different user is not ok
insert_pair(:task, github_repo: github_repo, github_issue: github_issue)
assert {:error, :multiple_users} ==
RecordLinker.link_to(github_issue, issue)
end
test "returns user by github id if no user by task association found" do
%{"issue" => %{"number" => number} = issue} = @payload
attributes = @user_payload |> UserAdapter.to_user() |> remove_nils()
preinserted_user = insert(:user, attributes)
github_issue = insert(:github_issue, number: number)
{:ok, %User{} = returned_user} = RecordLinker.link_to(github_issue, issue)
assert preinserted_user.id == returned_user.id
assert Repo.get_by(User, attributes)
end
test "creates user if none is found by any other method" do
%{"issue" => %{"number" => number} = issue} = @payload
github_issue = insert(:github_issue, number: number)
{:ok, %User{} = returned_user} = RecordLinker.link_to(github_issue, issue)
created_attributes = @user_payload |> UserAdapter.to_user() |> remove_nils()
created_user = Repo.get_by(User, created_attributes)
assert created_user.id == returned_user.id
end
test "if issue opened by bot, finds user by task association" do
%{
"issue" => %{
"number" => number, "user" => %{"id" => bot_user_github_id}
} = issue,
"repository" => %{"id" => github_repo_id}
} = @bot_payload
preinserted_user = insert(:user)
github_issue = insert(:github_issue, number: number)
repo = insert(:github_repo, github_id: github_repo_id)
insert(
:task,
user: preinserted_user, github_repo: repo,
github_issue: github_issue)
{:ok, %User{} = returned_user} =
RecordLinker.link_to(github_issue, issue)
assert preinserted_user.id == returned_user.id
refute Repo.get_by(User, github_id: bot_user_github_id)
end
test "if issue opened by bot, and no user by task association, creates a bot user" do
%{"issue" => %{"number" => number} = issue} = @bot_payload
github_issue = insert(:github_issue, number: number)
{:ok, %User{} = returned_user} = RecordLinker.link_to(github_issue, issue)
created_attributes = @bot_user_payload |> UserAdapter.to_user() |> remove_nils()
created_user = Repo.get_by(User, created_attributes)
assert created_user.id == returned_user.id
end
test "returns changeset if payload is somehow not as expected" do
%{"issue" => %{"number" => number} = issue} = @payload
github_issue = insert(:github_issue, number: number)
bad_payload = issue |> put_in(["user", "type"], "Organization")
{:error, changeset} = RecordLinker.link_to(github_issue, bad_payload)
refute changeset.valid?
end
end
end
<|start_filename|>test/lib/code_corps/accounts/changesets_test.exs<|end_filename|>
defmodule CodeCorps.Accounts.ChangesetsTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.{Accounts.Changesets, User}
describe "create_from_github_changeset/1" do
test "validates inclusion of type" do
params = %{"email" => "<EMAIL>", "type" => "Organization"}
changeset = Changesets.create_from_github_changeset(%User{}, params)
assert changeset.errors[:type] == {"is invalid", [validation: :inclusion]}
end
test "generates the default icon color" do
changeset = Changesets.create_from_github_changeset(%User{}, %{})
assert changeset.changes.default_color
end
test "ensures nil values are omitted" do
params = %{"email" => nil, "github_avatar_url" => nil, "type" => "bot"}
changeset = Changesets.create_from_github_changeset(%User{}, params)
refute changeset.changes[:email]
refute changeset.changes[:github_avatar_url]
end
end
describe "update_from_github_oauth_changeset/2" do
test "ensures an email is not overridden when the user has an email" do
user = insert(:user, email: "<EMAIL>")
params = %{"email" => "<EMAIL>"}
changeset = Changesets.update_from_github_oauth_changeset(user, params)
refute changeset.changes[:email]
end
test "ensures an email is not set to nil" do
user = insert(:user, email: "<EMAIL>")
params = %{"email" => nil}
changeset = Changesets.update_from_github_oauth_changeset(user, params)
refute changeset.changes[:email]
end
test "ensures an email is set when initially nil" do
user = insert(:user, email: nil)
params = %{"email" => "<EMAIL>"}
changeset = Changesets.update_from_github_oauth_changeset(user, params)
assert changeset.changes[:email]
end
test "works without email params" do
user = insert(:user)
params = %{}
changeset = Changesets.update_from_github_oauth_changeset(user, params)
refute changeset.errors[:email]
end
end
end
<|start_filename|>priv/repo/migrations/20171127215847_change_organization_invite_fulfillment.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.ChangeOrganizationInviteFulfillment do
use Ecto.Migration
def up do
alter table(:organization_invites) do
add :organization_id, references(:organizations, on_delete: :nothing)
remove :fulfilled
end
create index(:organization_invites, [:organization_id], unique: true)
end
def down do
drop_if_exists index(:organization_invites, [:organization_id], unique: true)
alter table(:organization_invites) do
remove :organization_id
add :fulfilled, :boolean, default: false
end
end
end
<|start_filename|>priv/repo/migrations/20170913114958_remove_github_event_source_field.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.RemoveGithubEventSourceField do
use Ecto.Migration
def up do
alter table(:github_events) do
remove :source
end
end
def down do
alter table(:github_events) do
add :source, :string
end
end
end
<|start_filename|>priv/repo/migrations/20171017235433_add_missing_github_id_indexes.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddMissingGithubIdIndexes do
use Ecto.Migration
def change do
create index(:github_comments, [:github_id], unique: true)
create index(:github_issues, [:github_id], unique: true)
create index(:github_repos, [:github_id], unique: true)
end
end
<|start_filename|>priv/repo/migrations/20171106050209_add_pull_requests_to_task_list.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddPullRequestsToTaskList do
use Ecto.Migration
import Ecto.Query
alias CodeCorps.Repo
def up do
alter table(:task_lists) do
add :pull_requests, :boolean, default: false
end
flush()
# set all "In Progress" task lists to now contain pull requests
from(
tl in "task_lists",
where: [name: "In Progress"],
update: [set: [pull_requests: true]]
) |> Repo.update_all([])
# get projects paired with associated pull request task list as ids
task_parent_data = from(
p in "projects",
left_join:
tl in "task_lists",
on: tl.project_id == p.id,
where: tl.pull_requests == true,
select: {p.id, tl.id}
) |> Repo.all
# get all tasks for projects, associated to github pull requests and
# assign them to the pull request task list
task_parent_data |> Enum.each(fn {project_id, pr_list_id} ->
from(
t in "tasks",
where: [project_id: ^project_id],
where: t.status != "closed",
where: not is_nil(t.github_issue_id),
inner_join:
gi in "github_issues",
on: t.github_issue_id == gi.id,
where: not is_nil(gi.github_pull_request_id),
update: [set: [task_list_id: ^pr_list_id]]
) |> Repo.update_all([])
end)
end
def down do
alter table(:task_lists) do
remove :pull_requests
end
end
end
<|start_filename|>lib/code_corps/policy/stripe_connect_subscription.ex<|end_filename|>
defmodule CodeCorps.Policy.StripeConnectSubscription do
alias CodeCorps.{StripeConnectSubscription, User}
@spec create?(User.t, map) :: boolean
def create?(user, params), do: user |> owns?(params)
@spec show?(User.t, StripeConnectSubscription.t) :: boolean
def show?(user, subscription), do: user |> owns?(subscription)
defp owns?(%User{id: current_user_id}, %StripeConnectSubscription{user_id: user_id}) do
current_user_id == user_id
end
defp owns?(%User{id: current_user_id}, %{"user_id" => user_id}) do
current_user_id == user_id
end
defp owns?(_, _), do: false
end
<|start_filename|>lib/code_corps_web/views/category_view.ex<|end_filename|>
defmodule CodeCorpsWeb.CategoryView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:name, :slug, :description]
has_many :project_categories, serializer: CodeCorpsWeb.ProjectCategoryView, identifiers: :always
end
<|start_filename|>priv/repo/migrations/20171121075226_migrate_stripe_connect_accounts.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.MigrateStripeConnectAccounts do
use Ecto.Migration
def change do
rename table(:stripe_connect_accounts), :transfers_enabled, to: :payouts_enabled
end
end
<|start_filename|>lib/code_corps_web/views/conversation_view.ex<|end_filename|>
defmodule CodeCorpsWeb.ConversationView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:read_at, :status, :inserted_at, :updated_at]
has_one :user, type: "user", field: :user_id
has_one :message, type: "message", field: :message_id
has_many :conversation_parts, serializer: CodeCorpsWeb.ConversationPartView, identifiers: :always
end
<|start_filename|>test/lib/code_corps/policy/skill_test.exs<|end_filename|>
defmodule CodeCorps.Policy.SkillTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.Skill, only: [create?: 1]
describe "create?" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
assert create?(user)
end
test "returns false if user is not an admin" do
user = build(:user, admin: false)
refute create?(user)
end
end
end
<|start_filename|>lib/code_corps_web/views/message_view.ex<|end_filename|>
defmodule CodeCorpsWeb.MessageView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:body, :initiated_by, :inserted_at, :subject, :updated_at]
has_one :author, type: "user", field: :author_id
has_one :project, type: "project", field: :project_id
has_many :conversations, serializer: CodeCorpsWeb.ConversationView, identifiers: :always
end
<|start_filename|>test/lib/code_corps_web/views/user_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.UserViewTest do
use CodeCorpsWeb.ViewCase
alias CodeCorpsWeb.UserView
alias Phoenix.ConnTest
alias Plug.Conn
test "renders all attributes and relationships properly" do
user = insert(:user, first_name: "First", github_avatar_url: "foo", github_id: 123, github_username: "githubuser", last_name: "Last", default_color: "blue")
github_app_installation = insert(:github_app_installation, user: user)
slugged_route = insert(:slugged_route, user: user)
stripe_connect_subscription = insert(:stripe_connect_subscription, user: user)
stripe_platform_card = insert(:stripe_platform_card, user: user)
stripe_platform_customer = insert(:stripe_platform_customer, user: user)
user_category = insert(:user_category, user: user)
user_role = insert(:user_role, user: user)
user_skill = insert(:user_skill, user: user)
organization = insert(:organization, owner: user)
project_user = insert(:project_user, user: user)
host = Application.get_env(:code_corps, :asset_host)
intercom_user_hash = UserView.intercom_user_hash(user, %Plug.Conn{})
user = CodeCorpsWeb.UserController.preload(user)
rendered_json = render(UserView, "show.json-api", data: user)
expected_json = %{
"data" => %{
"id" => user.id |> Integer.to_string,
"type" => "user",
"attributes" => %{
"admin" => user.admin,
"biography" => user.biography,
"cloudinary-public-id" => nil,
"email" => "",
"first-name" => "First",
"github-avatar-url" => "foo",
"github-id" => 123,
"github-username" => "githubuser",
"inserted-at" => user.inserted_at,
"intercom-user-hash" => intercom_user_hash,
"last-name" => "Last",
"name" => "<NAME>",
"photo-large-url" => "#{host}/icons/user_default_large_blue.png",
"photo-thumb-url" => "#{host}/icons/user_default_thumb_blue.png",
"sign-up-context" => "default",
"state" => "signed_up",
"state-transition" => nil,
"twitter" => user.twitter,
"username" => user.username,
"updated-at" => user.updated_at,
"website" => user.website
},
"relationships" => %{
"categories" => %{
"data" => [
%{"id" => user_category.category_id |> Integer.to_string, "type" => "category"}
]
},
"github-app-installations" => %{
"data" => [
%{"id" => github_app_installation.id |> Integer.to_string, "type" => "github-app-installation"}
]
},
"organizations" => %{
"data" => [
%{"id" => organization.id |> Integer.to_string, "type" => "organization"}
]
},
"project-users" => %{
"data" => [
%{"id" => project_user.id |> Integer.to_string, "type" => "project-user"}
]
},
"slugged-route" => %{
"data" => %{"id" => slugged_route.id |> Integer.to_string, "type" => "slugged-route"}
},
"stripe-connect-subscriptions" => %{
"data" => [
%{"id" => stripe_connect_subscription.id |> Integer.to_string, "type" => "stripe-connect-subscription"}
]
},
"stripe-platform-card" => %{
"data" => %{"id" => stripe_platform_card.id |> Integer.to_string, "type" => "stripe-platform-card"}
},
"stripe-platform-customer" => %{
"data" => %{"id" => stripe_platform_customer.id |> Integer.to_string, "type" => "stripe-platform-customer"}
},
"user-categories" => %{
"data" => [
%{"id" => user_category.id |> Integer.to_string, "type" => "user-category"}
]
},
"user-roles" => %{
"data" => [
%{"id" => user_role.id |> Integer.to_string, "type" => "user-role"}
]
},
"user-skills" => %{
"data" => [
%{"id" => user_skill.id |> Integer.to_string, "type" => "user-skill"}
]
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
test "renders email when user is the authenticated user" do
user = insert(:user)
conn =
ConnTest.build_conn()
|> Conn.assign(:current_user, user)
user = CodeCorpsWeb.UserController.preload(user)
rendered_json = render(UserView, "show.json-api", data: user, conn: conn)
assert rendered_json["data"]["attributes"]["email"] == user.email
end
test "renders email for only the authenticated user when rendering list" do
users = insert_list(4, :user)
auth_user = users |> List.last
conn =
ConnTest.build_conn()
|> Conn.assign(:current_user, auth_user)
users = CodeCorpsWeb.UserController.preload(users)
rendered_json = render(UserView, "show.json-api", data: users, conn: conn)
emails =
rendered_json["data"]
|> Enum.map(&Map.get(&1, "attributes"))
|> Enum.map(&Map.get(&1, "email"))
|> Enum.filter(fn(email) -> email != "" end)
assert emails == [auth_user.email]
end
test "renders first and last name as name" do
user = build(:user, id: 1, first_name: "First", last_name: "Last")
assert render_user_json(user)["data"]["attributes"]["name"] == "First Last"
end
test "renders first name only as name" do
user = build(:user, id: 1, first_name: "", last_name: "Last")
assert render_user_json(user)["data"]["attributes"]["name"] == "Last"
end
test "renders last name only as name" do
user = build(:user, id: 1, first_name: "First", last_name: "")
assert render_user_json(user)["data"]["attributes"]["name"] == "First"
end
test "renders nil name if first or last name blank" do
user = build(:user, id: 1, first_name: "", last_name: "")
assert render_user_json(user)["data"]["attributes"]["name"] == nil
user = build(:user, id: 1, first_name: nil, last_name: nil)
assert render_user_json(user)["data"]["attributes"]["name"] == nil
end
defp render_user_json(user) do
user = CodeCorpsWeb.UserController.preload(user)
conn =
ConnTest.build_conn()
|> Conn.assign(:current_user, user)
render(UserView, "show.json-api", data: user, conn: conn)
end
end
<|start_filename|>lib/code_corps/github/adapters/app_installation.ex<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.AppInstallation do
@moduledoc """
Module used to convert GitHub payloads into attributes for a
`GithubAppInstallation`.
"""
alias CodeCorps.{
Adapter.MapTransformer,
GithubAppInstallation
}
@installation_event_mapping [
{:github_account_avatar_url, ["installation", "account", "avatar_url"]},
{:github_account_id, ["installation", "account", "id"]},
{:github_account_login, ["installation", "account", "login"]},
{:github_account_type, ["installation", "account", "type"]},
{:github_id, ["installation", "id"]},
{:sender_github_id, ["sender", "id"]}
]
@doc ~S"""
Converts an installation event payload into attributes to create or update a
`GithubAppInstallation`.
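
A sketch of the expected shape, with the payload truncated to the mapped keys
(values are illustrative):

    payload = %{
      "installation" => %{
        "id" => 123,
        "account" => %{"avatar_url" => "https://...", "id" => 456, "login" => "foo", "type" => "User"}
      },
      "sender" => %{"id" => 789}
    }

    AppInstallation.from_installation_event(payload)
    # => %{github_account_avatar_url: "https://...", github_account_id: 456,
    #      github_account_login: "foo", github_account_type: "User",
    #      github_id: 123, sender_github_id: 789}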
"""
@spec from_installation_event(map) :: map
def from_installation_event(%{} = payload) do
payload
|> MapTransformer.transform(@installation_event_mapping)
end
@github_app_installation_to_repo_mapping [
{:github_account_avatar_url, [:github_account_avatar_url]},
{:github_account_id, [:github_account_id]},
{:github_account_login, [:github_account_login]},
{:github_account_type, [:github_account_type]}
]
@doc ~S"""
Converts a `GithubAppInstallation` record attributes into a map of attributes
that can be used for a `GithubRepo` record.
"""
@spec to_github_repo_attrs(GithubAppInstallation.t) :: map
def to_github_repo_attrs(%GithubAppInstallation{} = installation) do
installation
|> Map.from_struct
|> MapTransformer.transform(@github_app_installation_to_repo_mapping)
end
end
<|start_filename|>test/lib/code_corps/model/preview_test.exs<|end_filename|>
defmodule CodeCorps.PreviewTest do
use CodeCorps.ModelCase
alias CodeCorps.Preview
describe "create_changeset/2" do
test "renders body html from markdown" do
user = insert(:user)
changeset = Preview.create_changeset(%Preview{}, %{
markdown: "A **strong** element",
user_id: user.id
})
assert changeset.valid?
assert changeset |> get_change(:body) == "<p>A <strong>strong</strong> element</p>\n"
end
test "requires markdown change" do
changeset = Preview.create_changeset(%Preview{}, %{})
refute changeset.valid?
changeset |> assert_validation_triggered(:markdown, :required)
end
end
end
<|start_filename|>test/lib/code_corps_web/views/conversation_part_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ConversationPartViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
conversation_part = insert(:conversation_part)
rendered_json =
render(CodeCorpsWeb.ConversationPartView, "show.json-api", data: conversation_part)
expected_json = %{
"data" => %{
"id" => conversation_part.id |> Integer.to_string,
"type" => "conversation-part",
"attributes" => %{
"body" => conversation_part.body,
"inserted-at" => conversation_part.inserted_at,
"read-at" => conversation_part.read_at,
"updated-at" => conversation_part.updated_at
},
"relationships" => %{
"author" => %{
"data" => %{
"id" => conversation_part.author_id |> Integer.to_string,
"type" => "user"
}
},
"conversation" => %{
"data" => %{
"id" => conversation_part.conversation_id |> Integer.to_string,
"type" => "conversation"
}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps/github/api/comment.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.Comment do
@moduledoc ~S"""
Functions for working with comments on GitHub.
"""
alias CodeCorps.{
Comment,
GitHub,
GithubAppInstallation,
GithubComment,
GithubIssue,
GithubRepo,
Task,
User
}
@doc """
Create a comment on GitHub's API for a `CodeCorps.Comment`.
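
A minimal sketch; the association names come from the pattern match below and
need to be preloaded on the comment:

    comment
    |> CodeCorps.Repo.preload([:user, task: [github_repo: :github_app_installation]])
    |> CodeCorps.GitHub.API.Comment.create()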
"""
@spec create(Comment.t) :: GitHub.response
def create(
%Comment{
task: %Task{
github_repo: %GithubRepo{
github_app_installation: %GithubAppInstallation{} = installation
}
},
user: %User{} = user
} = comment) do
endpoint = comment |> create_endpoint_for()
attrs = comment |> GitHub.Adapters.Comment.to_api
with opts when is_list(opts) <- GitHub.API.opts_for(user, installation) do
GitHub.request(:post, endpoint, attrs, %{}, opts)
else
{:error, github_error} -> {:error, github_error}
end
end
@doc """
Update a comment on GitHub's API for a `CodeCorps.Comment`.
"""
@spec update(Comment.t) :: GitHub.response
def update(
%Comment{
task: %Task{
github_repo: %GithubRepo{
github_app_installation: %GithubAppInstallation{} = installation
}
},
user: %User{} = user
} = comment) do
endpoint = comment |> update_endpoint_for()
attrs = comment |> GitHub.Adapters.Comment.to_api
with opts when is_list(opts) <- GitHub.API.opts_for(user, installation) do
GitHub.request(:patch, endpoint, attrs, %{}, opts)
else
{:error, github_error} -> {:error, github_error}
end
end
@spec update_endpoint_for(Comment.t) :: String.t
defp update_endpoint_for(
%Comment{
github_comment: %GithubComment{github_id: id},
task: %Task{
github_repo: %GithubRepo{
github_account_login: owner, name: repo
}
}
}) do
"/repos/#{owner}/#{repo}/issues/comments/#{id}"
end
@spec create_endpoint_for(Comment.t) :: String.t
defp create_endpoint_for(
%Comment{
task: %Task{
github_issue: %GithubIssue{
number: number
},
github_repo: %GithubRepo{
github_account_login: owner, name: repo
},
}
}) do
"/repos/#{owner}/#{repo}/issues/#{number}/comments"
end
end
<|start_filename|>lib/code_corps_web/views/organization_github_app_installation_view.ex<|end_filename|>
defmodule CodeCorpsWeb.OrganizationGithubAppInstallationView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:inserted_at, :updated_at]
has_one :github_app_installation, type: "github-app-installation", field: :github_app_installation_id
has_one :organization, type: "organization", field: :organization_id
end
<|start_filename|>lib/code_corps_web/views/comment_view.ex<|end_filename|>
defmodule CodeCorpsWeb.CommentView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [
:body, :created_at, :created_from, :inserted_at, :markdown, :modified_at,
:modified_from, :updated_at
]
has_one :task, type: "task", field: :task_id
has_one :user, type: "user", field: :user_id
end
<|start_filename|>test/lib/code_corps_web/views/github_issue_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.GithubIssueViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
github_repo = insert(:github_repo)
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request, github_repo: github_repo)
rendered_json = render(CodeCorpsWeb.GithubIssueView, "show.json-api", data: github_issue)
expected_json = %{
"data" => %{
"attributes" => %{
"body" => github_issue.body,
"closed-at" => github_issue.closed_at,
"comments-url" => github_issue.comments_url,
"events-url" => github_issue.events_url,
"github-created-at" => github_issue.github_created_at,
"github-id" => github_issue.github_id,
"github-updated-at" => github_issue.github_updated_at,
"html-url" => github_issue.html_url,
"labels-url" => github_issue.labels_url,
"locked" => github_issue.locked,
"number" => github_issue.number,
"state" => github_issue.state,
"title" => github_issue.title,
"url" => github_issue.url
},
"id" => github_issue.id |> Integer.to_string,
"relationships" => %{
"github-pull-request" => %{
"data" => %{
"id" => github_issue.github_pull_request_id |> Integer.to_string,
"type" => "github-pull-request"
}
},
"github-repo" => %{
"data" => %{
"id" => github_issue.github_repo_id |> Integer.to_string,
"type" => "github-repo"
}
}
},
"type" => "github-issue",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps/github/webhook/handler.ex<|end_filename|>
defmodule CodeCorps.GitHub.Webhook.Handler do
@moduledoc """
Receives and handles GitHub event payloads.
"""
alias CodeCorps.{
GithubEvent,
GitHub.Event,
GitHub.Event.Installation,
GitHub.Event.InstallationRepositories,
GitHub.Event.IssueComment,
GitHub.Event.Issues,
GitHub.Event.PullRequest,
Repo
}
@doc """
Handles a fully supported GitHub event based on its type and action.
The handling process consists of 3 steps (see the sketch after the list):
- create event record marked as "unprocessed"
- mark event record as processing and handle it
- mark event record as processed or errored depending on handling outcome
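A minimal invocation sketch (the delivery id and payload are illustrative; a
real payload carries the full GitHub event body):

    handle_supported("issues", "abc-123", %{"action" => "opened"})
    # => {:ok, %GithubEvent{}}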
"""
@spec handle_supported(String.t, String.t, map) :: {:ok, GithubEvent.t}
def handle_supported(type, id, %{} = payload) do
with {:ok, %GithubEvent{} = event} <- find_or_create_event(type, id, payload, "unprocessed") do
payload |> apply_handler(type) |> Event.stop_processing(event)
end
end
@doc ~S"""
Handles an unsupported GitHub event.
"unsupported" means that, while we generally support this event type,
we do not yet support this specific event action.
The process consists of simply storing the event and marking it as
"unsupported".
"""
@spec handle_unsupported(String.t, String.t, map) :: {:ok, GithubEvent.t}
def handle_unsupported(type, id, %{} = payload) do
find_or_create_event(type, id, payload, "unsupported")
end
@spec build_params(String.t, String.t, String.t, map) :: map
defp build_params(type, id, status, %{"action" => action} = payload) do
%{
action: action,
github_delivery_id: id,
payload: payload,
status: status,
type: type
}
end
@spec find_or_create_event(String.t, String.t, map, String.t) :: {:ok, GithubEvent.t}
defp find_or_create_event(type, id, payload, status) do
case GithubEvent |> Repo.get_by(github_delivery_id: id) do
nil -> type |> build_params(id, status, payload) |> create_event()
%GithubEvent{} = github_event -> {:ok, github_event}
end
end
@spec create_event(map) :: {:ok, GithubEvent.t}
defp create_event(%{} = params) do
%GithubEvent{} |> GithubEvent.changeset(params) |> Repo.insert()
end
@spec apply_handler(map, String.t) :: tuple
defp apply_handler(payload, "installation"), do: Installation.handle(payload)
defp apply_handler(payload, "installation_repositories"), do: InstallationRepositories.handle(payload)
defp apply_handler(payload, "issue_comment"), do: IssueComment.handle(payload)
defp apply_handler(payload, "issues"), do: Issues.handle(payload)
defp apply_handler(payload, "pull_request"), do: PullRequest.handle(payload)
end
<|start_filename|>lib/code_corps/cloudex/cloudex_test.ex<|end_filename|>
defmodule CloudexTest do
@moduledoc """
Testing stub for `Cloudex`.
Each function should have the same signature as its `Cloudex` counterpart.
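For example, the stubbed URL helper simply returns a placeholder image URL:

    CloudexTest.Url.for("any-public-id", %{height: 100, width: 100})
    # => "https://placehold.it/100x100"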
"""
defmodule Url do
def for(_public_id, %{height: height, width: width}) do
"https://placehold.it/#{width}x#{height}"
end
def for(_public_id, _options) do
"https://placehold.it/500x500"
end
end
@spec upload(String.t) :: {:ok, Cloudex.UploadedImage.t}
def upload(_url) do
{:ok, %Cloudex.UploadedImage{public_id: fake_cloudinary_id()}}
end
defp fake_cloudinary_id do
:crypto.strong_rand_bytes(5) |> Base.encode64()
end
end
<|start_filename|>lib/code_corps_web/views/password_reset_view.ex<|end_filename|>
defmodule CodeCorpsWeb.PasswordResetView do
@moduledoc false
use CodeCorpsWeb, :view
def render("show.json", %{email: email, token: token, user_id: user_id}) do
%{
email: email,
token: token,
user_id: user_id
}
end
end
<|start_filename|>test/lib/code_corps_web/views/user_category_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.UserCategoryViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
user_category = insert(:user_category)
rendered_json = render(CodeCorpsWeb.UserCategoryView, "show.json-api", data: user_category)
expected_json = %{
"data" => %{
"id" => user_category.id |> Integer.to_string,
"type" => "user-category",
"attributes" => %{},
"relationships" => %{
"category" => %{
"data" => %{"id" => user_category.category_id |> Integer.to_string, "type" => "category"}
},
"user" => %{
"data" => %{"id" => user_category.user_id |> Integer.to_string, "type" => "user"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps_web/controllers/token_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.TokenController do
@moduledoc false
use CodeCorpsWeb, :controller
import Comeonin.Bcrypt, only: [checkpw: 2, dummy_checkpw: 0]
alias CodeCorps.Repo
alias CodeCorps.User
def create(conn, params = %{"username" => _, "password" => _}) do
case login_by_email_and_pass(params) do
{:ok, user} ->
{:ok, token, _claims} = user |> CodeCorps.Guardian.encode_and_sign()
conn
|> Plug.Conn.assign(:current_user, user)
|> put_status(:created)
|> render("show.json", token: token, user_id: user.id)
{:error, reason} -> handle_unauthenticated(conn, reason)
end
end
def create(conn, %{"username" => ""}) do
handle_unauthenticated(conn, "Please enter your email and password.")
end
def create(conn, %{"username" => _email}) do
handle_unauthenticated(conn, "Please enter your password.")
end
def refresh(conn, %{"token" => current_token}) do
with {:ok, _claims} <- CodeCorps.Guardian.decode_and_verify(current_token),
{:ok, _, {new_token, new_claims}} <- CodeCorps.Guardian.refresh(current_token),
{:ok, user} <- CodeCorps.Guardian.resource_from_claims(new_claims) do
conn
|> Plug.Conn.assign(:current_user, user)
|> put_status(:created)
|> render("show.json", token: new_token, user_id: user.id)
else
{:error, reason} -> handle_unauthenticated(conn, reason)
end
end
defp handle_unauthenticated(conn, reason) do
conn
|> put_status(:unauthorized)
|> render("401.json", message: reason)
end
defp login_by_email_and_pass(%{"username" => email, "password" => password}) do
user = Repo.get_by(User, email: email)
cond do
user && checkpw(password, user.encrypted_password) ->
{:ok, user}
user ->
{:error, "Your password doesn't match the email #{email}."}
true ->
dummy_checkpw()
{:error, "We couldn't find a user with the email #{email}."}
end
end
end
<|start_filename|>lib/code_corps/model/github_pull_request.ex<|end_filename|>
defmodule CodeCorps.GithubPullRequest do
use Ecto.Schema
import Ecto.Changeset
@type t :: %__MODULE__{}
schema "github_pull_requests" do
field :additions, :integer
field :body, :string
field :changed_files, :integer
field :closed_at, :utc_datetime
field :comments, :integer
field :comments_url, :string
field :commits, :integer
field :commits_url, :string
field :deletions, :integer
field :diff_url, :string
field :github_created_at, :utc_datetime
field :github_id, :integer
field :github_updated_at, :utc_datetime
field :html_url, :string
field :issue_url, :string
field :locked, :boolean, default: false
field :merge_commit_sha, :string
field :mergeable_state, :string
field :merged, :boolean, default: false
field :merged_at, :utc_datetime
field :number, :integer
field :patch_url, :string
field :review_comment_url, :string
field :review_comments, :integer
field :review_comments_url, :string
field :state, :string
field :statuses_url, :string
field :title, :string
field :url, :string
belongs_to :github_repo, CodeCorps.GithubRepo
belongs_to :github_user, CodeCorps.GithubUser
timestamps()
end
@attrs [
:additions, :body, :changed_files, :closed_at, :comments, :comments_url,
:commits, :commits_url, :deletions, :diff_url, :github_created_at,
:github_id, :github_updated_at, :html_url, :issue_url, :locked,
:merge_commit_sha, :mergeable_state, :merged, :merged_at, :number,
:patch_url, :review_comment_url, :review_comments, :review_comments_url,
:state, :statuses_url, :title, :url
]
@required_attrs [
:github_created_at, :github_id, :github_updated_at, :html_url, :locked,
:number, :state, :title
]
@doc false
def changeset(struct, params) do
struct
|> cast(params, @attrs)
|> add_merged()
|> validate_required(@required_attrs)
|> unique_constraint(:github_id)
end
def create_changeset(struct, params) do
struct
|> changeset(params)
|> cast(params, [:github_repo_id, :github_user_id])
|> assoc_constraint(:github_repo)
|> assoc_constraint(:github_user)
end
def update_changeset(struct, params) do
struct
|> changeset(params)
|> cast(params, [:github_repo_id, :github_user_id])
|> assoc_constraint(:github_repo)
|> assoc_constraint(:github_user)
end
defp add_merged(%Ecto.Changeset{changes: %{merged: merged}} = changeset) when is_boolean(merged) do
changeset
end
defp add_merged(%Ecto.Changeset{changes: %{merged_at: _}} = changeset) do
changeset |> put_change(:merged, true)
end
defp add_merged(%Ecto.Changeset{} = changeset) do
changeset |> put_change(:merged, false)
end
end
<|start_filename|>lib/code_corps/stripe_service/stripe_connect_plan.ex<|end_filename|>
defmodule CodeCorps.StripeService.StripeConnectPlanService do
@moduledoc """
Used to perform actions on `StripeConnectPlan` records
while at the same time propagating to and from associated `Stripe.Plan`
records.
"""
alias CodeCorps.{Project, Repo, StripeConnectPlan}
alias CodeCorps.StripeService.Adapters.StripeConnectPlanAdapter
alias CodeCorps.StripeService.Validators.ProjectCanEnableDonations
@api Application.get_env(:code_corps, :stripe)
@doc """
Creates a new `Stripe.Plan` record on the Stripe API, as well as an associated
local `StripeConnectPlan` record.
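A minimal usage sketch (the project id is illustrative; the project must first
pass the `ProjectCanEnableDonations` validation):

    CodeCorps.StripeService.StripeConnectPlanService.create(%{"project_id" => 1})
    # => {:ok, %StripeConnectPlan{}} on success, or one of the error tuples
    #    listed in the spec below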
"""
@spec create(map) :: {:ok, StripeConnectPlan.t} |
{:error, Ecto.Changeset.t} |
{:error, Stripe.Error.t} |
{:error, :project_not_ready} |
{:error, :not_found}
def create(%{"project_id" => project_id} = attributes) do
with {:ok, %Project{} = project} <- get_project(project_id),
{:ok, %Project{}} <- ProjectCanEnableDonations.validate(project),
%{} = create_attributes <- get_create_attributes(project_id),
connect_account_id <- project.organization.stripe_connect_account.id_from_stripe,
{:ok, %Stripe.Plan{} = plan} <- @api.Plan.create(create_attributes, connect_account: connect_account_id),
{:ok, params} <- StripeConnectPlanAdapter.to_params(plan, attributes)
do
%StripeConnectPlan{}
|> StripeConnectPlan.create_changeset(params)
|> Repo.insert()
else
failure -> failure
end
end
defp get_create_attributes(project_id) do
%{
amount: 1, # in cents
currency: "usd",
id: "month_project_" <> to_string(project_id),
interval: "month",
name: "Monthly donation",
statement_descriptor: "CODECORPS.ORG Donation" # No more than 22 chars
}
end
defp get_project(project_id) do
preloads = [:donation_goals, {:organization, :stripe_connect_account}, :stripe_connect_plan]
case Project |> Repo.get(project_id) |> Repo.preload(preloads) do
nil -> {:error, :not_found}
record -> {:ok, record}
end
end
end
<|start_filename|>lib/code_corps_web/controllers/user_category_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.UserCategoryController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{UserCategory, User, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with user_categories <- UserCategory |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: user_categories)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %UserCategory{} = user_category <- UserCategory |> Repo.get(id) do
conn |> render("show.json-api", data: user_category)
end
end
@spec create(Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %UserCategory{}, params),
{:ok, %UserCategory{} = user_category} <- %UserCategory{} |> UserCategory.create_changeset(params) |> Repo.insert
do
conn |> put_status(:created) |> render("show.json-api", data: user_category)
end
end
@spec delete(Conn.t, map) :: Conn.t
def delete(%Conn{} = conn, %{"id" => id} = _params) do
with %UserCategory{} = user_category <- UserCategory |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:delete, user_category),
{:ok, %UserCategory{} = _user_category} <- user_category |> Repo.delete
do
conn |> Conn.assign(:user_category, user_category) |> send_resp(:no_content, "")
end
end
end
<|start_filename|>lib/code_corps/model/github_comment.ex<|end_filename|>
defmodule CodeCorps.GithubComment do
use Ecto.Schema
alias Ecto.Changeset
@type t :: %__MODULE__{}
schema "github_comments" do
field :body, :string
field :github_created_at, :utc_datetime
field :github_id, :integer
field :github_updated_at, :utc_datetime
field :html_url, :string
field :url, :string
belongs_to :github_issue, CodeCorps.GithubIssue
belongs_to :github_repo, CodeCorps.GithubRepo
belongs_to :github_user, CodeCorps.GithubUser
timestamps()
end
@doc false
defp changeset(struct, params) do
struct
|> Changeset.cast(params, [:body, :github_created_at, :github_id, :github_updated_at, :html_url, :url])
|> Changeset.validate_required([:body, :github_created_at, :github_id, :github_updated_at, :html_url, :url])
|> Changeset.unique_constraint(:github_id)
end
@doc ~S"""
Default changeset used to create a `CodeCorps.GithubComment` record.
"""
def create_changeset(struct, params) do
struct
|> changeset(params)
|> Changeset.cast(params, [:github_issue_id, :github_repo_id, :github_user_id])
|> Changeset.assoc_constraint(:github_issue)
|> Changeset.assoc_constraint(:github_repo)
|> Changeset.assoc_constraint(:github_user)
end
@doc ~S"""
Default changeset used to update a `CodeCorps.GithubComment` record.
"""
def update_changeset(struct, params) do
struct
|> changeset(params)
|> Changeset.cast(params, [:github_issue_id, :github_repo_id, :github_user_id])
|> Changeset.assoc_constraint(:github_issue)
|> Changeset.assoc_constraint(:github_repo)
|> Changeset.assoc_constraint(:github_user)
end
end
<|start_filename|>test/lib/code_corps/github/sync/task/task_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.TaskTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.{GitHub.Sync, Repo, Task}
describe "sync all/3" do
defp setup_test_data do
# Creates a user, a project, a github repo linked to that project, and a
# github issue linked to that repo. Returns that data as a map.
user = insert(:user)
project = insert(:project)
github_repo = insert(:github_repo, project: project)
github_issue = insert(
:github_issue,
github_repo: github_repo,
github_updated_at: DateTime.utc_now |> Timex.shift(hours: 1)
)
insert(:task_list, project: project, inbox: true)
%{github_issue: github_issue, github_repo: github_repo, project: project, user: user}
end
test "creates missing, updates existing tasks for each project associated with the github repo" do
%{
github_issue: github_issue,
github_repo: github_repo,
project: project,
user: user
} = setup_test_data()
existing_task =
insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user)
{:ok, task} = github_issue |> Sync.Task.sync_github_issue(user)
assert Repo.aggregate(Task, :count, :id) == 1
assert task.user_id == user.id
assert task.markdown == github_issue.body
assert task.github_issue_id == github_issue.id
assert existing_task.id == task.id
end
test "sets task :modified_from to 'github'" do
%{github_issue: github_issue, user: user} = setup_test_data()
{:ok, task} = github_issue |> Sync.Task.sync_github_issue(user)
assert task.modified_from == "github"
end
test "fails on validation errors" do
project = insert(:project)
github_repo = insert(:github_repo, project: project)
github_issue = insert(:github_issue, title: nil, github_repo: github_repo)
%{user: user} = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo)
insert(:task_list, project: project, inbox: true)
{:error, changeset} = github_issue |> Sync.Task.sync_github_issue(user)
refute changeset.valid?
end
end
end
<|start_filename|>lib/code_corps/stripe_testing/subscription.ex<|end_filename|>
defmodule CodeCorps.StripeTesting.Subscription do
def create(map, _opts \\ []) do
{:ok, do_create(map)}
end
def retrieve(map, _opts \\ []) do
{:ok, do_retrieve(map)}
end
defp do_create(%{items: [%{quantity: quantity}]}) do
{:ok, plan} = CodeCorps.StripeTesting.Plan.create(%{}, [])
%Stripe.Subscription{
application_fee_percent: 5.0,
cancel_at_period_end: false,
canceled_at: nil,
created: 1_479_472_835,
current_period_end: 1_479_472_835,
current_period_start: 1_479_472_835,
customer: "cus_123",
ended_at: nil,
id: "sub_123",
items: %{
object: "list",
data: [
%{
id: "sub_123",
object: "subscription_item",
created: 1_479_472_835,
metadata: %{},
plan: plan,
quantity: quantity
}
],
has_more: false,
total_count: 1,
url: "/v1/subscription_items?subscription=sub_123"
},
livemode: false,
metadata: %{},
plan: plan,
quantity: quantity,
start: 1_479_472_835,
status: "active",
tax_percent: nil,
trial_end: nil,
trial_start: nil
}
end
defp do_retrieve(_) do
{:ok, plan} = CodeCorps.StripeTesting.Plan.create(%{}, [])
%Stripe.Subscription{
application_fee_percent: 5.0,
cancel_at_period_end: false,
canceled_at: nil,
created: 1_479_472_835,
current_period_end: 1_479_472_835,
current_period_start: 1_479_472_835,
customer: "cus_123",
ended_at: nil,
id: "sub_123",
items: %{
object: "list",
data: [
%{
id: "sub_123",
object: "subscription_item",
created: 1_479_472_835,
metadata: %{},
plan: plan,
quantity: 1000
}
],
has_more: false,
total_count: 1,
url: "/v1/subscription_items?subscription=sub_123"
},
livemode: false,
metadata: %{},
plan: plan,
quantity: 1000,
start: 1_479_472_835,
status: "canceled",
tax_percent: nil,
trial_end: nil,
trial_start: nil
}
end
end
<|start_filename|>lib/code_corps/model/task.ex<|end_filename|>
defmodule CodeCorps.Task do
use CodeCorps.Model
import EctoOrdered
alias CodeCorps.{Services.MarkdownRendererService, Task}
alias Ecto.Changeset
@type t :: %__MODULE__{}
schema "tasks" do
field :archived, :boolean, default: false
field :body, :string
field :closed_at, :utc_datetime
field :created_at, :utc_datetime
field :created_from, :string, default: "code_corps"
field :markdown, :string
field :modified_at, :utc_datetime
field :modified_from, :string, default: "code_corps"
field :number, :integer, read_after_writes: true
field :order, :integer
field :status, :string, default: "open"
field :title, :string
field :position, :integer, virtual: true
belongs_to :github_issue, CodeCorps.GithubIssue
belongs_to :github_repo, CodeCorps.GithubRepo
belongs_to :project, CodeCorps.Project
belongs_to :task_list, CodeCorps.TaskList
belongs_to :user, CodeCorps.User
has_one :github_pull_request, through: [:github_issue, :github_pull_request]
has_one :user_task, CodeCorps.UserTask
has_many :comments, CodeCorps.Comment
has_many :task_skills, CodeCorps.TaskSkill
timestamps()
end
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:archived, :title, :markdown, :task_list_id, :position])
|> validate_required([:title])
|> assoc_constraint(:task_list)
|> handle_archived()
|> MarkdownRendererService.render_markdown_to_html(:markdown, :body)
end
def handle_archived(changeset) do
case get_field(changeset, :archived) do
true ->
changeset
|> put_change(:task_list_id, nil)
|> put_change(:order, nil)
_ ->
order_task(changeset)
end
end
def order_task(changeset) do
changeset
|> validate_required([:task_list_id])
|> apply_position()
|> set_order(:position, :order, :task_list_id)
end
@spec create_changeset(struct, map) :: Ecto.Changeset.t
def create_changeset(struct, %{} = params) do
struct
|> changeset(params)
|> cast(params, [:github_repo_id, :project_id, :user_id])
|> set_created_and_modified_at()
|> validate_required([:project_id, :user_id])
|> assoc_constraint(:github_repo)
|> assoc_constraint(:project)
|> assoc_constraint(:user)
|> put_change(:status, "open")
|> put_change(:modified_from, "code_corps")
end
@spec update_changeset(struct, map) :: Ecto.Changeset.t
def update_changeset(struct, %{} = params) do
struct
|> changeset(params)
|> cast(params, [:status])
|> validate_inclusion(:status, statuses())
|> set_closed_at()
|> update_modified_at()
|> maybe_assoc_with_repo(params)
|> put_change(:modified_from, "code_corps")
end
def apply_position(changeset) do
case get_field(changeset, :position) do
nil ->
put_change(changeset, :position, 0)
_ -> changeset
end
end
defp statuses do
~w{ open closed }
end
defp set_closed_at(changeset) do
case changeset do
%Changeset{valid?: true, changes: %{status: "closed"}} ->
put_change(changeset, :closed_at, DateTime.utc_now)
%Changeset{valid?: true, changes: %{status: "open"}} ->
put_change(changeset, :closed_at, nil)
_ ->
changeset
end
end
defp set_created_and_modified_at(changeset) do
now = DateTime.utc_now
changeset
|> put_change(:created_at, now)
|> put_change(:modified_at, now)
end
defp update_modified_at(changeset) do
put_change(changeset, :modified_at, DateTime.utc_now)
end
@spec maybe_assoc_with_repo(Changeset.t, map) :: Changeset.t
defp maybe_assoc_with_repo(
%Changeset{data: %Task{github_repo_id: nil}} = changeset,
%{} = params) do
changeset
|> cast(params, [:github_repo_id])
|> assoc_constraint(:github_repo)
end
defp maybe_assoc_with_repo(%Changeset{} = changeset, %{}), do: changeset
end
<|start_filename|>lib/code_corps/services/forgot_password.ex<|end_filename|>
defmodule CodeCorps.Services.ForgotPasswordService do
# credo:disable-for-this-file Credo.Check.Refactor.PipeChainStart
alias CodeCorps.{AuthToken, Emails, Mailer, Repo, User}
@doc"""
Generates an AuthToken model and sends to the provided email.
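A minimal usage sketch (the address is illustrative; `nil` is returned when no
user matches):

    CodeCorps.Services.ForgotPasswordService.forgot_password("person@example.com")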
"""
def forgot_password(email) do
with %User{} = user <- Repo.get_by(User, email: email),
{ :ok, %AuthToken{} = %{ value: token } } <- AuthToken.changeset(%AuthToken{}, user) |> Repo.insert
do
Emails.ForgotPasswordEmail.create(user, token) |> Mailer.deliver_now()
else
nil -> nil
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/validators/project_can_enable_donations_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Validators.ProjectCanEnableDonationsTest do
use ExUnit.Case, async: true
use CodeCorps.ModelCase
alias CodeCorps.{Project}
alias CodeCorps.StripeService.Validators.ProjectCanEnableDonations
describe "validate" do
test "succeeds when project has donation_goals and organization where charges and payouts are enabled" do
organization = insert(:organization)
project = insert(:project, organization: organization)
insert(:donation_goal, project: project)
insert(:stripe_connect_account, organization: organization, charges_enabled: true, payouts_enabled: true)
project =
Project
|> Repo.get(project.id)
|> Repo.preload([:donation_goals, [organization: :stripe_connect_account], :stripe_connect_plan])
assert {:ok, _project} = ProjectCanEnableDonations.validate(project)
end
test "fails when project has a StripeConnectPlan" do
project = insert(:project)
insert(:stripe_connect_plan, project: project)
project =
Project
|> Repo.get(project.id)
|> Repo.preload([:stripe_connect_plan])
assert {:error, :project_has_plan} = ProjectCanEnableDonations.validate(project)
end
test "fails when project is not ready" do
project = insert(:project)
assert {:error, :project_not_ready} = ProjectCanEnableDonations.validate(project)
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/webhook_processing/ignored_event_handler_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.WebhookProcessing.IgnoredEventHandlerTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeService.WebhookProcessing.{
ConnectEventHandler, IgnoredEventHandler, PlatformEventHandler
}
@spec ignored?(String.t, module) :: boolean
defp ignored?(type, handler) do
event = insert(:stripe_event, type: type)
{:ok, event} = IgnoredEventHandler.handle(event, handler)
event.ignored_reason && event.status == "ignored"
end
describe "handle/2" do
test "ignores events from the ignored platform events list" do
IgnoredEventHandler.ignored_event_types(PlatformEventHandler)
|> Enum.each(fn(type) -> assert ignored?(type, PlatformEventHandler) end)
assert_raise(FunctionClauseError, fn -> ignored?("some.other.type", PlatformEventHandler) end)
end
test "ignores events from the ignored connect events list" do
IgnoredEventHandler.ignored_event_types(ConnectEventHandler)
|> Enum.each(fn(type) -> assert ignored?(type, ConnectEventHandler) end)
assert_raise(FunctionClauseError, fn -> ignored?("some.other.type", ConnectEventHandler) end)
end
end
describe "should_handle?/2" do
test "returns true for types from the ignored platform events list" do
IgnoredEventHandler.ignored_event_types(PlatformEventHandler)
|> Enum.each(fn(type) -> assert IgnoredEventHandler.should_handle?(type, PlatformEventHandler) end)
refute IgnoredEventHandler.should_handle?("some.other.type", PlatformEventHandler)
end
test "returns true for types from the ignored connect events list" do
IgnoredEventHandler.ignored_event_types(ConnectEventHandler)
|> Enum.each(fn(type) -> assert IgnoredEventHandler.should_handle?(type, ConnectEventHandler) end)
refute IgnoredEventHandler.should_handle?("some.other.type", ConnectEventHandler)
end
end
end
<|start_filename|>test/lib/code_corps/policy/helpers_test.exs<|end_filename|>
defmodule CodeCorps.Policy.HelpersTest do
use CodeCorps.ModelCase
alias Ecto.Changeset
alias CodeCorps.{
Organization, User, Policy.Helpers,
ProjectUser
}
def create_project_user_with_role(role) do
user = insert(:user)
project = insert(:project)
insert(:project_user, project: project, user: user, role: role)
{project, user}
end
describe "owned_by/2" do
test "returns false when organization is not owned by user" do
refute Helpers.owned_by?(%Organization{owner_id: 1}, %User{id: 2})
end
test "returns false when invalid arguments are passed" do
refute Helpers.owned_by?(nil, 2)
end
test "returns false if a project is not owned by the user" do
project = insert(:project)
some_other_user = %User{id: 1}
refute Helpers.owned_by?(project, some_other_user)
end
test "returns true if a project is owned by the user" do
{project, user} = create_project_user_with_role("owner")
assert Helpers.owned_by?(project, user)
end
test "returns false if a project is admined by the user" do
{project, user} = create_project_user_with_role("admin")
refute Helpers.owned_by?(project, user)
end
test "returns false if a project is contributed by the user" do
{project, user} = create_project_user_with_role("contributor")
refute Helpers.owned_by?(project, user)
end
test "returns false if a project user role is pending" do
{project, user} = create_project_user_with_role("pending")
refute Helpers.owned_by?(project, user)
end
test "returns true when organization is owned by user" do
assert Helpers.owned_by?(%Organization{owner_id: 1}, %User{id: 1})
end
end
describe "administered_by?/2" do
test "returns false if given invalid arguments" do
refute Helpers.administered_by?(nil, 2)
end
test "returns true if the user is an admin" do
{project, user} = create_project_user_with_role("admin")
assert Helpers.administered_by?(project, user)
end
test "returns true if the user is an owner" do
{project, user} = create_project_user_with_role("owner")
assert Helpers.administered_by?(project, user)
end
test "returns false if the user is a contributor" do
{project, user} = create_project_user_with_role("contributor")
refute Helpers.administered_by?(project, user)
end
test "returns false if the user is pending" do
{project, user} = create_project_user_with_role("pending")
refute Helpers.administered_by?(project, user)
end
end
describe "contributed_by?/2" do
test "returns false if given invalid arguments" do
refute Helpers.contributed_by?(nil, 2)
end
test "returns true if the user is an admin" do
{project, user} = create_project_user_with_role("admin")
assert Helpers.contributed_by?(project, user)
end
test "returns true if the user is an owner" do
{project, user} = create_project_user_with_role("owner")
assert Helpers.contributed_by?(project, user)
end
test "returns true if the user is a contributor" do
{project, user} = create_project_user_with_role("contributor")
assert Helpers.contributed_by?(project, user)
end
test "returns false if the user is pending" do
{project, user} = create_project_user_with_role("pending")
refute Helpers.contributed_by?(project, user)
end
end
describe "get_conversation/1" do
test "should return conversation of a map" do
conversation = insert(:conversation)
result = Helpers.get_conversation(%{"conversation_id" => conversation.id})
assert result.id == conversation.id
end
test "should return conversation of a ConversationPart" do
conversation = insert(:conversation)
conversation_part = insert(:conversation_part, conversation: conversation)
result = Helpers.get_conversation(conversation_part)
assert result.id == conversation.id
end
test "should return conversation of a Changeset" do
conversation = insert(:conversation)
changeset = %Changeset{changes: %{conversation_id: conversation.id}}
result = Helpers.get_conversation(changeset)
assert result.id == conversation.id
end
end
describe "get_organization/1" do
test "return organization if the organization_id is defined on the struct" do
organization = insert(:organization)
project = insert(:project, organization: organization)
result = Helpers.get_organization(project)
assert result.id == organization.id
assert result.name == organization.name
end
test "return organization if the organization_id is defined on the changeset" do
organization = insert(:organization)
changeset = %Changeset{changes: %{organization_id: organization.id}}
result = Helpers.get_organization(changeset)
assert result.id == organization.id
assert result.name == organization.name
end
test "return nil for structs with no organization_id" do
assert Helpers.get_organization(%{foo: "bar"}) == nil
end
test "return nil for any" do
assert Helpers.get_organization("foo") == nil
end
end
describe "get_message/1" do
test "should return message of a map" do
message = insert(:message)
result = Helpers.get_message(%{"message_id" => message.id})
assert result.id == message.id
end
test "should return message of a Conversation" do
message = insert(:message)
conversation = insert(:conversation, message: message)
result = Helpers.get_message(conversation)
assert result.id == message.id
end
test "should return message of a Changeset" do
message = insert(:message)
changeset = %Changeset{changes: %{message_id: message.id}}
result = Helpers.get_message(changeset)
assert result.id == message.id
end
end
describe "get_project/1" do
test "return project if the project_id is defined on the struct" do
project = insert(:project)
project_category = insert(:project_category, project: project)
result = Helpers.get_project(project_category)
assert result.id == project.id
assert result.title == project.title
end
test "return project if the project_id is defined on the changeset" do
project = insert(:project)
changeset = %Changeset{changes: %{project_id: project.id}}
result = Helpers.get_project(changeset)
assert result.id == project.id
assert result.title == project.title
end
test "return nil for structs with no project_id" do
assert Helpers.get_project(%{foo: "bar"}) == nil
end
test "return nil for any" do
assert Helpers.get_project("foo") == nil
end
end
describe "get_role/1" do
test "should return a project user's role if it's defined" do
assert Helpers.get_role(%ProjectUser{role: "admin"}) == "admin"
end
test "should return a changeset's role if it's defined" do
assert Helpers.get_role(%Changeset{data: %{role: "contributor"}, types: %{role: :string}}) == "contributor"
end
test "should return nil if no role is defined on a project user" do
assert Helpers.get_role(%ProjectUser{}) == nil
end
test "should return nil if no role is defined on a changeset" do
assert Helpers.get_role(%Changeset{data: %{role: nil}, types: %{role: :string}}) == nil
end
test "should return nil if nil is passed in" do
assert Helpers.get_role(nil) == nil
end
end
describe "get_task/1" do
test "should return task of a TaskSkill" do
task = insert(:task)
task_skill = insert(:task_skill, task: task)
result = Helpers.get_task(task_skill)
assert result.id == task.id
end
test "should return task of a UserTask" do
task = insert(:task)
user_task = insert(:user_task, task: task)
result = Helpers.get_task(user_task)
assert result.id == task.id
end
test "should return task of a Changeset" do
task = insert(:task)
changeset = %Changeset{changes: %{task_id: task.id}}
result = Helpers.get_task(changeset)
assert result.id == task.id
end
end
describe "task_authored_by?/1" do
test "returns true if the user is the author of the task" do
user = insert(:user)
task = insert(:task, user: user)
assert Helpers.task_authored_by?(task, user)
end
test "returns false if the user is not the author of the task" do
user = insert(:user)
other_user = insert(:user)
task = insert(:task, user: user)
refute Helpers.task_authored_by?(task, other_user)
end
end
end
<|start_filename|>lib/code_corps/model/auth_token.ex<|end_filename|>
defmodule CodeCorps.AuthToken do
@moduledoc """
Represents one of the user's many possible authentication tokens, created
using `Phoenix.Token.sign/4`.
Many can coexist and be valid at the same time. They can be used for
password resets or passwordless logins.
These tokens do expire based on the `max_age` value passed to
`Phoenix.Token.verify/4`.
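A verification sketch (the 48-hour `max_age` shown here is illustrative and is
not enforced by this module itself):

    Phoenix.Token.verify(CodeCorpsWeb.Endpoint, "user", token, max_age: 48 * 60 * 60)
    # => {:ok, user_id} | {:error, :expired} | {:error, :invalid}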
"""
use CodeCorps.Model
schema "auth_token" do
field :value, :string
belongs_to :user, CodeCorps.User
timestamps()
end
@doc """
Builds a changeset based on the `struct`
"""
def changeset(struct, user) do
token = CodeCorpsWeb.Endpoint |> Phoenix.Token.sign("user", user.id)
struct
|> cast(%{ value: token, user_id: user.id }, [:value, :user_id])
|> validate_required([:value, :user_id])
|> assoc_constraint(:user)
end
end
<|start_filename|>priv/repo/migrations/20171106200036_archive_outdated_tasks.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.ArchiveOutdatedTasks do
use Ecto.Migration
import Ecto.Query
alias CodeCorps.Repo
def up do
from(
t in "tasks",
where: t.status == "closed",
where: date_add(t.modified_at, 30, "day") > ^Date.utc_today,
update: [set: [archived: true, task_list_id: nil]]
) |> Repo.update_all([])
end
def down do
# no-op
end
end
<|start_filename|>test/lib/code_corps/model/stripe_event_test.exs<|end_filename|>
defmodule CodeCorps.StripeEventTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeEvent
describe "create_changeset/2" do
@valid_attrs %{
endpoint: "connect",
id_from_stripe: "evt_123",
object_id: "cus_123",
object_type: "customer",
type: "any.event"
}
test "reports as valid when attributes are valid" do
changeset = StripeEvent.create_changeset(%StripeEvent{}, @valid_attrs)
assert changeset.valid?
end
test "required params" do
changeset = StripeEvent.create_changeset(%StripeEvent{}, %{})
refute changeset.valid?
assert_validation_triggered(changeset, :endpoint, :required)
assert_validation_triggered(changeset, :id_from_stripe, :required)
assert_validation_triggered(changeset, :object_id, :required)
assert_validation_triggered(changeset, :object_type, :required)
assert_validation_triggered(changeset, :type, :required)
end
test "sets :status to 'processing'" do
{:ok, %StripeEvent{} = record} =
%StripeEvent{}
|> StripeEvent.create_changeset(@valid_attrs)
|> Repo.insert
assert record.status == "processing"
end
test "prevents :endpoint from being invalid" do
event = insert(:stripe_event)
attrs = %{endpoint: "random", id_from_stripe: "evt_123", type: "any.event"}
changeset = StripeEvent.create_changeset(event, attrs)
refute changeset.valid?
assert_error_message(changeset, :endpoint, "is invalid")
end
end
describe "update_changeset/2" do
@valid_attrs %{status: "unprocessed"}
test "reports as valid when attributes are valid" do
event = insert(:stripe_event)
changeset = StripeEvent.update_changeset(event, @valid_attrs)
assert changeset.valid?
end
test "requires :status" do
event = insert(:stripe_event)
changeset = StripeEvent.update_changeset(event, %{status: nil})
refute changeset.valid?
assert_error_message(changeset, :status, "can't be blank")
end
test "prevents :status from being invalid" do
event = insert(:stripe_event)
changeset = StripeEvent.update_changeset(event, %{status: "random"})
refute changeset.valid?
assert_error_message(changeset, :status, "is invalid")
end
end
end
<|start_filename|>test/lib/code_corps/github/adapters/issue_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.IssueTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
import CodeCorps.Factories
alias CodeCorps.{GitHub.Adapters, GithubIssue, Task}
describe "to_issue/1" do
test "maps api payload correctly" do
%{"issue" => payload} = load_event_fixture("issues_opened")
assert Adapters.Issue.to_issue(payload) == %{
body: payload["body"],
closed_at: payload["closed_at"],
comments_url: payload["comments_url"],
events_url: payload["events_url"],
github_created_at: payload["created_at"],
github_id: payload["id"],
github_updated_at: payload["updated_at"],
html_url: payload["html_url"],
labels_url: payload["labels_url"],
locked: payload["locked"],
number: payload["number"],
state: payload["state"],
title: payload["title"],
url: payload["url"]
}
end
end
describe "to_task/1" do
test "maps github issue correctly" do
github_issue = build(:github_issue)
assert github_issue |> Adapters.Issue.to_task == %{
created_at: github_issue.github_created_at,
markdown: github_issue.body,
modified_at: github_issue.github_updated_at,
status: github_issue.state,
title: github_issue.title
}
end
end
describe "to_api/1" do
test "maps GithubIssue correctly" do
payload =
%GithubIssue{body: "bar", locked: false, number: 5, state: "open", title: "Foo"}
|> Adapters.Issue.to_api
assert payload["body"] == "bar"
assert payload["locked"] == false
assert payload["state"] == "open"
assert payload["title"] == "Foo"
refute payload["closed_at"]
refute payload["comments_url"]
refute payload["created_at"]
refute payload["events_url"]
refute payload["html_url"]
refute payload["id"]
refute payload["labels_url"]
refute payload["number"]
refute payload["updated_at"]
refute payload["url"]
end
test "maps Task correctly" do
payload =
%Task{created_at: DateTime.utc_now, markdown: "bar", modified_at: DateTime.utc_now, status: "open", title: "Foo"}
|> Adapters.Issue.to_api
assert payload["body"] == "bar"
assert payload["state"] == "open"
assert payload["title"] == "Foo"
refute payload["closed_at"]
refute payload["comments_url"]
refute payload["created_at"]
refute payload["events_url"]
refute payload["html_url"]
refute payload["id"]
refute payload["labels_url"]
refute payload["number"]
refute payload["updated_at"]
refute payload["url"]
end
end
end
<|start_filename|>priv/repo/migrations/20171220154922_add_part_type_to_conversation.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddPartTypeToConversation do
use Ecto.Migration
def change do
alter table(:conversation_parts) do
add :part_type, :string, default: "comment"
end
end
end
<|start_filename|>test/lib/code_corps/policy/donation_goal_test.exs<|end_filename|>
defmodule CodeCorps.Policy.DonationGoalTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.DonationGoal, only: [create?: 2, update?: 2, delete?: 2]
import CodeCorps.DonationGoal, only: [create_changeset: 2]
alias CodeCorps.DonationGoal
describe "create?" do
test "returns false when user is not a project member" do
user = insert(:user)
project = insert(:project)
changeset = %DonationGoal{} |> create_changeset(%{project_id: project.id})
refute create?(user, changeset)
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
changeset = %DonationGoal{} |> create_changeset(%{project_id: project.id})
refute create?(user, changeset)
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
changeset = %DonationGoal{} |> create_changeset(%{project_id: project.id})
refute create?(user, changeset)
end
test "returns false when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
changeset = %DonationGoal{} |> create_changeset(%{project_id: project.id})
refute create?(user, changeset)
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
changeset = %DonationGoal{} |> create_changeset(%{project_id: project.id})
assert create?(user, changeset)
end
end
describe "update?" do
test "returns false when user is not a project member" do
user = insert(:user)
project = insert(:project)
record = insert(:donation_goal, project: project)
refute update?(user, record)
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
record = insert(:donation_goal, project: project)
refute update?(user, record)
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
record = insert(:donation_goal, project: project)
refute update?(user, record)
end
test "returns false when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
record = insert(:donation_goal, project: project)
refute update?(user, record)
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
record = insert(:donation_goal, project: project)
assert update?(user, record)
end
end
describe "delete?" do
test "returns false when user is not a project member" do
user = insert(:user)
project = insert(:project)
record = insert(:donation_goal, project: project)
refute delete?(user, record)
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
record = insert(:donation_goal, project: project)
refute delete?(user, record)
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
record = insert(:donation_goal, project: project)
refute delete?(user, record)
end
test "returns false when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
record = insert(:donation_goal, project: project)
refute delete?(user, record)
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
record = insert(:donation_goal, project: project)
assert delete?(user, record)
end
end
end
<|start_filename|>test/lib/code_corps/github/adapters/comment_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.CommentTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{GitHub.Adapters, Comment}
describe "to_comment/1" do
test "maps GithubComment correctly" do
github_comment = insert(:github_comment)
assert Adapters.Comment.to_comment(github_comment) == %{
created_at: github_comment.github_created_at,
markdown: github_comment.body,
modified_at: github_comment.github_updated_at
}
end
test "removes 'Posted by' header from body if one is present" do
header =
"Posted by \r\n\r\n[//]: # (Please type your edits below this line)\r\n\r\n---\r\n\r\n"
body = "test"
github_comment = insert(:github_comment, body: header <> body)
assert Adapters.Comment.to_comment(github_comment) == %{
created_at: github_comment.github_created_at,
markdown: body,
modified_at: github_comment.github_updated_at
}
end
end
describe "to_github_comment/1" do
test "maps from api payload correctly" do
%{"comment" => payload} = load_event_fixture("issue_comment_created")
assert Adapters.Comment.to_github_comment(payload) == %{
body: payload["body"],
github_created_at: payload["created_at"],
github_id: payload["id"],
github_updated_at: payload["updated_at"],
html_url: payload["html_url"],
url: payload["url"]
}
end
end
describe "to_api/1" do
test "maps Comment correctly" do
payload =
%Comment{markdown: "bar"}
|> Adapters.Comment.to_api
assert payload["body"] == "bar"
refute payload["created_at"]
refute payload["updated_at"]
end
test "adds 'Posted by' header to body if comment user is not github connected" do
user = insert(:user, github_id: nil)
comment = insert(:comment, user: user)
payload = comment |> Adapters.Comment.to_api
assert payload["body"] =~ "Posted by"
end
end
end
<|start_filename|>lib/code_corps/messages/emails.ex<|end_filename|>
defmodule CodeCorps.Messages.Emails do
@moduledoc """
Handles email notifications used within the Messages context
"""
alias CodeCorps.{
ConversationPart,
Emails,
Mailer,
Message,
Repo,
User
}
@message_preloads [:project, [conversations: :user]]
@doc ~S"""
Notifies all the recipients of a new `CodeCorps.Message`.
Target recipients are found in the `user` relationship of each
`CodeCorps.Conversation`.
"""
@spec notify_message_targets(Message.t) :: :ok
def notify_message_targets(%Message{initiated_by: "admin"} = message) do
message = message |> Repo.preload(@message_preloads)
message
|> Map.get(:conversations)
|> Enum.map(&Emails.MessageInitiatedByProjectEmail.create(message, &1))
|> Enum.each(&Mailer.deliver_now/1)
end
@part_preloads [
:author,
conversation: [
[conversation_parts: :author],
[message: [:author, [project: :organization]]],
:user
]
]
@doc ~S"""
Notifies users via email when a `CodeCorps.ConversationPart` has been added
to a `CodeCorps.Conversation`.
Sends to users participating in the conversation, excluding the author of the
conversation part.
"""
@spec notify_of_new_reply(ConversationPart.t) :: :ok
def notify_of_new_reply(%ConversationPart{} = part) do
part = part |> Repo.preload(@part_preloads)
part |> send_reply_to_conversation_emails()
end
@spec send_reply_to_conversation_emails(ConversationPart.t) :: :ok
defp send_reply_to_conversation_emails(%ConversationPart{} = part) do
part
|> get_conversation_participants()
|> Enum.map(&Emails.ReplyToConversationEmail.create(part, &1))
|> Enum.each(&Mailer.deliver_now/1)
end
@spec get_conversation_participants(ConversationPart.t) :: list(User.t)
defp get_conversation_participants(%ConversationPart{author_id: author_id} = part) do
part.conversation.conversation_parts
|> Enum.map(&Map.get(&1, :author))
|> Enum.concat([part.conversation.user])
|> Enum.concat([part.conversation.message.author])
|> Enum.reject(fn u -> u.id == author_id end)
|> Enum.uniq()
end
end
<|start_filename|>test/lib/code_corps/github/api/pull_request_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.API.PullRequestTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.{
GitHub.API.PullRequest
}
describe "from_url/2" do
test "calls github API to create an issue for assigned task, makes user request if user is connected, returns response" do
url = "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1"
github_app_installation = insert(:github_app_installation)
github_repo = insert(:github_repo, github_account_login: "foo", name: "bar", github_app_installation: github_app_installation)
assert PullRequest.from_url(url, github_repo)
assert_received({
:get,
endpoint_url,
_body,
[
{"Accept", "application/vnd.github.machine-man-preview+json"},
{"Authorization", "token" <> _tok}
],
_options
})
assert endpoint_url == url
end
end
end
<|start_filename|>test/lib/code_corps/validators/slug_validator_test.exs<|end_filename|>
defmodule CodeCorps.Validators.SlugValidatorTest do
use ExUnit.Case, async: true
import CodeCorps.Validators.SlugValidator
test "with only letters" do
changeset = process_slug("testslug") # can't be `slug` because reserved
assert changeset.valid?
end
test "with prefixed underscores" do
changeset = process_slug("_slug")
assert changeset.valid?
end
test "with suffixed underscores" do
changeset = process_slug("slug_")
assert changeset.valid?
end
test "with prefixed numbers" do
changeset = process_slug("123slug")
assert changeset.valid?
end
test "with suffixed numbers" do
changeset = process_slug("slug123")
assert changeset.valid?
end
test "with multiple dashes" do
changeset = process_slug("slug-slug-slug")
assert changeset.valid?
end
test "with multiple underscores" do
changeset = process_slug("slug_slug_slug")
assert changeset.valid?
end
test "with multiple consecutive underscores" do
changeset = process_slug("slug___slug")
assert changeset.valid?
end
test "with one character" do
changeset = process_slug("s")
assert changeset.valid?
end
test "with prefixed symbols" do
changeset = process_slug("@slug")
refute changeset.valid?
end
test "with prefixed dashes" do
changeset = process_slug("-slug")
refute changeset.valid?
end
test "with suffixed dashes" do
changeset = process_slug("slug-")
refute changeset.valid?
end
test "with multiple consecutive dashes" do
changeset = process_slug("slug---slug")
refute changeset.valid?
end
test "with single slashes" do
changeset = process_slug("slug/slug")
refute changeset.valid?
end
test "with multiple slashes" do
changeset = process_slug("slug/slug/slug")
refute changeset.valid?
end
test "with multiple consecutive slashes" do
changeset = process_slug("slug///slug")
refute changeset.valid?
end
test "with reserved routes" do
changeset = process_slug("about")
refute changeset.valid?
end
test "reserves all api routes" do
CodeCorpsWeb.Router.__routes__
|> Enum.map(&Map.get(&1, :path))
|> Enum.map(&String.split(&1, "/"))
|> List.flatten
|> Enum.reject(fn fragment -> fragment == "" end)
|> Enum.reject(fn fragment -> fragment |> String.at(0) == ":" end)
|> Enum.uniq
|> Enum.sort
|> Enum.each(fn reserved ->
changeset = process_slug(reserved)
refute changeset.valid?, "#{reserved} should not be allowed as a slug"
end)
end
defp process_slug(slug) do
slug
|> cast_slug
|> validate_slug(:slug)
end
defp cast_slug(slug) do
Ecto.Changeset.cast({%{slug: nil}, %{slug: :string}}, %{"slug" => slug}, [:slug])
end
end
<|start_filename|>test/lib/code_corps/stripe_service/stripe_platform_customer_service_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.StripePlatformCustomerServiceTest do
@moduledoc false
use CodeCorps.ModelCase
alias CodeCorps.StripePlatformCustomer
alias CodeCorps.StripeService.StripePlatformCustomerService
describe "update/2" do
test "performs update" do
customer = insert(:stripe_platform_customer)
{
:ok,
%StripePlatformCustomer{} = customer,
%Stripe.Customer{} = stripe_customer
} = StripePlatformCustomerService.update(customer, %{email: "<EMAIL>"})
assert customer.email == "<EMAIL>"
assert stripe_customer.email == "<EMAIL>"
assert stripe_customer.id == customer.id_from_stripe
end
test "returns changeset with validation errors if there is an issue" do
customer = insert(:stripe_platform_customer)
{:error, changeset} = StripePlatformCustomerService.update(customer, %{email: nil})
refute changeset.valid?
end
end
describe "update_from_stripe" do
test "performs update using information from Stripe API" do
customer = insert(:stripe_platform_customer)
{:ok, %StripePlatformCustomer{} = updated_customer, nil} =
StripePlatformCustomerService.update_from_stripe(customer.id_from_stripe)
# Hardcoded in StripeTesting.Customer
assert updated_customer.email == "<EMAIL>"
customer = Repo.get(StripePlatformCustomer, customer.id)
assert customer.email == "<EMAIL>"
end
test "also performs update of connect customers if any" do
platform_customer = insert(:stripe_platform_customer)
[connect_customer_1, connect_customer_2] =
insert_pair(:stripe_connect_customer, stripe_platform_customer: platform_customer)
{:ok, %StripePlatformCustomer{} = updated_customer, connect_updates} =
StripePlatformCustomerService.update_from_stripe(platform_customer.id_from_stripe)
# Hardcoded in StripeTesting.Customer
assert updated_customer.email == "<EMAIL>"
platform_customer = Repo.get(StripePlatformCustomer, platform_customer.id)
assert platform_customer.email == "<EMAIL>"
[
{:ok, %Stripe.Customer{} = stripe_record_1},
{:ok, %Stripe.Customer{} = stripe_record_2}
] = connect_updates
original_ids_from_stripe =
[connect_customer_1, connect_customer_2]
|> Enum.map(&Map.get(&1, :id_from_stripe))
|> Enum.sort
result_ids_from_stripe =
[stripe_record_1, stripe_record_2]
|> Enum.map(&Map.get(&1, :id))
|> Enum.sort
assert result_ids_from_stripe == original_ids_from_stripe
assert stripe_record_1.email == "<EMAIL>"
assert stripe_record_2.email == "<EMAIL>"
end
end
end
<|start_filename|>test/lib/code_corps/github/event/issue_comment/issue_comment_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.IssueCommentTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
Comment,
GithubComment,
GithubIssue,
GitHub.Event.IssueComment,
Task,
Repo,
User
}
for action <- ["created", "edited"] do
describe "handle/1 for IssueComment::#{action}" do
@payload load_event_fixture("issue_comment_#{action}")
test "creates or updates associated records" do
%{"repository" => %{"id" => repo_github_id}} = @payload
project = insert(:project)
insert(:github_repo, github_id: repo_github_id, project: project)
insert(:task_list, project: project, inbox: true)
{:ok, %Comment{}} = IssueComment.handle(@payload)
assert Repo.aggregate(Comment, :count, :id) == 1
assert Repo.aggregate(GithubComment, :count, :id) == 1
assert Repo.aggregate(GithubIssue, :count, :id) == 1
assert Repo.aggregate(Task, :count, :id) == 1
end
test "returns error if unmatched repository" do
assert IssueComment.handle(@payload) == {:error, :repo_not_found}
refute Repo.one(User)
end
test "returns error if payload is wrong" do
assert {:error, :unexpected_payload} == IssueComment.handle(%{})
end
test "returns error if repo payload is wrong" do
assert {:error, :unexpected_payload} == IssueComment.handle(@payload |> Map.put("repository", "foo"))
end
test "returns error if issue payload is wrong" do
assert {:error, :unexpected_payload} == IssueComment.handle(@payload |> Map.put("issue", "foo"))
end
test "returns error if comment payload is wrong" do
assert {:error, :unexpected_payload} == IssueComment.handle(@payload |> Map.put("comment", "foo"))
end
end
end
describe "handle/1 for IssueComment::deleted" do
@payload load_event_fixture("issue_comment_deleted")
test "deletes all comments related to and github comment with github_id specified in the payload" do
%{"comment" => %{"id" => github_id}} = @payload
github_repo = insert(:github_repo)
github_issue = insert(:github_issue, github_repo: github_repo)
github_comment = insert(:github_comment, github_id: github_id, github_issue: github_issue)
comment = insert(:comment, github_comment: github_comment)
{:ok, results} = IssueComment.handle(@payload)
%{
deleted_comments: [deleted_comment],
deleted_github_comment: deleted_github_comment
} = results
assert github_comment.id == deleted_github_comment.id
assert comment.id == deleted_comment.id
assert Repo.aggregate(Comment, :count, :id) == 0
assert Repo.aggregate(GithubComment, :count, :id) == 0
end
test "returns error if payload is wrong" do
assert {:error, :unexpected_payload} == IssueComment.handle(%{})
end
test "returns error if repo payload is wrong" do
assert {:error, :unexpected_payload} == IssueComment.handle(@payload |> Map.put("repository", "foo"))
end
test "returns error if issue payload is wrong" do
assert {:error, :unexpected_payload} == IssueComment.handle(@payload |> Map.put("issue", "foo"))
end
test "returns error if comment payload is wrong" do
assert {:error, :unexpected_payload} == IssueComment.handle(@payload |> Map.put("comment", "foo"))
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/github_pull_request_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.GithubPullRequestControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :github_pull_request
describe "index" do
test "lists all resources", %{conn: conn} do
[record_1, record_2] = insert_pair(:github_pull_request)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
test "filters resources by record id", %{conn: conn} do
[record_1, record_2 | _] = insert_list(3, :github_pull_request)
path = "github-pull-requests/?filter[id]=#{record_1.id},#{record_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
record = insert(:github_pull_request)
conn
|> request_show(record)
|> json_response(200)
|> assert_id_from_response(record.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
end
<|start_filename|>test/lib/code_corps/github/api/headers_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.API.HeadersTest do
@moduledoc false
use ExUnit.Case, async: true
alias CodeCorps.GitHub.API.Headers
describe "access_token_request/0" do
test "works" do
assert Headers.access_token_request == [
{"Accept", "application/json"},
{"Content-Type", "application/json"}
]
end
end
describe "integration_request/1" do
test "returns defaults when provided a blank map" do
headers = Headers.integration_request(%{})
assert {"Accept", "application/vnd.github.machine-man-preview+json"} in headers
end
test "merges with provided map" do
headers = Headers.integration_request(%{"foo" => "bar"})
assert {"Accept", "application/vnd.github.machine-man-preview+json"} in headers
assert {"foo", "bar"} in headers
end
test "prioritizes keys in provided map" do
headers = Headers.integration_request(%{"foo" => "bar", "Accept" => "baz"})
assert {"Accept", "baz"} in headers
assert {"foo", "bar"} in headers
end
test "adds a jwt to the headers" do
headers = Headers.integration_request(%{})
assert headers |> Enum.find(fn {key, _value} -> key == "Authorization" end)
end
end
describe "user_request/2" do
test "returns defaults when provided a blank map" do
headers = Headers.user_request(%{}, [])
assert {"Accept", "application/vnd.github.machine-man-preview+json"} in headers
end
test "merges with provided map" do
headers = Headers.user_request(%{"foo" => "bar"}, [])
assert {"Accept", "application/vnd.github.machine-man-preview+json"} in headers
assert {"foo", "bar"} in headers
end
test "prioritizes keys in provided map" do
headers = Headers.user_request(%{"foo" => "bar", "Accept" => "baz"}, [])
assert {"Accept", "baz"} in headers
assert {"foo", "bar"} in headers
end
test "adds access token if key is present in opts and not nil" do
headers = Headers.user_request(%{"foo" => "bar"}, [access_token: "foo_bar"])
assert {"Accept", "application/vnd.github.machine-man-preview+json"} in headers
assert {"foo", "bar"} in headers
assert {"Authorization", "token foo_bar"} in headers
end
test "does not add access token if key is present in opts but is nil" do
headers = Headers.user_request(%{"foo" => "bar"}, [access_token: nil])
assert {"Accept", "application/vnd.github.machine-man-preview+json"} in headers
assert {"foo", "bar"} in headers
refute headers |> Enum.find(fn {key, _value} -> key == "Authorization" end)
end
end
end
<|start_filename|>lib/code_corps/github/adapters/repo.ex<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.Repo do
@mapping [
{:github_account_avatar_url, ["owner", "avatar_url"]},
{:github_account_id, ["owner", "id"]},
{:github_account_login, ["owner", "login"]},
{:github_account_type, ["owner", "type"]},
{:github_id, ["id"]},
{:name, ["name"]}
]
@spec from_api(map) :: map
def from_api(%{} = payload) do
payload |> CodeCorps.Adapter.MapTransformer.transform(@mapping)
end
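# Illustrative sketch (not part of the original module), assuming
# `MapTransformer.transform/2` copies each listed payload path into the
# corresponding atom key: a repository payload such as
#
#   %{"id" => 1234, "name" => "example-repo",
#     "owner" => %{"id" => 1, "login" => "octocat",
#                  "avatar_url" => "https://example.com/avatar.png", "type" => "User"}}
#
# would yield attributes along the lines of
#
#   %{github_id: 1234, name: "example-repo", github_account_id: 1,
#     github_account_login: "octocat",
#     github_account_avatar_url: "https://example.com/avatar.png",
#     github_account_type: "User"}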
end
<|start_filename|>lib/code_corps/emails/project_approval_request_email.ex<|end_filename|>
defmodule CodeCorps.Emails.ProjectApprovalRequestEmail do
import Bamboo.Email, only: [to: 2]
import Bamboo.PostmarkHelper
import Ecto.Query, only: [where: 3]
alias CodeCorps.{Project, Repo, User, WebClient}
alias CodeCorps.Emails.BaseEmail
alias CodeCorps.Presenters.ImagePresenter
@spec create(Project.t) :: Bamboo.Email.t
def create(%Project{} = project) do
BaseEmail.create
|> to(get_site_admins_emails())
|> template(template_id(), build_model(project))
end
@spec build_model(Project.t) :: map
defp build_model(%Project{} = project) do
%{
admin_project_show_url: project |> admin_url(),
project_description: project.description,
project_logo_url: ImagePresenter.large(project),
project_title: project.title,
project_url: project |> preload() |> project_url(),
subject: "#{project.title} is asking to be approved"
}
end
@spec preload(Project.t) :: Project.t
defp preload(%Project{} = project), do: project |> Repo.preload(:organization)
@spec admin_url(Project.t) :: String.t
defp admin_url(project) do
WebClient.url()
|> URI.merge("/admin/projects/" <> Integer.to_string(project.id))
|> URI.to_string()
end
@spec project_url(Project.t) :: String.t
defp project_url(project) do
WebClient.url()
|> URI.merge(project.organization.slug <> "/" <> project.slug)
|> URI.to_string()
end
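# Illustrative sketch (not part of the original module), assuming
# `WebClient.url()` returns "http://localhost:4200": for a project with id 1
# belonging to an organization with slug "code-corps" and having the slug
# "code-corps-api", the helpers above would build
#
#   admin_url(project)   #=> "http://localhost:4200/admin/projects/1"
#   project_url(project) #=> "http://localhost:4200/code-corps/code-corps-api"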
@spec template_id :: String.t
defp template_id, do: Application.get_env(:code_corps, :postmark_project_approval_request_template)
@spec get_site_admins_emails() :: list(String.t)
defp get_site_admins_emails() do
get_site_admins() |> Enum.map(&extract_email/1)
end
@spec extract_email(User.t) :: String.t
defp extract_email(%User{email: email}), do: email
@spec get_site_admins() :: list(User.t)
defp get_site_admins() do
User
|> where([object], object.admin == true)
|> Repo.all()
end
end
<|start_filename|>lib/code_corps/github/adapters/comment.ex<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.Comment do
@moduledoc ~S"""
Used to convert between GitHub API data representing a GitHub Issue Comment
and the attributes of a `CodeCorps.Comment` or a `CodeCorps.GithubComment`.
"""
alias CodeCorps.{
Adapter.MapTransformer,
Comment,
GithubComment,
GitHub.Adapters.Utils.BodyDecorator
}
@github_comment_to_comment_mapping [
{:created_at, [:github_created_at]},
{:markdown, [:body]},
{:modified_at, [:github_updated_at]}
]
@github_payload_to_comment_mapping [
{:created_at, ["created_at"]},
{:markdown, ["body"]},
{:modified_at, ["updated_at"]}
]
@github_payload_to_github_comment_mapping [
{:body, ["body"]},
{:github_created_at, ["created_at"]},
{:github_id, ["id"]},
{:github_updated_at, ["updated_at"]},
{:html_url, ["html_url"]},
{:url, ["url"]}
]
@doc ~S"""
Converts a `CodeCorps.GithubComment` into a set of attributes suitable for
creating or updating a `CodeCorps.Comment`.
"""
@spec to_comment(GithubComment.t) :: map
def to_comment(%GithubComment{} = github_comment) do
github_comment
|> Map.from_struct()
|> BodyDecorator.remove_code_corps_header()
|> MapTransformer.transform(@github_comment_to_comment_mapping)
end
@doc ~S"""
Converts a GitHub Issue Comment payload into a set of attributes suitable for
creating or updating a `CodeCorps.GithubComment`
"""
@spec to_github_comment(map) :: map
def to_github_comment(%{} = payload) do
payload |> MapTransformer.transform(@github_payload_to_github_comment_mapping)
end
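# Illustrative sketch (not part of the original module), assuming
# `MapTransformer.transform/2` copies each listed payload path into the
# corresponding atom key: an issue comment payload such as
#
#   %{"id" => 1, "body" => "Nice work!",
#     "created_at" => "2017-01-01T00:00:00Z", "updated_at" => "2017-01-02T00:00:00Z",
#     "html_url" => "https://github.com/owner/repo/issues/1#issuecomment-1",
#     "url" => "https://api.github.com/repos/owner/repo/issues/comments/1"}
#
# would be adapted into `CodeCorps.GithubComment` attributes roughly like
#
#   %{github_id: 1, body: "Nice work!",
#     github_created_at: "2017-01-01T00:00:00Z", github_updated_at: "2017-01-02T00:00:00Z",
#     html_url: "https://github.com/owner/repo/issues/1#issuecomment-1",
#     url: "https://api.github.com/repos/owner/repo/issues/comments/1"}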
@autogenerated_github_keys ~w(created_at id updated_at)
@doc ~S"""
Converts a `CodeCorps.Comment` into a set of attributes suitable for creating
or updating a GitHub Issue Comment through the GitHub API.
"""
@spec to_api(Comment.t) :: map
def to_api(%Comment{} = comment) do
comment
|> Map.from_struct
|> MapTransformer.transform_inverse(@github_payload_to_comment_mapping)
|> Map.drop(@autogenerated_github_keys)
|> BodyDecorator.add_code_corps_header(comment)
end
end
<|start_filename|>test/lib/code_corps_web/views/preview_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.PreviewViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
user = insert(:user)
preview = insert(:preview, user: user)
rendered_json = render(CodeCorpsWeb.PreviewView, "show.json-api", data: preview)
expected_json = %{
"data" => %{
"id" => preview.id |> Integer.to_string,
"type" => "preview",
"attributes" => %{
"body" => preview.body,
"inserted-at" => preview.inserted_at,
"markdown" => preview.markdown,
"updated-at" => preview.updated_at
},
"relationships" => %{
"user" => %{
"data" => %{
"id" => preview.user_id |> Integer.to_string,
"type" => "user"
}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert expected_json == rendered_json
end
end
<|start_filename|>test/lib/code_corps_web/controllers/stripe_connect_account_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectAccountControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :stripe_connect_account
alias CodeCorps.StripeConnectAccount
describe "show" do
@tag :authenticated
test "shows chosen resource", %{conn: conn, current_user: current_user} do
organization = insert(:organization, owner: current_user)
stripe_connect_account = insert(:stripe_connect_account, organization: organization)
conn
|> request_show(stripe_connect_account)
|> json_response(200)
|> assert_id_from_response(stripe_connect_account.id)
end
test "renders 401 when unauthenticated", %{conn: conn} do
stripe_connect_account = insert(:stripe_connect_account)
assert conn |> request_show(stripe_connect_account) |> json_response(401)
end
@tag :authenticated
test "renders 403 when unauthorized", %{conn: conn} do
stripe_connect_account = insert(:stripe_connect_account)
assert conn |> request_show(stripe_connect_account) |> json_response(403)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when user is authenticated and authorized", %{conn: conn, current_user: current_user} do
organization = insert(:organization, owner: current_user)
attrs = %{organization: organization, country: "US", tos_acceptance_date: 123456}
response = conn |> put_req_header("user-agent", "Test agent") |> request_create(attrs)
assert response |> json_response(201)
user_id = current_user.id
assert_received {:track, ^user_id, "Created Stripe Connect Account", %{}}
account = StripeConnectAccount |> Repo.one
assert account.tos_acceptance_date
request_ip = CodeCorps.ConnUtils.extract_ip(response)
assert account.tos_acceptance_ip == request_ip
request_user_agent = CodeCorps.ConnUtils.extract_user_agent(response)
assert account.tos_acceptance_user_agent == request_user_agent
end
test "does not create resource and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
organization = insert(:organization)
attrs = %{ organization: organization }
assert conn |> request_create(attrs) |> json_response(403)
end
end
describe "update" do
@tag :authenticated
test "updates external account on resource when user is authenticated and authorized", %{conn: conn, current_user: current_user} do
organization = insert(:organization, owner: current_user)
stripe_connect_account = insert(:stripe_connect_account, organization: organization)
attrs = %{external_account: "ba_test123"}
assert conn |> request_update(stripe_connect_account, attrs) |> json_response(200)
updated_account = Repo.get(StripeConnectAccount, stripe_connect_account.id)
assert updated_account.external_account == "ba_test123"
end
test "does not update resource and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "does not update resource and renders 403 when not authorized", %{conn: conn} do
organization = insert(:organization)
stripe_connect_account = insert(:stripe_connect_account, organization: organization)
assert conn |> request_update(stripe_connect_account, %{}) |> json_response(403)
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/webhook_processing/event_handler_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.WebhookProcessing.EventHandlerTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeService.WebhookProcessing.{
ConnectEventHandler, EventHandler, PlatformEventHandler
}
alias CodeCorps.{
Repo, Project,
StripeEvent, StripeInvoice, StripePlatformCard, StripePlatformCustomer,
StripeTesting
}
defmodule CodeCorps.StripeService.WebhookProcessing.EventHandlerTest.StubObject do
defstruct [:id, :object]
end
defp stub_object() do
%CodeCorps.StripeService.WebhookProcessing.EventHandlerTest.StubObject{id: "stub_id", object: "stub"}
end
defp build_event(user_id), do: build_event("any.event", "any_object", user_id)
defp build_event(type, object_type, user_id), do: build_event(type, object_type, stub_object(), user_id)
defp build_event(type, object_type, object, user_id), do: build_event("some_id", type, object_type, object, user_id)
defp build_event(id, type, object_type, object, user_id) do
object = Map.merge(object, %{object: object_type})
%Stripe.Event{id: id, type: type, data: %{object: object}, user_id: user_id}
end
describe "platform events" do
test "handles customer.updated" do
platform_customer = insert(:stripe_platform_customer)
stripe_customer = %Stripe.Customer{id: platform_customer.id_from_stripe}
event = build_event("customer.updated", "customer", stripe_customer, nil)
{:ok, event} = EventHandler.handle(event, PlatformEventHandler)
assert event.object_type == "customer"
assert event.object_id == platform_customer.id_from_stripe
assert event.status == "processed"
platform_customer = Repo.get(StripePlatformCustomer, platform_customer.id)
# hardcoded in StripeTesting.Customer
assert platform_customer.email == "<EMAIL>"
end
test "handles customer.source.updated" do
platform_customer = insert(:stripe_platform_customer)
platform_card = insert(:stripe_platform_card, customer_id_from_stripe: platform_customer.id_from_stripe)
stripe_card = %Stripe.Card{id: platform_card.id_from_stripe}
event = build_event("customer.source.updated", "card", stripe_card, nil)
{:ok, event} = EventHandler.handle(event, PlatformEventHandler)
assert event.object_type == "card"
assert event.object_id == platform_card.id_from_stripe
assert event.status == "processed"
updated_card = Repo.get_by(StripePlatformCard, id: platform_card.id)
# hardcoded in StripeTesting.Card
assert updated_card.name == "<NAME>"
end
end
describe "connect events" do
test "handles account.updated" do
connect_account = insert(:stripe_connect_account)
event = build_event(
"account.updated",
"account",
%Stripe.Account{id: connect_account.id_from_stripe},
connect_account.id_from_stripe
)
{:ok, event} = EventHandler.handle(event, ConnectEventHandler, "acc_123")
assert event.object_type == "account"
assert event.object_id == connect_account.id_from_stripe
assert event.status == "processed"
assert event.user_id == "acc_123"
end
test "handles charge.succeeded as processed when everything is in order" do
connect_account = insert(:stripe_connect_account)
charge_fixture = StripeTesting.Helpers.load_fixture("charge")
insert(:stripe_connect_customer, id_from_stripe: charge_fixture.customer)
invoice_fixture = StripeTesting.Helpers.load_fixture(charge_fixture.invoice)
insert(:stripe_connect_subscription, id_from_stripe: invoice_fixture.subscription)
project = Repo.one(Project)
insert(:donation_goal, current: true, project: project)
event = build_event("charge.succeeded", "charge", charge_fixture, connect_account.id_from_stripe)
{:ok, event} = EventHandler.handle(event, ConnectEventHandler, connect_account.id_from_stripe)
assert event.object_type == "charge"
assert event.object_id == charge_fixture.id
assert event.status == "processed"
end
test "handles charge.succeeded as errored when something goes wrong with email" do
connect_account = insert(:stripe_connect_account)
charge_fixture = StripeTesting.Helpers.load_fixture("charge")
insert(:stripe_connect_customer, id_from_stripe: charge_fixture.customer)
event = build_event("charge.succeeded", "charge", charge_fixture, connect_account.id_from_stripe)
{:ok, event} = EventHandler.handle(event, ConnectEventHandler, connect_account.id_from_stripe)
assert event.object_type == "charge"
assert event.object_id == charge_fixture.id
assert event.status == "errored"
end
test "handles charge.succeeded as errored when something goes wrong with creating a charge" do
charge_fixture = StripeTesting.Helpers.load_fixture("charge")
event = build_event("charge.succeeded", "charge", charge_fixture, "bad_account")
{:ok, event} = EventHandler.handle(event, ConnectEventHandler, "bad_account")
assert event.object_type == "charge"
assert event.object_id == charge_fixture.id
assert event.status == "errored"
end
test "handles customer.subscription.updated" do
project = insert(:project)
plan = insert(:stripe_connect_plan, project: project)
subscription = insert(:stripe_connect_subscription, stripe_connect_plan: plan)
account = insert(:stripe_connect_account)
platform_customer = insert(:stripe_platform_customer)
connect_customer = insert(
:stripe_connect_customer,
stripe_connect_account: account,
stripe_platform_customer: platform_customer
)
event = build_event(
"customer.subscription.updated",
"subscription",
%Stripe.Subscription{
id: subscription.id_from_stripe,
customer: connect_customer.id_from_stripe
},
account.id_from_stripe
)
{:ok, event} = EventHandler.handle(event, ConnectEventHandler, "acc_123")
assert event.object_type == "subscription"
assert event.object_id == subscription.id_from_stripe
assert event.status == "processed"
assert event.user_id == "acc_123"
end
test "handles customer.subscription.deleted" do
project = insert(:project)
plan = insert(:stripe_connect_plan, project: project)
subscription = insert(:stripe_connect_subscription, stripe_connect_plan: plan)
account = insert(:stripe_connect_account)
platform_customer = insert(:stripe_platform_customer)
connect_customer = insert(
:stripe_connect_customer,
stripe_connect_account: account,
stripe_platform_customer: platform_customer
)
event = build_event(
"customer.subscription.deleted",
"subscription",
%Stripe.Subscription{
id: subscription.id_from_stripe,
customer: connect_customer.id_from_stripe
},
account.id_from_stripe
)
{:ok, event} = EventHandler.handle(event, ConnectEventHandler, "acc_123")
assert event.object_type == "subscription"
assert event.object_id == subscription.id_from_stripe
assert event.status == "processed"
assert event.user_id == "acc_123"
end
test "handles invoice.payment_succeeded" do
fixture = StripeTesting.Helpers.load_fixture("invoice")
insert(:stripe_connect_subscription, id_from_stripe: fixture.subscription)
user = insert(:user)
stripe_platform_customer = insert(:stripe_platform_customer, user: user)
# the customer id from stripe is likewise hardcoded in the fixture
insert(
:stripe_connect_customer,
id_from_stripe: fixture.customer,
stripe_platform_customer: stripe_platform_customer,
user: user
)
event = build_event("invoice.payment_succeeded", "invoice", fixture, nil)
{:ok, event} = EventHandler.handle(event, ConnectEventHandler, "acc_123")
assert event.object_type == "invoice"
assert event.object_id == fixture.id
assert event.status == "processed"
assert event.user_id == "acc_123"
assert Repo.get_by(StripeInvoice, id_from_stripe: fixture.id)
end
end
describe "any event" do
test "sets endpoint to 'platform' when using PlatformEventHandler" do
event = build_event(nil)
{:ok, event} = EventHandler.handle(event, PlatformEventHandler)
assert event.endpoint == "platform"
assert event.user_id == nil
end
test "sets endpoint to 'connect' when using ConnectEventHandler" do
event = build_event(nil)
{:ok, event} = EventHandler.handle(event, ConnectEventHandler, "acc_123")
assert event.endpoint == "connect"
assert event.user_id == "acc_123"
end
test "creates event if id is new" do
event = build_event(nil)
{:ok, event} = EventHandler.handle(event, PlatformEventHandler)
assert event.id_from_stripe == "some_id"
assert event.object_id == "stub_id"
assert event.object_type == "any_object"
assert event.status == "unhandled"
assert event.user_id == nil
end
test "uses existing event if id exists" do
local_event = insert(:stripe_event)
event = build_event(local_event.id_from_stripe, "any.event", "any_object", stub_object(), nil)
{:ok, returned_event} = EventHandler.handle(event, PlatformEventHandler)
assert returned_event.id == local_event.id
assert StripeEvent |> Repo.aggregate(:count, :id) == 1
end
test "sets event as unhandled if event is not handled" do
event = build_event("unhandled.event", "any_object", nil)
{:ok, event} = EventHandler.handle(event, PlatformEventHandler)
assert event.status == "unhandled"
end
test "errors out event if handling fails" do
# we build the event, but do not make the customer, causing it to error out
event = build_event("customer.updated", "customer", %Stripe.Customer{id: "some_id"}, nil)
{:ok, event} = EventHandler.handle(event, PlatformEventHandler)
assert event.status == "errored"
end
test "marks event as processed if handling is done" do
# we build the event AND create the customer, so it should process correctly
event = build_event("customer.updated", "customer", %Stripe.Customer{id: "some_id"}, nil)
insert(:stripe_platform_customer, id_from_stripe: "some_id")
{:ok, event} = EventHandler.handle(event, PlatformEventHandler)
assert event.status == "processed"
end
test "leaves event alone if already processing" do
local_event = insert(:stripe_event, status: "processing")
event = build_event(local_event.id_from_stripe, "any.event", "any_object", %Stripe.Customer{id: "some_id"}, nil)
assert {:error, :already_processing} == EventHandler.handle(event, PlatformEventHandler)
end
end
describe "ignored events" do
test "properly sets as ignored" do
event = build_event("application_fee.created", "application_fee", nil)
{:ok, event} = EventHandler.handle(event, PlatformEventHandler)
assert event.status == "ignored"
assert event.ignored_reason
end
end
end
<|start_filename|>priv/repo/migrations/20171106045740_add_done_to_task_list.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddDoneToTaskList do
use Ecto.Migration
import Ecto.Query
alias CodeCorps.Repo
def up do
alter table(:task_lists) do
add :done, :boolean, default: false
end
flush()
from(tl in "task_lists", where: [name: "Done"], update: [set: [done: true]])
|> Repo.update_all([])
task_list_query =
from(tl in "task_lists", where: [done: true], select: [:id])
task_list_query |> Repo.all() |> Enum.each(fn task ->
# tests do not have any data, so we need to account for potential nil
case task do
%{id: done_list_id} ->
task_update_query = from t in "tasks",
where: [status: "closed"],
update: [set: [task_list_id: ^done_list_id]]
task_update_query |> Repo.update_all([])
nil -> nil
end
end)
end
def down do
alter table(:task_lists) do
remove :done
end
end
end
<|start_filename|>lib/code_corps_web/controllers/github_issue_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubIssueController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{GithubIssue, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with github_issues <- GithubIssue |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: github_issues)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %GithubIssue{} = github_issue <- GithubIssue |> Repo.get(id) do
conn |> render("show.json-api", data: github_issue)
end
end
end
<|start_filename|>lib/code_corps/model/github_event.ex<|end_filename|>
defmodule CodeCorps.GithubEvent do
use CodeCorps.Model
use Scrivener, page_size: 20
alias Ecto.Changeset
@type t :: %__MODULE__{}
schema "github_events" do
field :action, :string
field :data, :string
field :error, :string
field :failure_reason, :string
field :github_delivery_id, :string
field :payload, :map
field :retry, :boolean, virtual: true
field :status, :string
field :type, :string
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:action, :data, :github_delivery_id, :payload, :error, :status, :type])
|> validate_required([:action, :github_delivery_id, :payload, :status, :type])
|> validate_inclusion(:status, statuses())
end
def update_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:retry, :status])
|> validate_acceptance(:retry)
|> validate_retry()
|> validate_inclusion(:status, statuses())
end
def statuses do
~w{unprocessed processing processed errored unsupported reprocessing}
end
defp validate_retry(%Changeset{changes: %{retry: true}} = changeset) do
case changeset |> Changeset.get_field(:status) do
"errored" -> Changeset.put_change(changeset, :status, "reprocessing")
_ -> Changeset.add_error(changeset, :retry, "only possible when status is errored")
end
end
defp validate_retry(changeset), do: changeset
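# Illustrative sketch (not part of the original module): a retry is only
# accepted for an event that previously errored, e.g.
#
#   update_changeset(%GithubEvent{status: "errored"}, %{"retry" => true})
#   # => changeset with :status changed to "reprocessing"
#
#   update_changeset(%GithubEvent{status: "processed"}, %{"retry" => true})
#   # => changeset with an error on :retry ("only possible when status is errored")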
end
<|start_filename|>lib/code_corps_web/controllers/stripe_connect_account_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectAccountController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.ConnUtils
alias CodeCorps.StripeConnectAccount
alias CodeCorps.StripeService.StripeConnectAccountService
alias CodeCorps.User
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
%StripeConnectAccount{} = account <- StripeConnectAccount |> Repo.get(id),
{:ok, :authorized} <- current_user |> Policy.authorize(:show, account, params)
do
account = preload(account)
conn |> render("show.json-api", data: account)
end
end
@spec create(Conn.t, map) :: Conn.t
def create(%Conn{} = conn, params) do
params =
params
|> Map.put("type", "custom")
|> Map.put("tos_acceptance_ip", conn |> ConnUtils.extract_ip)
|> Map.put("tos_acceptance_user_agent", conn |> ConnUtils.extract_user_agent)
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %StripeConnectAccount{}, params),
{:ok, %StripeConnectAccount{} = account} <- StripeConnectAccountService.create(params),
account <- preload(account)
do
conn |> put_status(:created) |> render("show.json-api", data: account)
end
end
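# Illustrative note (not part of the original module): `create/2` forces the
# Stripe account type and records terms-of-service acceptance metadata, so a
# request carrying %{"organization_id" => 1} is effectively handed to
# `StripeConnectAccountService.create/1` as
#
#   %{"organization_id" => 1, "type" => "custom",
#     "tos_acceptance_ip" => ip_extracted_from_conn,
#     "tos_acceptance_user_agent" => user_agent_extracted_from_conn}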
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %StripeConnectAccount{} = account <- StripeConnectAccount |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, account, params),
{:ok, %StripeConnectAccount{} = updated_account} <- account |> StripeConnectAccountService.update(params),
updated_account <- preload(updated_account)
do
conn |> render("show.json-api", data: updated_account)
end
end
@preloads [:stripe_external_account]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>priv/repo/migrations/20170926134646_add_failure_reason_to_github_events.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddFailureReasonToGithubEvents do
use Ecto.Migration
def change do
alter table(:github_events) do
add :failure_reason, :string
end
end
end
<|start_filename|>test/lib/code_corps_web/views/task_skill_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.TaskSkillViewTest do
@moduledoc false
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
task_skill = insert(:task_skill)
rendered_json = render(CodeCorpsWeb.TaskSkillView, "show.json-api", data: task_skill)
expected_json = %{
"data" => %{
"id" => task_skill.id |> Integer.to_string,
"type" => "task-skill",
"attributes" => %{},
"relationships" => %{
"task" => %{
"data" => %{"id" => task_skill.task_id |> Integer.to_string, "type" => "task"}
},
"skill" => %{
"data" => %{"id" => task_skill.skill_id |> Integer.to_string, "type" => "skill"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/lib/code_corps/policy/task_test.exs<|end_filename|>
defmodule CodeCorps.Policy.TaskTest do
use CodeCorps.PolicyCase
alias CodeCorps.Policy
describe "create?" do
test "returns true when user is task author" do
user = insert(:user)
assert Policy.Task.create?(user, %{"user_id" => user.id})
end
test "returns false when user is not the author" do
user = insert(:user)
refute Policy.Task.create?(user, %{"user_id" => -1})
end
end
describe "update?" do
test "returns true when user is the task author" do
user = insert(:user)
task = insert(:task, user: user)
assert Policy.Task.update?(user, task)
end
test "returns false when user is not associated to project or task" do
user = insert(:user)
task = insert(:task)
refute Policy.Task.update?(user, task)
end
test "returns false when user is a pending member of project" do
%{project: project, user: user} = insert(:project_user, role: "pending")
task = insert(:task, project: project)
refute Policy.Task.update?(user, task)
end
test "returns false when user is a contributing member of project" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
task = insert(:task, project: project)
refute Policy.Task.update?(user, task)
end
test "returns true when user is an admin member of project" do
%{project: project, user: user} = insert(:project_user, role: "admin")
task = insert(:task, project: project)
assert Policy.Task.update?(user, task)
end
test "returns true when user is the owner of the project" do
%{project: project, user: user} = insert(:project_user, role: "owner")
task = insert(:task, project: project)
assert Policy.Task.update?(user, task)
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/adapters/stripe_platform_customer_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripePlatformCustomerTest do
use ExUnit.Case, async: true
import CodeCorps.StripeService.Adapters.StripePlatformCustomerAdapter, only: [to_params: 2]
timestamp = 1479472835
@stripe_platform_customer %Stripe.Customer{
id: "cus_123",
account_balance: 0,
created: timestamp,
currency: "usd",
default_source: nil,
delinquent: false,
description: nil,
email: "<EMAIL>",
livemode: false,
metadata: %{}
}
@local_map %{
"id_from_stripe" => "cus_123",
"created" => timestamp,
"currency" => "usd",
"delinquent" => false,
"email" => "<EMAIL>"
}
describe "to_params/2" do
test "converts from stripe map to local properly" do
test_attributes = %{
"user_id" => 123,
"foo" => "bar"
}
expected_attributes = %{
"user_id" => 123,
}
{:ok, result} = to_params(@stripe_platform_customer, test_attributes)
expected_map = Map.merge(@local_map, expected_attributes)
assert result == expected_map
end
end
end
<|start_filename|>lib/code_corps/model/conversation_part.ex<|end_filename|>
defmodule CodeCorps.ConversationPart do
@moduledoc ~S"""
An individual "line of conversation" in a `CodeCorps.Conversation` thread,
depicting a reply to the `CodeCorps.Conversation` by either of the two sides.
When a project sends a `CodeCorps.Message` to one or more users, a
`CodeCorps.Conversation` needs to be created for each of those users, so
separate conversations can be held with different users starting from the same
original `CodeCorps.Message`.
Once replies start coming in, a `CodeCorps.ConversationPart` is created for
each of those replies, regardless of which side is making them.
"""
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "conversation_parts" do
field :body, :string, null: false
field :read_at, :utc_datetime, null: true
field :part_type, :string, default: "comment"
belongs_to :author, CodeCorps.User
belongs_to :conversation, CodeCorps.Conversation
timestamps()
end
end
<|start_filename|>lib/code_corps_web/controllers/organization_invite_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.OrganizationInviteController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Emails, Helpers.Query, Mailer, OrganizationInvite, User}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with organization_invites <- OrganizationInvite |> Query.id_filter(params) |> Query.optional_filters(params, ~w(code)a) |> Repo.all do
conn |> render("index.json-api", data: organization_invites)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %OrganizationInvite{} = organization_invite <- OrganizationInvite |> Repo.get(id) do
conn |> render("show.json-api", data: organization_invite)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %OrganizationInvite{}, params),
{:ok, %OrganizationInvite{} = organization_invite} <- %OrganizationInvite{} |> OrganizationInvite.create_changeset(params) |> Repo.insert do
send_email(organization_invite)
conn
|> put_status(:created)
|> render("show.json-api", data: organization_invite)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %OrganizationInvite{} = organization_invite <- OrganizationInvite |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, organization_invite),
{:ok, %OrganizationInvite{} = organization_invite} <- organization_invite |> OrganizationInvite.changeset(params) |> Repo.update do
conn |> render("show.json-api", data: organization_invite)
end
end
defp send_email(organization_invite) do
organization_invite
|> Emails.OrganizationInviteEmail.create()
|> Mailer.deliver_later()
end
end
<|start_filename|>priv/repo/migrations/20171003225853_add_created_at_and_modified_at_to_comments.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddCreatedAtAndModifiedAtToComments do
use Ecto.Migration
def change do
alter table(:comments) do
add :created_at, :utc_datetime
add :modified_at, :utc_datetime
add :created_from, :string, default: "code_corps"
add :modified_from, :string, default: "code_corps"
end
end
end
<|start_filename|>test/lib/code_corps_web/views/layout_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.LayoutViewTest do
use CodeCorpsWeb.ViewCase
end
<|start_filename|>lib/code_corps/analytics/in_memory_api.ex<|end_filename|>
defmodule CodeCorps.Analytics.InMemoryAPI do
@moduledoc """
In-memory interface to simulate calling out to the Segment API.
Each function has the same signature as its `CodeCorps.Analytics.SegmentAPI` counterpart; instead of calling out to Segment, each call is simply logged.
"""
require Logger
def identify(user_id, _traits), do: log_identify(user_id)
def track(user_id, event_name, properties), do: log_track(user_id, event_name, properties)
defp log_identify(user_id) do
Logger.info "Called identify for User #{user_id}"
end
defp log_track(user_id, event_name, properties) do
props = Poison.encode!(properties)
Logger.info "Called track for event #{event_name} for User #{user_id} and properties #{props}"
end
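# Illustrative note (not part of the original module): a call such as
#
#   CodeCorps.Analytics.InMemoryAPI.track(1, "Created Task", %{foo: "bar"})
#
# would log a line along the lines of
#
#   Called track for event Created Task for User 1 and properties {"foo":"bar"}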
end
<|start_filename|>lib/code_corps/model/github_issue_assignee.ex<|end_filename|>
defmodule CodeCorps.GithubIssueAssignee do
use Ecto.Schema
import Ecto.Changeset
schema "github_issue_assignees" do
belongs_to :github_issue, CodeCorps.GithubIssue
belongs_to :github_user, CodeCorps.GithubUser
timestamps()
end
@doc false
def changeset(struct, attrs) do
struct
|> cast(attrs, [:github_issue_id, :github_user_id])
|> validate_required([:github_issue_id, :github_user_id])
|> assoc_constraint(:github_issue)
|> assoc_constraint(:github_user)
|> unique_constraint(:github_user, name: :github_issue_assignees_github_issue_id_github_user_id_index)
end
end
<|start_filename|>lib/code_corps/policy/user_role.ex<|end_filename|>
defmodule CodeCorps.Policy.UserRole do
alias CodeCorps.UserRole
alias CodeCorps.User
def create?(%User{admin: true}, %{}), do: true
def create?(%User{id: id}, %{"user_id" => user_id}), do: id == user_id
def create?(%User{}, %{}), do: false
def delete?(%User{admin: true}, %UserRole{}), do: true
def delete?(%User{id: id}, %UserRole{user_id: user_id}), do: id == user_id
def delete?(%User{}, %UserRole{}), do: false
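# Illustrative sketch (not part of the original module): site admins may manage
# any user role, while non-admin users may only manage their own, e.g.
#
#   create?(%User{admin: true}, %{"user_id" => 123})  #=> true
#   create?(%User{id: 1}, %{"user_id" => 1})          #=> true
#   create?(%User{id: 1}, %{"user_id" => 2})          #=> false
#   delete?(%User{id: 1}, %UserRole{user_id: 2})      #=> false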
end
<|start_filename|>test/lib/code_corps/emails/project_approval_request_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.ProjectApprovalRequestEmailTest do
use CodeCorps.ModelCase
use Bamboo.Test
alias CodeCorps.Emails.ProjectApprovalRequestEmail
test "request email works" do
project = insert(:project)
admin1 = insert(:user, admin: true)
admin2 = insert(:user, admin: true)
email = ProjectApprovalRequestEmail.create(project)
assert email.from == "Code Corps<<EMAIL>>"
assert Enum.count(email.to) == 2
assert Enum.member?(email.to, admin1.email)
assert Enum.member?(email.to, admin2.email)
template_model = email.private.template_model
assert template_model == %{
admin_project_show_url: "http://localhost:4200/admin/projects/#{project.id}",
project_description: project.description,
project_logo_url: "#{Application.get_env(:code_corps, :asset_host)}/icons/project_default_large_.png",
project_title: project.title,
project_url: "http://localhost:4200/#{project.organization.slug}/#{project.slug}",
subject: "#{project.title} is asking to be approved"
}
end
end
<|start_filename|>test/lib/code_corps/model/stripe_platform_card_test.exs<|end_filename|>
defmodule CodeCorps.StripePlatformCardTest do
use CodeCorps.ModelCase
alias CodeCorps.StripePlatformCard
@valid_attrs %{
brand: "Visa",
customer_id_from_stripe: "cust_123",
cvc_check: "unchecked",
exp_month: 12,
exp_year: 2020,
last4: "4242",
name: "<NAME>",
id_from_stripe: "card_1234",
user_id: 1
}
@invalid_attrs %{}
describe "create_changeset/2" do
test "reports as valid when attributes are valid" do
user_id = insert(:user).id
changes = Map.merge(@valid_attrs, %{user_id: user_id})
changeset = StripePlatformCard.create_changeset(%StripePlatformCard{}, changes)
assert changeset.valid?
end
test "reports as invalid when attributes are invalid" do
changeset = StripePlatformCard.create_changeset(%StripePlatformCard{}, @invalid_attrs)
refute changeset.valid?
assert_error_message(changeset, :id_from_stripe, "can't be blank")
assert_error_message(changeset, :user_id, "can't be blank")
end
test "ensures associations link to records that exist" do
attrs = @valid_attrs |> Map.merge(%{user_id: -1})
{result, changeset} =
StripePlatformCard.create_changeset(%StripePlatformCard{}, attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :user, "does not exist")
end
end
describe "update_changeset/2" do
@valid_attrs %{name: "<NAME>", exp_month: 12, exp_year: 2020}
test "reports as valid when attributes are valid" do
platform_card = insert(:stripe_platform_card)
changeset = StripePlatformCard.update_changeset(platform_card, @valid_attrs)
assert changeset.valid?
end
@invalid_attrs %{name: nil, exp_month: nil, exp_year: nil}
test "requires name, exp_month and exp_year" do
platform_card = insert(:stripe_platform_card)
changeset = StripePlatformCard.update_changeset(platform_card, @invalid_attrs)
refute changeset.valid?
assert_error_message(changeset, :exp_month, "can't be blank")
assert_error_message(changeset, :exp_year, "can't be blank")
assert_error_message(changeset, :name, "can't be blank")
end
end
end
<|start_filename|>test/lib/code_corps/model/role_skill_test.exs<|end_filename|>
defmodule CodeCorps.RoleSkillTest do
use CodeCorps.ModelCase
alias CodeCorps.RoleSkill
test "create_changeset with valid attributes" do
role_id = insert(:role).id
skill_id = insert(:skill).id
changeset = RoleSkill.create_changeset(%RoleSkill{}, %{role_id: role_id, skill_id: skill_id})
assert changeset.valid?
end
test "create_changeset requires role_id" do
skill_id = insert(:skill).id
changeset = RoleSkill.create_changeset(%RoleSkill{}, %{skill_id: skill_id})
refute changeset.valid?
assert_error_message(changeset, :role_id, "can't be blank")
end
test "create_changeset requires skill_id" do
role_id = insert(:role).id
changeset = RoleSkill.create_changeset(%RoleSkill{}, %{role_id: role_id})
refute changeset.valid?
assert_error_message(changeset, :skill_id, "can't be blank")
end
test "create_changeset requires id of actual role" do
role_id = -1
skill_id = insert(:skill).id
{result, changeset} =
RoleSkill.create_changeset(%RoleSkill{}, %{role_id: role_id, skill_id: skill_id})
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :role, "does not exist")
end
test "create_changeset requires id of actual skill" do
role_id = insert(:role).id
skill_id = -1
{result, changeset} =
RoleSkill.create_changeset(%RoleSkill{}, %{role_id: role_id, skill_id: skill_id})
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :skill, "does not exist")
end
describe "import_changeset" do
test "valid cat value included in cats is accepted" do
role_id = insert(:role).id
skill_id = insert(:skill).id
cat_value = 1
changeset = RoleSkill.import_changeset(%RoleSkill{}, %{role_id: role_id, skill_id: skill_id, cat: cat_value})
assert changeset.valid?
end
test "invalid cat value not included in cats is rejected" do
role_id = insert(:role).id
skill_id = insert(:skill).id
cat_value = 9
changeset = RoleSkill.import_changeset(%RoleSkill{}, %{role_id: role_id, skill_id: skill_id, cat: cat_value})
refute changeset.valid?
assert_error_message(changeset, :cat, "is invalid")
end
end
end
<|start_filename|>lib/code_corps_web/views/skill_view.ex<|end_filename|>
defmodule CodeCorpsWeb.SkillView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:title, :description, :inserted_at, :updated_at]
has_many :role_skills, serializer: CodeCorpsWeb.RoleSkillView, identifiers: :always
end
<|start_filename|>lib/code_corps/services/user_service.ex<|end_filename|>
defmodule CodeCorps.Services.UserService do
@moduledoc """
Handles CRUD operations for users.
When operations happen on `CodeCorps.User`, we need to make sure changes
are propagated to related records, e.g. `CodeCorps.StripePlatformCustomer` and
`CodeCorps.StripeConnectCustomer`
"""
alias CodeCorps.{Repo, StripeConnectCustomer, StripePlatformCustomer, User}
alias CodeCorps.StripeService.{StripeConnectCustomerService, StripePlatformCustomerService}
alias Ecto.{Changeset, Multi}
@doc """
Updates a `CodeCorps.User` record and, if necessary, associated
`CodeCorps.StripePlatformCustomer` and `CodeCorps.StripeConnectCustomer` records.
These related records inherit the email field from the user,
so they need to be kept in sync, both locally and on the Stripe platform.
Returns one of:
- `{:ok, %CodeCorps.User{}, nil, nil}`
- `{:ok, %CodeCorps.User{}, %CodeCorps.StripePlatformCustomer{}, nil}`
- `{:ok, %CodeCorps.User{}, %CodeCorps.StripePlatformCustomer{}, %CodeCorps.StripeConnectCustomer{}}`
- `{:error, %Ecto.Changeset{}}`
- `{:error, :unhandled}`
"""
def update(%User{} = user, attributes) do
changeset = user |> User.update_changeset(attributes)
do_update(changeset)
end
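# Illustrative sketch (not part of the original module): an update that changes
# the email runs the multi below and syncs the Stripe customer records, e.g.
#
#   UserService.update(user, %{"email" => "new@example.com"})
#   #=> {:ok, %User{}, %StripePlatformCustomer{} | nil, connect_customer_results | nil}
#
# whereas an update that leaves the email untouched simply returns
#
#   {:ok, %User{}, nil, nil}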
defp do_update(%Changeset{changes: %{email: _email}} = changeset) do
multi =
Multi.new
|> Multi.update(:update_user, changeset)
|> Multi.run(:update_platform_customer, &update_platform_customer/1)
|> Multi.run(:update_connect_customers, &update_connect_customers/1)
case Repo.transaction(multi) do
{:ok, %{
update_user: user,
update_platform_customer: update_platform_customer_result,
update_connect_customers: update_connect_customers_results
}} ->
{:ok, user, update_platform_customer_result, update_connect_customers_results}
{:error, :update_user, %Ecto.Changeset{} = changeset, %{}} ->
{:error, changeset}
{:error, _failed_operation, _failed_value, _changes_so_far} ->
{:error, :unhandled}
end
end
defp do_update(%Changeset{} = changeset) do
with {:ok, user} <- Repo.update(changeset) do
{:ok, user, nil, nil}
else
{:error, changeset} -> {:error, changeset}
_ -> {:error, :unhandled}
end
end
defp update_platform_customer(%{update_user: %User{id: user_id, email: email}}) do
StripePlatformCustomer
|> Repo.get_by(user_id: user_id)
|> do_update_platform_customer(%{email: email})
end
defp do_update_platform_customer(nil, _), do: {:ok, nil}
defp do_update_platform_customer(%StripePlatformCustomer{} = stripe_platform_customer, attributes) do
{:ok, %StripePlatformCustomer{} = platform_customer, _} =
StripePlatformCustomerService.update(stripe_platform_customer, attributes)
{:ok, platform_customer}
end
defp update_connect_customers(%{update_platform_customer: nil}), do: {:ok, nil}
defp update_connect_customers(%{update_platform_customer: %StripePlatformCustomer{email: email} = stripe_platform_customer }) do
case do_update_connect_customers(stripe_platform_customer, %{email: email}) do
[_h | _t] = results -> {:ok, results}
[] -> {:ok, nil}
end
end
@spec do_update_connect_customers(StripePlatformCustomer.t, map) :: [{:ok, StripeConnectCustomer.t}] | [{:error, Stripe.Error.t}]
defp do_update_connect_customers(stripe_platform_customer, attributes) do
stripe_platform_customer
|> Repo.preload([stripe_connect_customers: :stripe_connect_account])
|> Map.get(:stripe_connect_customers)
|> Enum.map(&do_update_connect_customer(&1, attributes))
end
@spec do_update_connect_customer(StripeConnectCustomer.t, map) :: {:ok, StripeConnectCustomer.t} | {:error, Stripe.Error.t}
defp do_update_connect_customer(%StripeConnectCustomer{} = stripe_connect_customer, attributes) do
StripeConnectCustomerService.update(stripe_connect_customer, attributes)
end
end
<|start_filename|>lib/code_corps/model/organization_github_app_installation.ex<|end_filename|>
defmodule CodeCorps.OrganizationGithubAppInstallation do
use CodeCorps.Model
schema "organization_github_app_installations" do
belongs_to :github_app_installation, CodeCorps.GithubAppInstallation
belongs_to :organization, CodeCorps.Organization
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def create_changeset(struct, params \\ %{}) do
struct
|> changeset(params)
end
defp changeset(struct, params) do
struct
|> cast(params, [:github_app_installation_id, :organization_id])
|> validate_required([:github_app_installation_id, :organization_id])
|> assoc_constraint(:github_app_installation, name: "organization_github_app_installations_github_app_installation_i")
|> assoc_constraint(:organization)
end
end
<|start_filename|>lib/code_corps_web/views/project_skill_view.ex<|end_filename|>
defmodule CodeCorpsWeb.ProjectSkillView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
has_one :project, type: "project", field: :project_id
has_one :skill, type: "skill", field: :skill_id
end
<|start_filename|>lib/code_corps_web/views/layout_view.ex<|end_filename|>
defmodule CodeCorpsWeb.LayoutView do
@moduledoc false
use CodeCorpsWeb, :view
end
<|start_filename|>lib/code_corps_web/views/token_view.ex<|end_filename|>
defmodule CodeCorpsWeb.TokenView do
@moduledoc false
use CodeCorpsWeb, :view
def render("show.json", %{token: token, user_id: user_id}) do
%{
token: token,
user_id: user_id,
}
end
def render("401.json", %{message: message}) do
%{
errors: [
%{
id: "UNAUTHORIZED",
title: "401 Unauthorized",
detail: message,
status: 401,
}
]
}
end
def render("403.json", %{message: message}) do
%{
errors: [
%{
id: "FORBIDDEN",
title: "403 Forbidden",
detail: message,
status: 403,
}
]
}
end
def render("delete.json", _) do
%{ok: true}
end
end
<|start_filename|>lib/code_corps/github/api/errors/pagination_error.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.Errors.PaginationError do
alias CodeCorps.GitHub.{APIError, HTTPClientError}
@type t :: %__MODULE__{
api_errors: list,
client_errors: list,
message: String.t,
retrieved_pages: list
}
defstruct [
api_errors: [],
client_errors: [],
message: "One or more pages failed to retrieve when paginating GitHub API resources",
retrieved_pages: []
]
@spec new({list, list}) :: t
def new({pages, errors}) do
%__MODULE__{
api_errors: errors |> Enum.filter(&api_error?/1),
client_errors: errors |> Enum.filter(&client_error?/1),
retrieved_pages: pages
}
end
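# Illustrative sketch (not part of the original module): `new/1` takes the
# `{retrieved_pages, errors}` tuple produced while paginating and sorts the
# errors by kind, e.g.
#
#   new({[page_1, page_2], [%APIError{}, %HTTPClientError{}]})
#   #=> %__MODULE__{retrieved_pages: [page_1, page_2],
#   #               api_errors: [%APIError{}], client_errors: [%HTTPClientError{}]}
#
# where `page_1` and `page_2` stand in for successfully fetched page responses.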
@spec api_error?(APIError.t | any) :: boolean
defp api_error?(%APIError{}), do: true
defp api_error?(_), do: false
@spec client_error?(HTTPClientError.t | any) :: boolean
defp client_error?(%HTTPClientError{}), do: true
defp client_error?(_), do: false
end
<|start_filename|>test/lib/code_corps/stripe_service/adapters/stripe_platform_card_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripePlatformCardTest do
use ExUnit.Case, async: true
import CodeCorps.StripeService.Adapters.StripePlatformCardAdapter, only: [to_params: 2]
@stripe_platform_card %Stripe.Card{
id: "card_123",
address_city: nil,
address_country: nil,
address_line1: nil,
address_line1_check: nil,
address_line2: nil,
address_state: nil,
address_zip: nil,
address_zip_check: nil,
brand: "Visa",
country: "US",
customer: "cus_123",
cvc_check: "unchecked",
dynamic_last4: nil,
exp_month: 11,
exp_year: 2016,
funding: "credit",
last4: "4242",
metadata: %{},
name: nil,
tokenization_method: nil
}
@local_map %{
"id_from_stripe" => "card_123",
"brand" => "Visa",
"exp_month" => 11,
"exp_year" => 2016,
"last4" => "4242",
"customer_id_from_stripe" => "cus_123",
"cvc_check" => "unchecked",
"name" => nil
}
describe "to_params/2" do
test "converts from stripe map to local properly" do
test_attributes = %{
"user_id" => 123,
"foo" => "bar"
}
expected_attributes = %{
"user_id" => 123,
}
{:ok, result} = to_params(@stripe_platform_card, test_attributes)
expected_map = Map.merge(@local_map, expected_attributes)
assert result == expected_map
end
end
end
<|start_filename|>lib/code_corps_web/controllers/stripe_platform_customer_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.StripePlatformCustomerController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.StripeService.StripePlatformCustomerService
alias CodeCorps.{StripePlatformCustomer, User}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
%StripePlatformCustomer{} = stripe_platform_customer <- StripePlatformCustomer |> Repo.get(id),
{:ok, :authorized} <- current_user |> Policy.authorize(:show, stripe_platform_customer, params) do
conn |> render("show.json-api", data: stripe_platform_customer)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %StripePlatformCustomer{}, params),
{:ok, %StripePlatformCustomer{} = stripe_platform_customer} <- StripePlatformCustomerService.create(params) do
conn |> put_status(:created) |> render("show.json-api", data: stripe_platform_customer)
end
end
end
<|start_filename|>test/lib/code_corps_web/views/task_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.TaskViewTest do
@moduledoc false
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request)
github_repo = insert(:github_repo)
task = insert(:task, order: 1000, github_issue: github_issue, github_pull_request: github_pull_request, github_repo: github_repo)
comment = insert(:comment, task: task)
task_skill = insert(:task_skill, task: task)
user_task = insert(:user_task, task: task)
task = CodeCorpsWeb.TaskController.preload(task)
rendered_json = render(CodeCorpsWeb.TaskView, "show.json-api", data: task)
expected_json = %{
"data" => %{
"attributes" => %{
"archived" => task.archived,
"body" => task.body,
"created-at" => task.created_at,
"created-from" => task.created_from,
"has-github-pull-request" => true,
"inserted-at" => task.inserted_at,
"markdown" => task.markdown,
"modified-at" => task.modified_at,
"modified-from" => task.modified_from,
"number" => task.number,
"order" => task.order,
"overall-status" => "open",
"status" => task.status,
"title" => task.title,
"updated-at" => task.updated_at
},
"id" => task.id |> Integer.to_string,
"relationships" => %{
"comments" => %{
"data" => [
%{
"id" => comment.id |> Integer.to_string,
"type" => "comment"
}
]
},
"project" => %{
"data" => %{
"id" => task.project_id |> Integer.to_string,
"type" => "project"
}
},
"github-issue" => %{
"data" => %{
"id" => task.github_issue_id |> Integer.to_string,
"type" => "github-issue"
}
},
"github-pull-request" => %{
"data" => %{
"id" => task.github_issue.github_pull_request_id |> Integer.to_string,
"type" => "github-pull-request"
}
},
"github-repo" => %{
"data" => %{
"id" => task.github_repo_id |> Integer.to_string,
"type" => "github-repo"
}
},
"task-skills" => %{
"data" => [
%{
"id" => task_skill.id |> Integer.to_string,
"type" => "task-skill"
}
]
},
"user" => %{
"data" => %{
"id" => task.user_id |> Integer.to_string,
"type" => "user"
}
},
"user-task" => %{
"data" => %{
"id" => user_task.id |> Integer.to_string,
"type" => "user-task"
}
},
"task-list" => %{
"data" => %{
"id" => task.task_list_id |> Integer.to_string,
"type" => "task-list"
}
}
},
"type" => "task",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
describe "has-github-pull-request" do
test "when pull request exists" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request)
task = insert(:task, github_issue: github_issue)
task = CodeCorpsWeb.TaskController.preload(task)
rendered_json = render(CodeCorpsWeb.TaskView, "show.json-api", data: task)
assert rendered_json["data"]["attributes"]["has-github-pull-request"]
end
test "when no pull request exists" do
task = insert(:task)
task = CodeCorpsWeb.TaskController.preload(task)
rendered_json = render(CodeCorpsWeb.TaskView, "show.json-api", data: task)
refute rendered_json["data"]["attributes"]["has-github-pull-request"]
end
end
describe "overall-status" do
test "when pull request is open" do
github_pull_request = insert(:github_pull_request, merged: false, state: "open")
github_issue = insert(:github_issue, github_pull_request: github_pull_request)
task = insert(:task, github_issue: github_issue)
task = CodeCorpsWeb.TaskController.preload(task)
rendered_json = render(CodeCorpsWeb.TaskView, "show.json-api", data: task)
assert rendered_json["data"]["attributes"]["overall-status"] == "open"
end
test "when pull request is closed" do
github_pull_request = insert(:github_pull_request, merged: false, state: "closed")
github_issue = insert(:github_issue, github_pull_request: github_pull_request)
task = insert(:task, github_issue: github_issue)
task = CodeCorpsWeb.TaskController.preload(task)
rendered_json = render(CodeCorpsWeb.TaskView, "show.json-api", data: task)
assert rendered_json["data"]["attributes"]["overall-status"] == "closed"
end
test "when pull request is merged" do
github_pull_request = insert(:github_pull_request, merged: false, state: "merged")
github_issue = insert(:github_issue, github_pull_request: github_pull_request)
task = insert(:task, github_issue: github_issue)
task = CodeCorpsWeb.TaskController.preload(task)
rendered_json = render(CodeCorpsWeb.TaskView, "show.json-api", data: task)
assert rendered_json["data"]["attributes"]["overall-status"] == "merged"
end
test "when task is open" do
task = insert(:task, status: "open")
task = CodeCorpsWeb.TaskController.preload(task)
rendered_json = render(CodeCorpsWeb.TaskView, "show.json-api", data: task)
assert rendered_json["data"]["attributes"]["overall-status"] == "open"
end
test "when task is closed" do
task = insert(:task, status: "closed")
task = CodeCorpsWeb.TaskController.preload(task)
rendered_json = render(CodeCorpsWeb.TaskView, "show.json-api", data: task)
assert rendered_json["data"]["attributes"]["overall-status"] == "closed"
end
end
end
<|start_filename|>priv/repo/migrations/20171012215106_create_github_issues.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.CreateGithubIssues do
use Ecto.Migration
def change do
create table(:github_issues) do
add :body, :text
add :closed_at, :utc_datetime
add :comments_url, :text
add :events_url, :text
add :github_created_at, :utc_datetime
add :github_id, :integer
add :github_updated_at, :utc_datetime
add :html_url, :text
add :labels_url, :text
add :locked, :boolean
add :number, :integer
add :state, :string
add :title, :text
add :url, :text
timestamps()
add :github_repo_id, references(:github_repos)
end
end
end
<|start_filename|>test/lib/code_corps_web/views/github_app_installation_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.GithubAppInstallationViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
organization = insert(:organization)
project = insert(:project)
user = insert(:user)
github_app_installation = insert(:github_app_installation, project: project, user: user)
organization_github_app_installation = insert(:organization_github_app_installation, github_app_installation: github_app_installation, organization: organization)
github_repo = insert(:github_repo, github_app_installation: github_app_installation)
github_app_installation =
github_app_installation
|> CodeCorpsWeb.GithubAppInstallationController.preload()
rendered_json =
CodeCorpsWeb.GithubAppInstallationView
|> render("show.json-api", data: github_app_installation)
expected_json = %{
"data" => %{
"id" => github_app_installation.id |> Integer.to_string,
"type" => "github-app-installation",
"attributes" => %{
"github-id" => github_app_installation.github_id,
"github-account-avatar-url" => github_app_installation.github_account_avatar_url,
"github-account-id" => github_app_installation.github_account_id,
"github-account-login" => github_app_installation.github_account_login,
"github-account-type" => github_app_installation.github_account_type,
"inserted-at" => github_app_installation.inserted_at,
"installed" => github_app_installation.installed,
"state" => github_app_installation.state,
"updated-at" => github_app_installation.updated_at
},
"relationships" => %{
"github-repos" => %{
"data" => [
%{"id" => github_repo.id |> Integer.to_string, "type" => "github-repo"}
]
},
"organization-github-app-installations" => %{
"data" => [
%{"id" => organization_github_app_installation.id |> Integer.to_string, "type" => "organization-github-app-installation"}
]
},
"project" => %{
"data" => %{"id" => github_app_installation.project_id |> Integer.to_string, "type" => "project"}
},
"user" => %{
"data" => %{"id" => github_app_installation.user_id |> Integer.to_string, "type" => "user"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>priv/repo/migrations/20171104013543_add_indexes_for_syncing.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddIndexesForSyncing do
use Ecto.Migration
def change do
create index(:tasks, [:github_issue_id, :project_id])
end
end
<|start_filename|>test/lib/code_corps/model/github_comment_test.exs<|end_filename|>
defmodule CodeCorps.GithubCommentTest do
use CodeCorps.ModelCase
alias CodeCorps.GithubComment
@valid_attrs %{
body: "I'm having a problem with this.",
github_created_at: "2011-04-22T13:33:48Z",
github_id: 1,
github_updated_at: "2014-03-03T18:58:10Z",
html_url: "https://github.com/octocat/Hello-World/issues/1347",
url: "https://api.github.com/repos/octocat/Hello-World/issues/1347",
}
@invalid_attrs %{}
test "create_changeset/2 with valid attributes" do
changeset = GithubComment.create_changeset(%GithubComment{}, @valid_attrs)
assert changeset.valid?
end
test "create_changeset/2 with invalid attributes" do
changeset = GithubComment.create_changeset(%GithubComment{}, @invalid_attrs)
refute changeset.valid?
end
end
<|start_filename|>lib/code_corps/github/api/repository.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.Repository do
@moduledoc ~S"""
Functions for retrieving a GitHub repository's issues, pull requests, and
comments from the GitHub API.
"""
alias CodeCorps.{
GitHub,
GitHub.API,
GithubAppInstallation,
GithubRepo,
}
@doc ~S"""
Retrieves issues for a repository
All pages of records are retrieved.
Closed issues are included.
"""
@spec issues(GithubRepo.t) :: {:ok, list(map)} | {:error, GitHub.paginated_endpoint_error}
def issues(%GithubRepo{
github_app_installation: %GithubAppInstallation{
github_account_login: owner
} = installation,
name: repo
}) do
with {:ok, access_token} <- API.Installation.get_access_token(installation) do
"repos/#{owner}/#{repo}/issues"
|> GitHub.get_all(%{}, [access_token: access_token, params: [per_page: 100, state: "all"]])
else
{:error, error} -> {:error, error}
end
end
@doc ~S"""
Retrieves pull requests for a repository.
All pages of records are retrieved.
"""
@spec pulls(GithubRepo.t) :: {:ok, list(map)} | {:error, GitHub.paginated_endpoint_error}
def pulls(%GithubRepo{
github_app_installation: %GithubAppInstallation{
github_account_login: owner
} = installation,
name: repo
}) do
with {:ok, access_token} <- API.Installation.get_access_token(installation) do
"repos/#{owner}/#{repo}/pulls"
|> GitHub.get_all(%{}, [access_token: access_token, params: [per_page: 100, state: "all"]])
else
{:error, error} -> {:error, error}
end
end
@doc ~S"""
Retrieves comments from all issues in a github repository.
"""
@spec issue_comments(GithubRepo.t) :: {:ok, list(map)} | {:error, GitHub.paginated_endpoint_error}
def issue_comments(%GithubRepo{
github_app_installation: %GithubAppInstallation{
github_account_login: owner
} = installation,
name: repo
}) do
with {:ok, access_token} <- API.Installation.get_access_token(installation) do
"repos/#{owner}/#{repo}/issues/comments"
|> GitHub.get_all(%{}, [access_token: access_token, params: [per_page: 100]])
else
{:error, error} -> {:error, error}
end
end
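# Illustrative usage (a sketch; assumes the repo's installation is preloaded
# and that a valid installation access token can be obtained):
#
#   github_repo =
#     CodeCorps.GithubRepo
#     |> CodeCorps.Repo.get(repo_id)
#     |> CodeCorps.Repo.preload(:github_app_installation)
#
#   {:ok, issues}   = CodeCorps.GitHub.API.Repository.issues(github_repo)
#   {:ok, pulls}    = CodeCorps.GitHub.API.Repository.pulls(github_repo)
#   {:ok, comments} = CodeCorps.GitHub.API.Repository.issue_comments(github_repo)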
end
<|start_filename|>lib/code_corps/accounts/users.ex<|end_filename|>
defmodule CodeCorps.Accounts.Users do
alias CodeCorps.ProjectUser
import Ecto.Query
def project_filter(query, %{"project_id" => project_id}) do
from user in query,
join: pu in ProjectUser, on: pu.user_id == user.id and pu.project_id == ^project_id
end
def project_filter(query, _), do: query
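# Illustrative usage (a sketch; `project` is a hypothetical preexisting record):
#
#   CodeCorps.User
#   |> CodeCorps.Accounts.Users.project_filter(%{"project_id" => project.id})
#   |> CodeCorps.Repo.all()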
end
<|start_filename|>test/lib/code_corps/stripe_service/events/connect_charge_succeeded_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Events.ConnectChargeSucceededTest do
@moduledoc false
use CodeCorps.StripeCase
use Bamboo.Test
alias CodeCorps.{
Project, Repo, StripeConnectCharge, StripeTesting
}
alias CodeCorps.StripeService.Events.ConnectChargeSucceeded
test "handling event creates charge and sends receipt" do
account = insert(:stripe_connect_account)
charge_fixture = StripeTesting.Helpers.load_fixture("charge")
insert(:stripe_connect_customer, id_from_stripe: charge_fixture.customer)
invoice_fixture = StripeTesting.Helpers.load_fixture(charge_fixture.invoice)
insert(:stripe_connect_subscription, id_from_stripe: invoice_fixture.subscription)
project = Repo.one(Project)
insert(:donation_goal, current: true, project: project)
event = %Stripe.Event{
data: %{object: charge_fixture},
user_id: account.id_from_stripe
}
Bamboo.SentEmail.start_link()
assert {
:ok,
%StripeConnectCharge{} = charge,
%Bamboo.Email{} = email
} = ConnectChargeSucceeded.handle(event)
# assert email was sent
assert_delivered_email email
# assert event was tracked by Segment
user_id = charge.user_id
charge_id = charge.id
currency = String.capitalize(charge.currency) # Segment requires this in ISO 4217 format
amount = charge.amount / 100
assert_received {
:track,
^user_id,
"Created Stripe Connect Charge",
%{charge_id: ^charge_id, currency: ^currency, revenue: ^amount, user_id: ^user_id}
}
end
end
<|start_filename|>lib/code_corps/policy/github_repo.ex<|end_filename|>
defmodule CodeCorps.Policy.GithubRepo do
@moduledoc """
Handles `User` authorization of actions on `GithubRepo` records
"""
import CodeCorps.Policy.Helpers, only: [get_project: 1, administered_by?: 2]
alias CodeCorps.{GithubRepo, User}
def update?(%User{} = user, %GithubRepo{project_id: nil}, %{"project_id" => _} = params) do
params |> get_project |> administered_by?(user)
end
def update?(%User{} = user, %GithubRepo{} = github_repo, %{}) do
github_repo |> get_project |> administered_by?(user)
end
end
<|start_filename|>test/lib/code_corps/validators/time_validator_test.exs<|end_filename|>
defmodule CodeCorps.Validators.TimeValidatorTest do
use ExUnit.Case, async: true
import CodeCorps.Validators.TimeValidator
@previous_time DateTime.utc_now
describe "validate_time_not_before/2" do
test "when the time happened before" do
# set the time to 1 day before the previous (recorded) time
new_time = @previous_time |> Timex.shift(days: -1)
changeset = cast_times(@previous_time, new_time, :modified_at)
changeset = changeset |> validate_time_not_before(:modified_at)
refute changeset.valid?
end
test "when the time happened at the same time" do
new_time = @previous_time
changeset = cast_times(@previous_time, new_time, :modified_at)
changeset = changeset |> validate_time_not_before(:modified_at)
assert changeset.valid?
end
test "when the time happened at the same second but with microseconds of difference" do
previous_time = @previous_time |> Timex.shift(milliseconds: 500)
new_time = previous_time |> truncate(:second)
changeset = cast_times(previous_time, new_time, :modified_at)
changeset = changeset |> validate_time_not_before(:modified_at)
assert changeset.valid?
end
test "when the time happened after" do
# set the time to 1 day after the previous (recorded) time
new_time = @previous_time |> Timex.shift(days: 1)
changeset = cast_times(@previous_time, new_time, :modified_at)
changeset = changeset |> validate_time_not_before(:modified_at)
assert changeset.valid?
end
end
defp cast_times(previous_time, new_time, field) do
data = Map.put(%{}, field, previous_time)
fields = Map.put(%{}, field, :utc_datetime)
params = Map.put(%{}, field, new_time)
Ecto.Changeset.cast({data, fields}, params, [field])
end
end
<|start_filename|>priv/repo/migrations/20171028173508_update_null_comment_values_from_github_integrations.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.UpdateNullCommentValuesFromGithubIntegrations do
use Ecto.Migration
@consecutive_whitespace_regex ~r/\s+/
def up do
execute created_at_update()
execute created_from_update()
execute modified_at_update()
execute modified_from_update()
end
def down do
# no-op
end
defp created_at_update do
squish(
"""
UPDATE comments
SET created_at = inserted_at
WHERE created_at IS NULL
"""
)
end
defp created_from_update do
squish(
"""
UPDATE comments
SET created_from = 'code_corps'
WHERE created_from IS NULL
"""
)
end
defp modified_at_update do
squish(
"""
UPDATE comments
SET modified_at = updated_at
WHERE modified_at IS NULL
"""
)
end
defp modified_from_update do
squish(
"""
UPDATE comments
SET modified_from = 'code_corps'
WHERE modified_from IS NULL
"""
)
end
defp squish(query) do
String.replace(query, @consecutive_whitespace_regex, " ") |> String.trim
end
end
<|start_filename|>lib/code_corps/github/adapters/utils/body_decorator.ex<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.Utils.BodyDecorator do
@moduledoc ~S"""
Decorates and undecorates the body of GitHub issues and comments as needed.
"""
alias CodeCorps.{
Comment,
GithubIssue,
Task,
User,
WebClient
}
@separator "\r\n\r\n[//]: # (Please type your edits below this line)\r\n\r\n---"
@linebreak "\r\n\r\n"
@spec add_code_corps_header(map, Comment.t | Task.t | GithubIssue.t) :: map
def add_code_corps_header(%{"body" => body} = attrs, %Comment{user: %User{github_id: nil}} = comment) do
modified_body = build_header(comment) <> @separator <> @linebreak <> body
attrs |> Map.put("body", modified_body)
end
def add_code_corps_header(%{"body" => body} = attrs, %Task{user: %User{github_id: nil}} = task) do
modified_body = build_header(task) <> @separator <> @linebreak <> body
attrs |> Map.put("body", modified_body)
end
def add_code_corps_header(%{} = attrs, _), do: attrs
@spec build_header(Comment.t | Task.t) :: String.t
defp build_header(%Comment{task: %Task{} = task, user: %User{} = user}), do: do_build_header(task, user)
defp build_header(%Task{user: %User{} = user} = task), do: do_build_header(task, user)
@spec do_build_header(Task.t, User.t) :: String.t
defp do_build_header(%Task{} = task, %User{username: username} = user) do
"Posted by [**#{username}**](#{user |> WebClient.url}) from [Code Corps](#{task |> WebClient.url})"
end
@spec remove_code_corps_header(map) :: map
def remove_code_corps_header(%{body: _} = attrs) do
attrs |> Map.update(:body, nil, &clean_body/1)
end
@spec clean_body(String.t | nil) :: String.t | nil
defp clean_body("Posted by " <> @separator <> _rest = body) do
body
|> String.split(@separator)
|> Enum.drop(1) |> Enum.join
|> String.trim_leading
end
defp clean_body(body), do: body
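# Illustrative round trip (a sketch; `comment` is a hypothetical preloaded
# `CodeCorps.Comment` whose author has no linked GitHub account, i.e. the
# user's `github_id` is nil):
#
#   attrs = %{"body" => "Original text"}
#   decorated = add_code_corps_header(attrs, comment)
#   # decorated["body"] now starts with
#   # "Posted by [**username**](...) from [Code Corps](...)", followed by the
#   # separator above and then the original text
#
#   remove_code_corps_header(%{body: decorated["body"]})
#   #=> %{body: "Original text"}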
end
<|start_filename|>test/lib/code_corps/emails/organization_invite_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.OrganizationInviteEmailTest do
use CodeCorps.ModelCase
use Bamboo.Test
alias CodeCorps.{Emails.OrganizationInviteEmail, WebClient}
test "organization email invite works" do
invite = insert(:organization_invite)
email = OrganizationInviteEmail.create(invite)
assert email.from == "Code Corps<<EMAIL>>"
assert email.to == invite.email
template_model = email.private.template_model
params =
%{code: invite.code, organization_name: invite.organization_name}
|> URI.encode_query
invite_url = "#{WebClient.url()}/organizations/new?#{params}"
assert template_model == %{
invite_url: invite_url,
organization_name: invite.organization_name,
subject: "Create your first project on Code Corps"
}
end
end
<|start_filename|>lib/code_corps/github/webhook/event_support.ex<|end_filename|>
defmodule CodeCorps.GitHub.Webhook.EventSupport do
@moduledoc """
Determines event support for a GitHub event type
"""
@type support_status :: :supported | :unsupported | :ignored
@supported_events [
{"installation", "created"},
{"installation_repositories", "added"},
{"installation_repositories", "removed"},
{"issue_comment", "created"},
{"issue_comment", "edited"},
{"issue_comment", "deleted"},
{"issues", "opened"},
{"issues", "edited"},
{"issues", "closed"},
{"issues", "reopened"},
{"pull_request", "opened"},
{"pull_request", "edited"},
{"pull_request", "closed"},
{"pull_request", "reopened"},
]
@doc ~S"""
Utility function. Returns list of supported events as `{type, action}` tuples.
Supported events are events of types and actions we currently fully support.
"""
@spec supported_events :: list(tuple)
def supported_events, do: @supported_events
@unsupported_events [
{"installation", "deleted"},
{"issues", "assigned"},
{"issues", "unassigned"},
{"issues", "labeled"},
{"issues", "unlabeled"},
{"issues", "milestoned"},
{"issues", "demilestoned"},
{"pull_request", "assigned"},
{"pull_request", "unassigned"},
{"pull_request", "review_requested"},
{"pull_request", "review_request_removed"},
{"pull_request", "labeled"},
{"pull_request", "unlabeled"},
{"pull_request", "synchronize"},
]
@doc ~S"""
Utility function. Returns list of unsupported events as `{type, action}`
tuples.
Unsupported events are events whose types we technically support, but whose
actions we do not yet handle.
"""
@spec unsupported_events :: list(tuple)
def unsupported_events, do: @unsupported_events
@doc ~S"""
Returns `:supported` if the GitHub event/action pair is fully handled by the
system, `:unsupported` if the event type is known but the action is not yet
handled, and `:ignored` otherwise.
"""
@spec status(String.t, String.t) :: support_status
def status(type, action) when {type, action} in @supported_events, do: :supported
def status(type, action) when {type, action} in @unsupported_events, do: :unsupported
def status(_type, _action), do: :ignored
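# Illustrative results, derived from the lists above:
#
#   status("issues", "opened")    #=> :supported
#   status("issues", "labeled")   #=> :unsupported
#   status("ping", "anything")    #=> :ignored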
end
<|start_filename|>lib/code_corps_web/views/preview_view.ex<|end_filename|>
defmodule CodeCorpsWeb.PreviewView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:markdown, :body, :inserted_at, :updated_at]
has_one :user, type: "user", field: :user_id
end
<|start_filename|>lib/code_corps/github/adapters/pull_request.ex<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.PullRequest do
@mapping [
{:additions, ["additions"]},
{:body, ["body"]},
{:changed_files, ["changed_files"]},
{:closed_at, ["closed_at"]},
{:comments, ["comments"]},
{:comments_url, ["comments_url"]},
{:commits, ["commits"]},
{:commits_url, ["commits_url"]},
{:deletions, ["deletions"]},
{:diff_url, ["diff_url"]},
{:github_created_at, ["created_at"]},
{:github_id, ["id"]},
{:github_updated_at, ["updated_at"]},
{:html_url, ["html_url"]},
{:issue_url, ["issue_url"]},
{:locked, ["locked"]},
{:merge_commit_sha, ["merge_commit_sha"]},
{:mergeable_state, ["mergeable_state"]},
{:merged, ["merged"]},
{:merged_at, ["merged_at"]},
{:number, ["number"]},
{:patch_url, ["patch_url"]},
{:review_comment_url, ["review_comment_url"]},
{:review_comments, ["review_comments"]},
{:review_comments_url, ["review_comments_url"]},
{:state, ["state"]},
{:statuses_url, ["statuses_url"]},
{:title, ["title"]},
{:url, ["url"]}
]
@spec from_api(map) :: map
def from_api(%{} = payload) do
payload |> CodeCorps.Adapter.MapTransformer.transform(@mapping)
end
end
<|start_filename|>lib/code_corps/model/stripe_event.ex<|end_filename|>
defmodule CodeCorps.StripeEvent do
@moduledoc """
Represents a reference to single Stripe API Event object
## Fields
* `endpoint` - "connect" or "platform"
* `id_from_stripe` - Stripe's `id`
* `status` - "unprocessed", "processed", or "errored"
## Note on `status`
When the event is received via a webhook, it is stored as "unprocessed".
If there is an issue during processing, it is set to "errored". Once
successfully processed, it is set to "processed".
There are cases where Stripe can send multiple webhooks for the same event,
so when such a request is received, an event that is "errored" or "unprocessed"
can be processed again, while a "processed" event is ignored.
"""
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "stripe_events" do
field :endpoint, :string, null: false
field :id_from_stripe, :string, null: false
field :ignored_reason, :string
field :object_id, :string
field :object_type, :string
field :status, :string, default: "unprocessed"
field :type, :string, null: false
field :user_id, :string
timestamps()
end
@doc """
Builds a changeset for storing a new event reference into the database.
The `status` field is set to "processing".
"""
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:endpoint, :id_from_stripe, :object_id, :object_type, :type, :user_id])
|> validate_required([:endpoint, :id_from_stripe, :object_id, :object_type, :type])
|> put_change(:status, "processing")
|> validate_inclusion(:status, states())
|> validate_inclusion(:endpoint, endpoints())
|> unique_constraint(:id_from_stripe)
end
@doc """
Builds a changeset for updating the status of an existing event reference.
Accepts `:status` only and ensures it's one of "unprocessed", "processed"
or "errored".
"""
def update_changeset(struct, params) do
struct
|> cast(params, [:ignored_reason, :status])
|> validate_required([:status])
|> validate_inclusion(:status, states())
end
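# Illustrative lifecycle sketch (hypothetical webhook-handling code, not part
# of this module):
#
#   {:ok, event} =
#     %CodeCorps.StripeEvent{}
#     |> CodeCorps.StripeEvent.create_changeset(%{
#       "endpoint" => "platform",
#       "id_from_stripe" => "evt_123",
#       "object_id" => "cus_123",
#       "object_type" => "customer",
#       "type" => "customer.updated"
#     })
#     |> CodeCorps.Repo.insert()
#
#   # ... process the event, then record the outcome:
#   event
#   |> CodeCorps.StripeEvent.update_changeset(%{"status" => "processed"})
#   |> CodeCorps.Repo.update()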
defp endpoints do
~w{ connect platform }
end
defp states do
~w{ errored ignored processed processing unhandled unprocessed }
end
end
<|start_filename|>lib/code_corps/github/sync/github_app_installation/github_app_installation.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubAppInstallation do
import Ecto.Query
alias CodeCorps.{GithubAppInstallation, GitHub.Sync, Repo, User}
alias Ecto.Changeset
@type commit_result ::
{:ok, GithubAppInstallation.t()} | {:error, Changeset.t()}
@type outcome ::
commit_result() | {:error, :multiple_unprocessed_installations_found}
@doc ~S"""
Tries to sync a GithubAppInstallation record using a Github API payload.
The process is branching.
If the installation can be matched by github id, then it is simply updated.
If the installation cannot be matched by github id, then the system tries to
locate the user, through the sender github id.
If that fails, an unassociated installation is created.
If a user is found, the system looks for a "skeleton" installation. This is
a `CodeCorps.GithubAppInstallation` record which was created from the project
integration page, but for which the webhook for the next step (actually
performing the installation on GitHub.com) has not yet arrived.
This "skeleton" record is associated to a project and a user, but does
not have any github data yet.
In this case, the system assumes a single "skeleton" installation. If multiple
are found, an error tuple is returned.
If an installation is matched this way, it gets updated.
Finally, if no installation has been matched in this alternative way, an
installation associated to a user, but not associated to a project gets
created.
"""
@spec sync(map) :: outcome()
def sync(%{} = payload) do
case payload |> find_installation() do
%GithubAppInstallation{} = installation ->
installation |> update_installation(payload)
nil ->
payload |> sync_unmatched(payload |> find_user())
end
end
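# Illustrative outcomes (hypothetical, minimal payload shapes):
#
#   sync(%{"installation" => %{"id" => 123}, "sender" => %{"id" => 456}})
#   #=> {:ok, %GithubAppInstallation{}} when the record is saved
#   #=> {:error, %Ecto.Changeset{}} when the record cannot be saved
#   #=> {:error, :multiple_unprocessed_installations_found} when the matched
#   #   user has more than one "skeleton" installation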
@spec sync_unmatched(map, User.t() | nil) ::
commit_result() | {:error, :multiple_unprocessed_installations_found}
defp sync_unmatched(%{} = payload, nil) do
payload |> create_installation()
end
defp sync_unmatched(%{} = payload, %User{} = user) do
case user |> find_unprocessed_installations() do
[] ->
create_installation(payload, user)
[%GithubAppInstallation{} = installation] ->
update_installation(installation, payload)
[_|_] ->
{:error, :multiple_unprocessed_installations_found}
end
end
@spec find_user(map) :: User.t() | nil
defp find_user(%{"sender" => %{"id" => github_id}}) do
Repo.get_by(User, github_id: github_id)
end
@spec find_installation(any) :: GithubAppInstallation.t() | nil
defp find_installation(%{"installation" => %{"id" => github_id}}) do
GithubAppInstallation |> Repo.get_by(github_id: github_id)
end
@spec find_unprocessed_installations(User.t()) ::
list(GithubAppInstallation.t())
defp find_unprocessed_installations(%User{id: user_id}) do
GithubAppInstallation
|> where([i], is_nil(i.github_id) and i.user_id == ^user_id)
|> Repo.all()
end
@spec create_installation(map, User.t() | nil) :: commit_result()
defp create_installation(%{} = payload, user \\ nil) do
payload
|> Sync.GithubAppInstallation.Changeset.create_changeset(user)
|> Repo.insert()
end
@spec update_installation(GithubAppInstallation.t, map) :: commit_result()
defp update_installation(%GithubAppInstallation{} = installation, %{} = payload) do
installation
|> Sync.GithubAppInstallation.Changeset.update_changeset(payload)
|> Repo.update()
end
end
<|start_filename|>test/lib/code_corps_web/controllers/task_list_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.TaskListControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :task_list
describe "index" do
test "lists all entries", %{conn: conn} do
[task_list_1, task_list_2] = insert_pair(:task_list)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([task_list_1.id, task_list_2.id])
end
test "lists all entries by order", %{conn: conn} do
# Has to be done manually. Inserting as a list is too quick.
# Field lacks the resolution to differentiate.
project = insert(:project)
task_list_1 = insert(:task_list, project: project, order: 2000)
task_list_2 = insert(:task_list, project: project, order: 1000)
task_list_3 = insert(:task_list, project: project, order: 3000)
path = conn |> task_list_path(:index)
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([task_list_2.id, task_list_1.id, task_list_3.id])
end
test "lists all task lists for a project", %{conn: conn} do
project_1 = insert(:project)
project_2 = insert(:project)
insert(:task_list, project: project_1)
insert(:task_list, project: project_1)
insert(:task_list, project: project_2)
json =
conn
|> get("projects/#{project_1.id}/task-lists")
|> json_response(200)
assert json["data"] |> Enum.count == 2
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
task_list = insert(:task_list)
conn
|> request_show(task_list)
|> json_response(200)
|> assert_id_from_response(task_list.id)
end
test "shows task list by id for project", %{conn: conn} do
task_list = insert(:task_list)
path = conn |> project_task_list_path(:show, task_list.project_id, task_list.id)
data = conn |> get(path) |> json_response(200)
assert data["data"]["id"] == "#{task_list.id}"
assert data["data"]["type"] == "task-list"
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
end
<|start_filename|>priv/repo/migrations/20180113002017_normalize_organization_user_type.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.NormalizeOrganizationUserType do
use Ecto.Migration
def up do
execute(
"""
UPDATE github_users
SET type = 'organization'
WHERE type = 'Organization'
"""
)
end
def down do
execute(
"""
UPDATE github_users
SET type = 'Organization'
WHERE type = 'organization'
"""
)
end
end
<|start_filename|>lib/code_corps/github/sync/github_issue/github_issue.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubIssue do
@moduledoc ~S"""
In charge of finding or creating a `CodeCorps.GithubIssue` to link with a
`CodeCorps.Task` when processing a GitHub Issue payload.
The only entry point is `create_or_update_issue/2`.
"""
alias CodeCorps.{
GitHub.Adapters,
GitHub.Sync,
GithubIssue,
GithubPullRequest,
GithubRepo,
GithubUser,
Repo
}
alias Ecto.Changeset
@type result :: {:ok, GithubIssue.t} | {:error, Changeset.t}
@doc ~S"""
Creates or updates a `CodeCorps.GithubIssue` from a GitHub issue API payload.
The created record is associated to the provided `CodeCorps.GithubRepo` and,
optionally, to a provided `CodeCorps.GithubPullRequest`.
The created record is also associated with a matched `CodeCorps.GithubUser`,
which is created if necessary.
"""
@spec create_or_update_issue(map, GithubRepo.t, GithubPullRequest.t | nil) :: result
def create_or_update_issue(%{} = payload, %GithubRepo{} = github_repo, github_pull_request \\ nil) do
with {:ok, %GithubUser{} = github_user} <- Sync.GithubUser.create_or_update_github_user(payload) do
payload
|> find_or_init()
|> GithubIssue.changeset(payload |> Adapters.Issue.to_issue())
|> Changeset.put_assoc(:github_user, github_user)
|> Changeset.put_assoc(:github_repo, github_repo)
|> maybe_put_github_pull_request(github_pull_request)
|> Repo.insert_or_update()
else
{:error, %Changeset{} = changeset} -> {:error, changeset}
end
end
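# Illustrative calls (a sketch; `issue_payload`, `github_repo` and
# `github_pull_request` are hypothetical, preexisting values):
#
#   {:ok, %GithubIssue{}} = create_or_update_issue(issue_payload, github_repo)
#   {:ok, %GithubIssue{}} =
#     create_or_update_issue(issue_payload, github_repo, github_pull_request)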
@spec maybe_put_github_pull_request(Changeset.t, GithubPullRequest.t | nil) :: Changeset.t
defp maybe_put_github_pull_request(%Changeset{} = changeset, %GithubPullRequest{} = github_pull_request) do
changeset |> Changeset.put_assoc(:github_pull_request, github_pull_request)
end
defp maybe_put_github_pull_request(%Changeset{} = changeset, nil) do
changeset
end
@spec find_or_init(map) :: GithubIssue.t
defp find_or_init(%{"id" => github_id}) do
case GithubIssue |> Repo.get_by(github_id: github_id) |> Repo.preload([:github_user, :github_repo, :github_pull_request]) do
nil -> %GithubIssue{}
%GithubIssue{} = github_issue -> github_issue
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/role_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.RoleControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :role
@valid_attrs %{ability: "Backend Development", kind: "technology", name: "Backend Developer"}
@invalid_attrs %{ability: "Juggling", kind: "circus", name: "Juggler"}
describe "index" do
test "lists all entries on index", %{conn: conn} do
[role_1, role_2] = insert_pair(:role)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([role_1.id, role_2.id])
end
end
describe "create" do
@tag authenticated: :admin
test "creates and renders resource when data is valid", %{conn: conn} do
assert conn |> request_create(@valid_attrs) |> json_response(201)
end
@tag authenticated: :admin
test "renders 422 when data is invalid", %{conn: conn} do
assert conn |> request_create(@invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "enders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
end
<|start_filename|>test/lib/code_corps/github/sync/github_pull_request/github_pull_request_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubPullRequestTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GitHub.Sync,
GitHub.Adapters,
GithubPullRequest,
Repo
}
@payload load_event_fixture("pull_request_opened")
describe "create_or_update_pull_request/1" do
test "creates pull request if none exists" do
%{"pull_request" => attrs} = @payload
github_repo = insert(:github_repo)
{:ok, %GithubPullRequest{} = created_pull_request} =
Sync.GithubPullRequest.create_or_update_pull_request(attrs, github_repo)
assert Repo.one(GithubPullRequest)
created_attributes =
attrs
|> Adapters.PullRequest.from_api()
|> Map.delete(:closed_at)
|> Map.delete(:merge_commit_sha)
|> Map.delete(:merged_at)
returned_pull_request = Repo.get_by(GithubPullRequest, created_attributes)
assert returned_pull_request.id == created_pull_request.id
assert returned_pull_request.github_repo_id == github_repo.id
end
test "updates pull request if it already exists" do
%{"pull_request" => %{"id" => pull_request_id} = attrs} = @payload
github_repo = insert(:github_repo)
pull_request = insert(:github_pull_request, github_id: pull_request_id, github_repo: github_repo)
{:ok, %GithubPullRequest{} = updated_pull_request} =
Sync.GithubPullRequest.create_or_update_pull_request(attrs, github_repo)
assert updated_pull_request.id == pull_request.id
assert updated_pull_request.github_repo_id == github_repo.id
end
test "returns changeset if payload is somehow not as expected" do
bad_payload = @payload |> put_in(["pull_request", "number"], nil)
%{"pull_request" => attrs} = bad_payload
github_repo = insert(:github_repo)
{:error, changeset} =
Sync.GithubPullRequest.create_or_update_pull_request(attrs, github_repo)
refute changeset.valid?
end
end
end
<|start_filename|>priv/repo/migrations/20171121144138_change_managed_to_type_on_stripe_connect_account.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.ChangeManagedToTypeOnStripeConnectAccount do
use Ecto.Migration
import Ecto.Query
alias CodeCorps.Repo
def up do
alter table(:stripe_connect_accounts) do
add :type, :string, null: false, default: "custom"
end
flush()
from(
a in "stripe_connect_accounts",
where: [managed: false],
update: [set: [type: "standard"]]
) |> Repo.update_all([])
from(
a in "stripe_connect_accounts",
where: [managed: true],
update: [set: [type: "custom"]]
) |> Repo.update_all([])
flush()
alter table(:stripe_connect_accounts) do
remove :managed
end
end
def down do
alter table(:stripe_connect_accounts) do
add :managed, :boolean, default: true, null: false
end
flush()
from(
a in "stripe_connect_accounts",
where: [type: "standard"],
update: [set: [managed: false]]
) |> Repo.update_all([])
from(
a in "stripe_connect_accounts",
where: [type: "express"],
update: [set: [managed: true]]
) |> Repo.update_all([])
from(
a in "stripe_connect_accounts",
where: [type: "custom"],
update: [set: [managed: true]]
) |> Repo.update_all([])
flush()
alter table(:stripe_connect_accounts) do
remove :type
end
end
end
<|start_filename|>lib/code_corps_web/views/error_view.ex<|end_filename|>
defmodule CodeCorpsWeb.ErrorView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
def render("404.json-api", _assigns) do
%{
title: "404 Not Found",
detail: "404 Not Found",
status: "404"
}
|> JaSerializer.ErrorSerializer.format
end
def render("500.json-api", _assigns) do
%{
title: "500 Internal Server Error",
detail: "500 Internal Server Error",
status: "500"
}
|> JaSerializer.ErrorSerializer.format
end
def render("github-error.json", %{message: message}) do
%{
title: "GitHub API error",
detail: message,
status: "500"
}
|> JaSerializer.ErrorSerializer.format
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.json-api", assigns
end
end
<|start_filename|>lib/code_corps/model/stripe_file_upload.ex<|end_filename|>
defmodule CodeCorps.StripeFileUpload do
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "stripe_file_uploads" do
field :created, :integer
field :id_from_stripe, :string, null: false
field :purpose, :string
field :size, :integer
field :type, :string
field :url, :string
belongs_to :stripe_connect_account, CodeCorps.StripeConnectAccount
timestamps()
end
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:created, :id_from_stripe, :purpose, :size, :type, :url, :stripe_connect_account_id])
|> validate_required([:id_from_stripe])
|> assoc_constraint(:stripe_connect_account)
end
end
<|start_filename|>test/lib/code_corps/github/event/installation_repositories/validator_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.InstallationRepositories.ValidatorTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Event.InstallationRepositories.Validator
describe "valid?/1" do
test "returns true for any Issues event fixture" do
assert "installation_repositories_added" |> load_event_fixture() |> Validator.valid?
assert "installation_repositories_removed" |> load_event_fixture() |> Validator.valid?
end
test "returns false for an unsupported structure" do
refute Validator.valid?("foo")
refute Validator.valid?(%{"action" => "foo", "foo" => "bar"})
refute Validator.valid?(%{"action" => "foo", "installation" => %{"bar" => "baz"}})
refute Validator.valid?(%{"action" => "added", "installation" => %{"id" => "foo"}, "repositories_added" => [%{"id" => "foo"}]})
refute Validator.valid?(%{"action" => "removed", "installation" => %{"id" => "foo"}, "repositories_removed" => [%{"id" => "ba"}]})
refute Validator.valid?(%{"action" => "added", "installation" => %{"id" => "foo"}, "repositories_added" => ["foo"]})
refute Validator.valid?(%{"action" => "removed", "installation" => %{"id" => "foo"}, "repositories_removed" => ["bar"]})
refute Validator.valid?(%{"action" => "added", "installation" => %{"id" => "foo"}, "repositories_added" => "foo"})
refute Validator.valid?(%{"action" => "removed", "installation" => %{"id" => "foo"}, "repositories_removed" => "bar"})
end
end
end
<|start_filename|>lib/code_corps/github/sync/task/task.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Task do
alias CodeCorps.{
GitHub.Sync,
GitHub.Utils.ResultAggregator,
GithubIssue,
GithubRepo,
Task,
User,
Repo
}
alias Ecto.Changeset
@type commit_result_aggregate ::
{:ok, list(Task.t())} | {:error, {list(Task.t()), list(Changeset.t())}}
@type commit_result :: {:ok, Task.t()} | {:error, Changeset.t()}
@doc """
When provided a `CodeCorps.GithubIssue` and a `CodeCorps.User`, creates or
updates a `CodeCorps.Task` for the `CodeCorps.Project` associated with the
issue's `CodeCorps.GithubRepo`.
"""
@spec sync_github_issue(GithubIssue.t(), User.t()) :: commit_result()
def sync_github_issue(%GithubIssue{} = github_issue, %User{} = user) do
%GithubIssue{
github_repo: %GithubRepo{} = github_repo
} = github_issue |> Repo.preload(:github_repo)
github_issue
|> find_or_create_task(github_repo, user)
end
@doc ~S"""
Creates or updates `CodeCorps.Task` records for each `CodeCorps.GithubIssue`
record that relates to the `CodeCorps.GithubRepo`:
- Create or update the `CodeCorps.Task`
- Associate the `CodeCorps.Task` record with the `CodeCorps.User` that
relates to the `CodeCorps.GithubUser` for the `CodeCorps.GithubIssue`
"""
@spec sync_github_repo(GithubRepo.t()) :: commit_result_aggregate()
def sync_github_repo(%GithubRepo{} = repo) do
%GithubRepo{github_issues: issues} =
repo |> Repo.preload([:project, github_issues: [github_user: [:user]]])
issues
|> Enum.map(fn issue ->
{issue, issue |> Map.get(:github_user) |> Map.get(:user)}
end)
|> Enum.map(fn {issue, user} -> find_or_create_task(issue, repo, user) end)
|> ResultAggregator.aggregate()
end
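# Illustrative calls (a sketch; records are hypothetical and preloaded):
#
#   {:ok, %Task{}} = sync_github_issue(github_issue, user)
#   {:ok, tasks}   = sync_github_repo(github_repo)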
@spec find_or_create_task(GithubIssue.t(), GithubRepo.t(), User.t()) :: commit_result
defp find_or_create_task(
%GithubIssue{} = issue,
%GithubRepo{} = repo,
%User{} = user) do
case find_task(repo, issue) do
nil ->
issue
|> Sync.Task.Changeset.create_changeset(repo, user)
|> Repo.insert()
%Task{} = task ->
task
|> Sync.Task.Changeset.update_changeset(issue, repo)
|> Repo.update()
end
end
@spec find_task(GithubRepo.t(), GithubIssue.t()) :: Task.t() | nil
defp find_task(%GithubRepo{id: repo_id}, %GithubIssue{id: issue_id}) do
Task |> Repo.get_by(github_issue_id: issue_id, github_repo_id: repo_id)
end
end
<|start_filename|>lib/code_corps_web/views/github_repo_view.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubRepoView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:github_account_avatar_url, :github_account_id,
:github_account_login, :github_account_type, :github_id, :inserted_at,
:name, :syncing_comments_count, :syncing_issues_count,
:syncing_pull_requests_count, :sync_state, :updated_at]
has_one :github_app_installation, type: "github-app-installation", field: :github_app_installation_id
has_one :project, type: "project", field: :project_id
end
<|start_filename|>priv/repo/migrations/20171115225358_add_serialized_error_to_github_events.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddSerializedErrorToGithubEvents do
use Ecto.Migration
def change do
alter table(:github_events) do
add :data, :text
add :error, :text
end
end
end
<|start_filename|>lib/code_corps/sentry/async.ex<|end_filename|>
defmodule CodeCorps.Sentry.Async do
def capture_exception(exception, opts \\ []) do
exception
|> Sentry.capture_exception(opts)
end
end
<|start_filename|>test/lib/code_corps/stripe_service/adapters/stripe_event_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripeEventTest do
use CodeCorps.ModelCase
import CodeCorps.StripeService.Adapters.StripeEventAdapter, only: [to_params: 2]
@stripe_event %Stripe.Event{
api_version: nil,
created: nil,
data: %{
object: %Stripe.Customer{
id: "cus_123",
object: "customer"
}
},
id: "evt_123",
livemode: false,
object: "event",
pending_webhooks: nil,
request: nil,
type: "some.event",
user_id: "act_123"
}
@attributes %{
"endpoint" => "connect"
}
@local_map %{
"endpoint" => "connect",
"id_from_stripe" => "evt_123",
"object_id" => "cus_123",
"object_type" => "customer",
"type" => "some.event",
"user_id" => "act_123"
}
@stripe_event_for_balance_available %Stripe.Event{
api_version: nil,
created: nil,
data: %{
# NOTE: stripity_stripe does not serialize Balance objects yet.
# Once it does, this map should be replaced with a Stripe.Balance struct
object: %{
available: [%{amount: 0, currency: "usd", source_types: %{card: 0}}],
connect_reserved: [%{amount: 0, currency: "usd"}],
livemode: false,
object: "balance",
pending: [%{amount: 0, currency: "usd", source_types: %{card: 0}}]
}
},
id: "evt_balance",
livemode: false,
object: "event",
pending_webhooks: nil,
request: nil,
type: "balance.available",
user_id: "act_with_balance"
}
@local_map_for_balance_available %{
"endpoint" => "connect",
"id_from_stripe" => "evt_balance",
"object_id" => nil,
"object_type" => "balance",
"type" => "balance.available",
"user_id" => "act_with_balance"
}
describe "to_params/2" do
test "converts from stripe map to local properly" do
{:ok, result} = to_params(@stripe_event, @attributes)
assert result == @local_map
end
test "works with balance.available event" do
{:ok, result} = to_params(@stripe_event_for_balance_available, @attributes)
assert result == @local_map_for_balance_available
end
end
end
<|start_filename|>test/lib/code_corps_web/views/stripe_platform_customer_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripePlatformCustomerViewTest do
use CodeCorpsWeb.ViewCase
alias Phoenix.ConnTest
alias Plug.Conn
test "renders all attributes and relationships properly" do
user = insert(:user)
stripe_platform_customer = insert(:stripe_platform_customer, id_from_stripe: "some_id", email: "email", user: user)
rendered_json = render(CodeCorpsWeb.StripePlatformCustomerView, "show.json-api", data: stripe_platform_customer)
expected_json = %{
"data" => %{
"attributes" => %{
"email" => "",
"created" => stripe_platform_customer.created,
"currency" => stripe_platform_customer.currency,
"delinquent" => stripe_platform_customer.delinquent,
"id-from-stripe" => "",
"inserted-at" => stripe_platform_customer.inserted_at,
"updated-at" => stripe_platform_customer.updated_at
},
"id" => stripe_platform_customer.id |> Integer.to_string,
"relationships" => %{
"user" => %{
"data" => %{"id" => user.id |> Integer.to_string, "type" => "user"}
}
},
"type" => "stripe-platform-customer",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
test "renders email and id_from_stripe when user is the authenticated user" do
user = insert(:user)
stripe_platform_customer = insert(:stripe_platform_customer, id_from_stripe: "some_id", email: "email", user: user)
conn =
ConnTest.build_conn
|> Conn.assign(:current_user, user)
rendered_json = render(CodeCorpsWeb.StripePlatformCustomerView, "show.json-api", data: stripe_platform_customer, conn: conn)
assert rendered_json["data"]["attributes"]["email"] == stripe_platform_customer.email
assert rendered_json["data"]["attributes"]["id-from-stripe"] == stripe_platform_customer.id_from_stripe
end
test "renders email and id_from_stripe for only the authenticated user when rendering list" do
stripe_platform_customers = insert_list(4, :stripe_platform_customer)
auth_customer = stripe_platform_customers |> List.last
conn =
ConnTest.build_conn
|> Conn.assign(:current_user, auth_customer.user)
rendered_json = render(CodeCorpsWeb.StripePlatformCustomerView, "show.json-api", data: stripe_platform_customers, conn: conn)
emails =
rendered_json["data"]
|> Enum.map(&Map.get(&1, "attributes"))
|> Enum.map(&Map.get(&1, "email"))
|> Enum.filter(fn(email) -> email != "" end)
assert emails == [auth_customer.email]
stripe_ids =
rendered_json["data"]
|> Enum.map(&Map.get(&1, "attributes"))
|> Enum.map(&Map.get(&1, "id-from-stripe"))
|> Enum.filter(fn(id_from_stripe) -> id_from_stripe != "" end)
assert stripe_ids == [auth_customer.id_from_stripe]
end
end
<|start_filename|>lib/code_corps/messages/messages.ex<|end_filename|>
defmodule CodeCorps.Messages do
@moduledoc ~S"""
Main context for work with the Messaging feature.
"""
alias CodeCorps.{
Conversation,
ConversationPart,
Helpers.Query,
Message,
Messages,
Repo
}
alias Ecto.{Changeset, Queryable}
@doc ~S"""
Lists pre-scoped `CodeCorps.Message` records filtered by parameters.
"""
@spec list(Queryable.t, map) :: list(Message.t)
def list(scope, %{} = params) do
scope
|> Query.id_filter(params)
|> Repo.all()
end
@doc ~S"""
Lists pre-scoped `CodeCorps.Conversation` records filtered by parameters
"""
@spec list_conversations(Queryable.t, map) :: list(Conversation.t)
def list_conversations(scope, %{} = params) do
scope
|> Messages.ConversationQuery.project_filter(params)
|> Messages.ConversationQuery.active_filter(params)
|> Messages.ConversationQuery.status_filter(params)
|> Messages.ConversationQuery.user_filter(params)
|> Repo.all()
end
@doc ~S"""
Lists pre-scoped `CodeCorps.ConversationPart` records filtered by parameters
"""
@spec list_parts(Queryable.t, map) :: list(Conversation.t)
def list_parts(scope, %{} = _params) do
scope |> Repo.all()
end
@doc ~S"""
Gets a `CodeCorps.Conversation` record
"""
@spec get_conversation(integer) :: Conversation.t
def get_conversation(id) do
Conversation |> Repo.get(id)
end
def update_conversation(conversation, params) do
conversation |> Conversation.update_changeset(params) |> Repo.update
end
@doc ~S"""
Gets a `CodeCorps.ConversationPart` record
"""
@spec get_part(integer) :: ConversationPart.t
def get_part(id) do
ConversationPart |> Repo.get(id)
end
@doc ~S"""
Creates a `CodeCorps.Message` from a set of parameters.
"""
@spec create(map) :: {:ok, Message.t} | {:error, Changeset.t}
def create(%{} = params) do
with {:ok, %Message{} = message} <- %Message{} |> create_changeset(params) |> Repo.insert() do
message |> Messages.Emails.notify_message_targets()
{:ok, message}
else
{:error, %Changeset{} = changeset} -> {:error, changeset}
end
end
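# Illustrative call (a sketch; params are hypothetical and the message fields
# themselves are validated by `CodeCorps.Message.changeset/2`). Conversations
# are created alongside the message through the `cast_assoc` in
# `create_changeset/2` below:
#
#   Messages.create(%{
#     "author_id" => author.id,
#     "project_id" => project.id,
#     "conversations" => [%{"user_id" => recipient.id}]
#   })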
@spec create_changeset(Message.t, map) :: Changeset.t
defp create_changeset(%Message{} = message, %{} = params) do
message
|> Message.changeset(params)
|> Changeset.cast(params, [:author_id, :project_id])
|> Changeset.validate_required([:author_id, :project_id])
|> Changeset.assoc_constraint(:author)
|> Changeset.assoc_constraint(:project)
|> Changeset.cast_assoc(:conversations, with: &Messages.Conversations.create_changeset/2)
end
@spec add_part(map) :: {:ok, ConversationPart.t} | {:error, Changeset.t}
def add_part(%{} = params) do
with {:ok, %ConversationPart{} = conversation_part} <- params |> Messages.ConversationParts.create do
conversation_part |> Messages.Emails.notify_of_new_reply()
{:ok, conversation_part}
else
{:error, %Changeset{} = changeset} -> {:error, changeset}
end
end
end
<|start_filename|>test/lib/code_corps_web/plugs/set_timber_user_context_test.exs<|end_filename|>
defmodule CodeCorpsWeb.Plug.SetTimberUserContextTest do
use CodeCorpsWeb.ConnCase
alias CodeCorpsWeb.Plug.SetTimberUserContext
@opts SetTimberUserContext.init([])
describe "init/1" do
test "returns the given options" do
assert SetTimberUserContext.init([]) == []
end
end
describe "call/2" do
test "adds user context when current_user is set" do
user = insert(:user, first_name: "Josh", last_name: "Smith")
conn = build_conn() |> assign(:current_user, user)
result = SetTimberUserContext.call(conn, @opts)
assert result == conn
assert Timber.CurrentContext.load() ==
%{user: %{id: to_string(user.id), name: "Josh Smith", email: user.email}}
end
test "adds nothing when current_user is not set" do
conn = build_conn()
result = SetTimberUserContext.call(conn, @opts)
assert result == conn
assert Timber.CurrentContext.load() == %{}
end
end
describe "add_context/2" do
test "adds user context correctly when given user is valid" do
user = insert(:user, first_name: "Josh", last_name: nil)
conn = build_conn()
result = SetTimberUserContext.add_context(conn, user)
assert result == conn
assert Timber.CurrentContext.load() ==
%{user: %{id: to_string(user.id), name: "Josh", email: user.email}}
end
test "adds nothing when given user is invalid" do
conn = build_conn()
result = SetTimberUserContext.add_context(conn, nil)
assert result == conn
assert Timber.CurrentContext.load() == %{}
end
end
end
<|start_filename|>lib/code_corps_web/views/user_category_view.ex<|end_filename|>
defmodule CodeCorpsWeb.UserCategoryView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
has_one :user, type: "user", field: :user_id
has_one :category, type: "category", field: :category_id
end
<|start_filename|>test/lib/code_corps/skills/skills_test.exs<|end_filename|>
defmodule CodeCorps.SkillsTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.Skills
describe "popular/1" do
test "returns popular skills in order with a limit" do
[least_popular, somewhat_popular, most_popular] = insert_list(3, :skill)
insert_list(3, :user_skill, skill: most_popular)
insert_list(2, :user_skill, skill: somewhat_popular)
insert_list(1, :user_skill, skill: least_popular)
[first_result, last_result] = Skills.popular(%{"limit" => "2"})
assert first_result == most_popular
assert last_result == somewhat_popular
end
test "defaults limit to 10" do
skills = insert_list(11, :skill)
skills |> Enum.each(fn skill -> insert(:user_skill, skill: skill) end)
results = Skills.popular()
assert results |> Enum.count() == 10
end
test "ignores non-number limits" do
insert(:user_skill)
results = Skills.popular(%{"limit" => "apples"})
assert results |> Enum.count() == 1
end
end
end
<|start_filename|>test/lib/code_corps/policy/project_skill_test.exs<|end_filename|>
defmodule CodeCorps.Policy.ProjectSkillTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.ProjectSkill, only: [create?: 2, delete?: 2]
describe "create?" do
test "returns false when user is not a project member" do
user = insert(:user)
project = insert(:project)
params = %{project_id: project.id}
refute create?(user, params)
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
params = %{project_id: project.id}
refute create?(user, params)
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
params = %{project_id: project.id}
refute create?(user, params)
end
test "returns true when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
params = %{project_id: project.id}
assert create?(user, params)
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
params = %{project_id: project.id}
assert create?(user, params)
end
end
describe "delete?" do
test "returns false when user is not a project member" do
user = insert(:user)
project = insert(:project)
record = insert(:project_skill, project: project)
refute delete?(user, record)
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
record = insert(:project_skill, project: project)
refute delete?(user, record)
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
record = insert(:project_skill, project: project)
refute delete?(user, record)
end
test "returns true when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
record = insert(:project_skill, project: project)
assert delete?(user, record)
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
record = insert(:project_skill, project: project)
assert delete?(user, record)
end
end
end
<|start_filename|>lib/code_corps/policy/role_skill.ex<|end_filename|>
defmodule CodeCorps.Policy.RoleSkill do
alias CodeCorps.User
@spec create?(User.t) :: boolean
def create?(%User{admin: true}), do: true
def create?(%User{admin: false}), do: false
@spec delete?(User.t) :: boolean
def delete?(%User{admin: true}), do: true
def delete?(%User{admin: false}), do: false
end
<|start_filename|>test/lib/code_corps/github/event/installation_repositories/installation_repositories_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.InstallationRepositoriesTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GithubRepo,
GitHub.Event.InstallationRepositories,
Repo
}
describe "handle/1" do
@payload load_event_fixture("installation_repositories_added")
test "marks event as errored if invalid payload" do
payload = @payload |> Map.delete("action")
assert {:error, :unexpected_payload} ==
InstallationRepositories.handle(payload)
end
end
describe "handle/1 for InstallationRepositories::added" do
@payload load_event_fixture("installation_repositories_added")
test "creates repos" do
%{
"installation" => %{
"account" => %{
"avatar_url" => installation_account_avatar_url,
"id" => installation_account_id,
"login" => installation_account_login,
"type" => installation_account_type
},
"id" => installation_github_id
},
"repositories_added" => [repo_1_payload, repo_2_payload]
} = @payload
%{id: installation_id} = insert(:github_app_installation, github_account_avatar_url: installation_account_avatar_url, github_account_id: installation_account_id, github_account_login: installation_account_login, github_account_type: installation_account_type, github_id: installation_github_id)
{:ok, {synced_repos, deleted_repos}} =
InstallationRepositories.handle(@payload)
assert synced_repos |> Enum.count == 2
assert deleted_repos |> Enum.count == 0
github_repo_1 = Repo.get_by(GithubRepo, github_id: repo_1_payload["id"])
assert github_repo_1
assert github_repo_1.name == repo_1_payload["name"]
assert github_repo_1.github_account_avatar_url == installation_account_avatar_url
assert github_repo_1.github_account_id == installation_account_id
assert github_repo_1.github_account_login == installation_account_login
assert github_repo_1.github_account_type == installation_account_type
assert github_repo_1.github_app_installation_id == installation_id
github_repo_2 = Repo.get_by(GithubRepo, github_id: repo_2_payload["id"])
assert github_repo_2
assert github_repo_2.name == repo_2_payload["name"]
assert github_repo_2.github_account_avatar_url == installation_account_avatar_url
assert github_repo_2.github_account_id == installation_account_id
assert github_repo_2.github_account_login == installation_account_login
assert github_repo_2.github_account_type == installation_account_type
assert github_repo_2.github_app_installation_id == installation_id
end
test "skips creating existing repos" do
%{
"installation" => %{
"account" => %{
"avatar_url" => installation_account_avatar_url,
"id" => installation_account_id,
"login" => installation_account_login,
"type" => installation_account_type
},
"id" => installation_github_id
},
"repositories_added" => [repo_1_payload, repo_2_payload]
} = @payload
installation = insert(:github_app_installation, github_account_avatar_url: installation_account_avatar_url, github_account_id: installation_account_id, github_account_login: installation_account_login, github_account_type: installation_account_type, github_id: installation_github_id)
preinserted_repo = insert(:github_repo, github_app_installation: installation, github_id: repo_1_payload["id"])
{:ok, {synced_repos, deleted_repos}} =
InstallationRepositories.handle(@payload)
assert synced_repos |> Enum.count == 2
assert deleted_repos |> Enum.count == 0
github_repo_1 = Repo.get_by(GithubRepo, github_id: repo_1_payload["id"])
assert github_repo_1.id == preinserted_repo.id
github_repo_2 = Repo.get_by(GithubRepo, github_id: repo_2_payload["id"])
assert github_repo_2
assert github_repo_2.name == repo_2_payload["name"]
assert github_repo_2.github_account_avatar_url == installation_account_avatar_url
assert github_repo_2.github_account_id == installation_account_id
assert github_repo_2.github_account_login == installation_account_login
assert github_repo_2.github_account_type == installation_account_type
assert github_repo_2.github_app_installation_id == installation.id
assert Repo.aggregate(GithubRepo, :count, :id) == 2
end
test "marks event as errored if invalid instalation payload" do
assert {:error, :unexpected_payload} == InstallationRepositories.handle(@payload |> Map.put("installation", "foo"))
end
test "marks event as errored if invalid repo payload" do
insert(:github_app_installation, github_id: @payload["installation"]["id"])
assert {:error, :unexpected_payload} == InstallationRepositories.handle(@payload |> Map.put("repositories_added", ["foo"]))
end
test "marks event as errored if no installation" do
assert {:error, :unmatched_installation} == InstallationRepositories.handle(@payload)
end
end
describe "handle/1 for InstallationRepositories::removed" do
@payload load_event_fixture("installation_repositories_removed")
test "deletes github repos" do
%{
"installation" => %{"id" => installation_github_id},
"repositories_removed" => [repo_1_payload, repo_2_payload]
} = @payload
%{project: project} = installation = insert(:github_app_installation, github_id: installation_github_id)
insert(:github_repo, github_app_installation: installation, github_id: repo_1_payload["id"], project: project)
insert(:github_repo, github_app_installation: installation, github_id: repo_2_payload["id"])
{:ok, {synced_repos, deleted_repos}} =
InstallationRepositories.handle(@payload)
assert synced_repos |> Enum.count == 0
assert deleted_repos |> Enum.count == 2
assert Repo.aggregate(GithubRepo, :count, :id) == 0
end
test "skips deleting if nothing to delete" do
%{
"installation" => %{"id" => installation_github_id},
"repositories_removed" => [repo_1_payload, _repo_2_payload]
} = @payload
%{project: project} = installation = insert(:github_app_installation, github_id: installation_github_id)
insert(:github_repo, github_app_installation: installation, github_id: repo_1_payload["id"], project: project)
{:ok, {synced_repos, deleted_repos}} =
InstallationRepositories.handle(@payload)
assert synced_repos |> Enum.count == 0
assert deleted_repos |> Enum.count == 1
assert Repo.aggregate(GithubRepo, :count, :id) == 0
end
test "marks event as errored if invalid instalation payload" do
payload = @payload |> Map.put("installation", "foo")
assert {:error, :unexpected_payload} ==
InstallationRepositories.handle(payload)
end
test "marks event as errored if invalid repo payload" do
insert(:github_app_installation, github_id: @payload["installation"]["id"])
payload = @payload |> Map.put("repositories_removed", ["foo"])
assert {:error, :unexpected_payload} ==
InstallationRepositories.handle(payload)
end
test "marks event as errored if no installation" do
assert {:error, :unmatched_installation} == InstallationRepositories.handle(@payload)
end
end
end
<|start_filename|>emails/project_approval_request.html<|end_filename|>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>{{project_title}} is asking to be approved</title>
<!--
Make sure you copy the styles from styles.css into the email template in Postmark before saving there.
<style type="text/css" rel="stylesheet" media="all">
</style>
-->
<link rel="stylesheet" type="text/css" href="styles.css" media="screen" />
</head>
<body>
<span class="preheader">Head over to the app to process their request.</span>
<table class="email-wrapper" width="100%" cellpadding="0" cellspacing="0">
<tr>
<td align="center">
<table class="email-content" width="100%" cellpadding="0" cellspacing="0">
<tr>
<td class="email-masthead" width="100%" cellpadding="0" cellspacing="0">
<table class="email-masthead_inner" align="center" width="570" cellpadding="0" cellspacing="0">
<tr>
<td>
<a href="https://www.codecorps.org">
<img src="https://d3pgew4wbk2vb1.cloudfront.net/emails/images/logo-small@2x.png" class="email-masthead_logo" />
</a>
</td>
</tr>
</table>
</td>
</tr>
<!-- Email Body -->
<tr>
<td class="email-body" width="100%" cellpadding="0" cellspacing="0">
<table class="email-body_inner" align="center" width="570" cellpadding="0" cellspacing="0">
<!-- Body content -->
<tr>
<td class="content-cell">
<table class="donation" width="100%" cellpadding="0" cellspacing="0">
<tr>
<td>
<p>
Hi Code Corps team,
</p>
<p>
<strong>{{project_title}}</strong> is asking to have their project approved.
</p>
</td>
</tr>
</table>
<table class="donation" width="100%" cellpadding="0" cellspacing="0">
<tr>
<td>
<p class="center">
<img src="{{project_logo_url}}" width="70" height="70" />
</p>
<p class="center">
<strong>{{project_title}}</strong>
</p>
<p class="center">
{{project_description}}
</p>
</td>
</tr>
</table>
<table class="donation" width="100%" cellpadding="0" cellspacing="0">
<tr>
<td>
<p>
You can head over to <a href="{{admin_project_show_url}}">the admin page for the project</a> to process their request.
</p>
<p>
You can also take a look at <a href="{{project_url}}">their project on Code Corps</a> to see it as users will see it.
</p>
</td>
</tr>
</table>
<table class="body-signature">
<tr>
<td>
<p>
Cheers,
<br><strong>🤖 The Code Corps Robots</strong>
</p>
</td>
</tr>
</table>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td>
<table class="email-footer" align="center" width="570" cellpadding="0" cellspacing="0">
<tr>
<td class="email-footer__cell" align="center">
<p class="align-center">
Questions? Feedback? Visit our <strong><a href="https://help.codecor<EMAIL>">Help Center</a></strong> or just reply to this email.
</p>
</td>
</tr>
<tr>
<td class="email-footer__cell" align="center">
<p class="align-center">
Send us 🐌 mail!
<br>
<br>Code Corps PBC
<br>4166 Wilson Ave #1
<br>San Diego, CA 92104
</p>
</td>
</tr>
</table>
</td>
</tr>
</table>
</td>
</tr>
</table>
</body>
</html>
<|start_filename|>priv/repo/migrations/20171019191035_change_github_issue_and_github_pull_request_relationships.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.ChangeGithubIssueAndGithubPullRequestRelationships do
use Ecto.Migration
def up do
alter table(:tasks) do
remove :github_pull_request_id
end
alter table(:github_issues) do
add :github_pull_request_id, references(:github_pull_requests)
end
end
def down do
alter table(:tasks) do
add :github_pull_request_id, references(:github_pull_requests)
end
alter table(:github_issues) do
remove :github_pull_request_id
end
end
end
<|start_filename|>lib/code_corps/analytics/segment_data_extractor.ex<|end_filename|>
defmodule CodeCorps.Analytics.SegmentDataExtractor do
@moduledoc """
Extract data for use in Segment tracking
"""
@spec get_action(Plug.Conn.t) :: atom
def get_action(%Plug.Conn{private: %{phoenix_action: action}}), do: action
def get_action(_), do: nil
@doc """
Tries to extract project id from given resource.
Returns `nil` if project id can't be extracted.
"""
@spec get_project_id(CodeCorps.ProjectUser.t) :: String.t | nil
def get_project_id(%CodeCorps.ProjectUser{project_id: id}), do: "project_#{id}"
def get_project_id(_), do: nil
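# A minimal usage sketch, not part of the original module; the struct values
# are hypothetical, the output format follows the clauses above:
#
#     get_project_id(%CodeCorps.ProjectUser{project_id: 42}) #=> "project_42"
#     get_project_id(%{some: "other resource"})              #=> nil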
@spec get_resource(Plug.Conn.t) :: struct
def get_resource(%Plug.Conn{assigns: %{data: data}}), do: data
# these are used for delete actions on records that support it
# we render a 404 in those cases, so data is never assigned
def get_resource(%Plug.Conn{assigns: %{user_category: data}}), do: data
def get_resource(%Plug.Conn{assigns: %{user_role: data}}), do: data
def get_resource(%Plug.Conn{assigns: %{user_skill: data}}), do: data
def get_resource(%Plug.Conn{assigns: %{token: token, user_id: user_id}}) do
%{token: token, user_id: user_id}
end
def get_resource(_), do: nil
@spec get_user_id(Plug.Conn.t, CodeCorps.User.t | struct | map) :: String.t
def get_user_id(%Plug.Conn{assigns: %{current_user: %CodeCorps.User{id: id}}}, _), do: id
def get_user_id(_, %CodeCorps.User{id: id}), do: id
def get_user_id(_, %{user_id: user_id}), do: user_id
end
<|start_filename|>lib/code_corps_web/controllers/task_skill_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.TaskSkillController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{
Analytics.SegmentTracker,
Helpers.Query,
TaskSkill,
User
}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with task_skills <- TaskSkill |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: task_skills)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %TaskSkill{} = task_skill <- TaskSkill |> Repo.get(id) do
conn |> render("show.json-api", data: task_skill)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %TaskSkill{}, params),
{:ok, %TaskSkill{} = task_skill} <- %TaskSkill{} |> TaskSkill.create_changeset(params) |> Repo.insert
do
SegmentTracker.track(current_user.id, "Added Task Skill", task_skill)
conn |> put_status(:created) |> render("show.json-api", data: task_skill)
end
end
@spec delete(Conn.t, map) :: Conn.t
def delete(%Conn{} = conn, %{"id" => id} = _params) do
with %TaskSkill{} = task_skill <- TaskSkill |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:delete, task_skill),
{:ok, %TaskSkill{} = _task_skill} <- task_skill |> Repo.delete
do
SegmentTracker.track(current_user.id, "Removed Task Skill", task_skill)
conn |> Conn.assign(:task_skill, task_skill) |> send_resp(:no_content, "")
end
end
end
<|start_filename|>lib/code_corps/github/sync/task/changeset.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Task.Changeset do
@moduledoc ~S"""
In charge of building a `Changeset` to update a `Task` with, when handling an
Issues webhook.
"""
import Ecto.Query
alias CodeCorps.{
GithubIssue,
GithubRepo,
GitHub.Adapters,
Repo,
Services.MarkdownRendererService,
Task,
TaskList,
User,
Validators.TimeValidator
}
alias Ecto.Changeset
@create_attrs ~w(created_at markdown modified_at status title)a
@doc """
Constructs a changeset for creating a `CodeCorps.Task` when processing an
Issues or IssueComment webhook.
"""
@spec create_changeset(GithubIssue.t(), GithubRepo.t(), User.t()) :: Changeset.t()
def create_changeset(
%GithubIssue{} = github_issue,
%GithubRepo{project_id: project_id} = github_repo,
%User{} = user) do
%Task{}
|> Changeset.cast(github_issue |> Adapters.Issue.to_task, @create_attrs)
|> MarkdownRendererService.render_markdown_to_html(:markdown, :body)
|> Changeset.put_change(:created_from, "github")
|> Changeset.put_change(:modified_from, "github")
|> Changeset.put_assoc(:github_issue, github_issue)
|> Changeset.put_assoc(:github_repo, github_repo)
|> Changeset.put_assoc(:user, user)
|> Changeset.put_change(:project_id, project_id)
|> Changeset.assoc_constraint(:project)
|> assign_task_list(github_issue, github_repo)
|> Changeset.validate_required([:project_id, :task_list_id, :title])
|> maybe_archive()
|> Task.handle_archived()
end
@update_attrs ~w(markdown modified_at status title)a
@doc """
Constructs a changeset for updating a `CodeCorps.Task` when processing an
Issues or IssueComment webhook.
"""
@spec update_changeset(Task.t(), GithubIssue.t(), GithubRepo.t()) :: Changeset.t()
def update_changeset(
%Task{} = task,
%GithubIssue{} = github_issue,
%GithubRepo{} = github_repo) do
task
|> Changeset.cast(github_issue |> Adapters.Issue.to_task, @update_attrs)
|> MarkdownRendererService.render_markdown_to_html(:markdown, :body)
|> Changeset.put_change(:modified_from, "github")
|> TimeValidator.validate_time_not_before(:modified_at)
|> assign_task_list(github_issue, github_repo)
|> Changeset.validate_required([:title])
|> maybe_archive()
|> Task.handle_archived()
end
@spec assign_task_list(Changeset.t(), GithubIssue.t(), GithubRepo.t()) :: Changeset.t()
defp assign_task_list(
%Changeset{} = changeset,
%GithubIssue{} = issue,
%GithubRepo{project_id: project_id}) do
list_type = issue |> get_task_list_type()
task_list_id =
TaskList
|> where(project_id: ^project_id)
|> where([t], field(t, ^list_type) == true)
|> Repo.one()
# ensure the pipeline still executes if no matching list is found
|> (fn list -> (list || %{}) |> Map.get(:id) end).()
# put_change/2 instead of put_assoc/2 so task list
# doesn't have to be preloaded
# id can be nil and will trigger validation in that case
changeset
|> Changeset.put_change(:task_list_id, task_list_id)
|> Changeset.assoc_constraint(:task_list)
end
@spec get_task_list_type(GithubIssue.t()) :: atom
defp get_task_list_type(%GithubIssue{state: "closed"}), do: :done
defp get_task_list_type(%GithubIssue{state: "open", github_pull_request_id: id})
when not is_nil(id), do: :pull_requests
defp get_task_list_type(%GithubIssue{state: "open"}), do: :inbox
@spec maybe_archive(Changeset.t) :: Changeset.t
defp maybe_archive(%Changeset{} = changeset) do
modified_at = changeset |> Changeset.get_field(:modified_at)
status = changeset |> Changeset.get_field(:status)
case {status, Timex.now |> Timex.diff(modified_at, :days)} do
{"closed", days_since_modified} when days_since_modified > 30 ->
changeset |> Changeset.put_change(:archived, true)
_ -> changeset
end
end
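# Archiving rule in brief (comments added for clarity, not in the original
# source): a changeset whose status is "closed" and whose modified_at is more
# than 30 days in the past gets `archived: true`; every other combination
# passes through unchanged. Example inputs to the case above:
#
#     {"closed", 45} -> archived set to true
#     {"closed", 10} -> changeset unchanged
#     {"open",   90} -> changeset unchanged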
end
<|start_filename|>test/lib/code_corps/github/webhook/handler_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Webhook.HandlerTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
import CodeCorps.TestEnvironmentHelper
alias CodeCorps.{
GithubEvent,
GitHub.Webhook.Handler,
Repo,
Task
}
defp setup_repo(github_repo_id) do
project = insert(:project)
insert(:github_repo, github_id: github_repo_id, project: project)
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
insert(:task_list, project: project, pull_requests: true)
end
describe "handle_supported/3" do
test "handles issues 'opened' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("issues_opened")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("issues", "abc-123", payload)
assert event.action == "opened"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "issues"
end
test "handles issues 'closed' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("issues_closed")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("issues", "abc-123", payload)
assert event.action == "closed"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "issues"
end
test "handles issues 'edited' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("issues_edited")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("issues", "abc-123", payload)
assert event.action == "edited"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "issues"
end
test "handles issues 'reopened' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("issues_reopened")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("issues", "abc-123", payload)
assert event.action == "reopened"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "issues"
end
test "handles issue_comment 'created' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("issue_comment_created")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("issue_comment", "abc-123", payload)
assert event.action == "created"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "issue_comment"
end
test "handles issue_comment 'edited' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("issue_comment_edited")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("issue_comment", "abc-123", payload)
assert event.action == "edited"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "issue_comment"
end
test "handles issue_comment 'deleted' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("issue_comment_deleted")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("issue_comment", "abc-123", payload)
assert event.action == "deleted"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "issue_comment"
end
test "handles installation_repositories 'added' event" do
%{
"installation" => %{
"id" => installation_id
}
} = payload = load_event_fixture("installation_repositories_added")
insert(:github_app_installation, github_id: installation_id)
assert Handler.handle_supported("installation_repositories", "abc-123", payload)
event = Repo.one(GithubEvent)
assert event.action == "added"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "installation_repositories"
end
test "handles installation_repositories 'removed' event" do
%{
"installation" => %{
"id" => installation_id
}
} = payload = load_event_fixture("installation_repositories_removed")
insert(:github_app_installation, github_id: installation_id)
assert Handler.handle_supported("installation_repositories", "abc-123", payload)
event = Repo.one(GithubEvent)
assert event.action == "removed"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "installation_repositories"
end
@installation_created_payload load_event_fixture("installation_created")
test "handles installation 'created' event" do
assert Handler.handle_supported("installation", "abc-123", @installation_created_payload)
event = Repo.one(GithubEvent)
assert event.action == "created"
assert event.github_delivery_id == "abc-123"
assert event.payload == @installation_created_payload
assert event.status == "processed"
assert event.type == "installation"
end
test "handles pull_request 'opened' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("pull_request_opened")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("pull_request", "abc-123", payload)
assert event.action == "opened"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "pull_request"
end
test "handles pull_request 'edited' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("pull_request_edited")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("pull_request", "abc-123", payload)
assert event.action == "edited"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "pull_request"
end
test "handles pull_request 'closed' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("pull_request_closed")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("pull_request", "abc-123", payload)
assert event.action == "closed"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "pull_request"
end
test "handles pull_request 'reopened' event" do
%{"repository" => %{"id" => github_repo_id}}
= payload = load_event_fixture("pull_request_reopened")
setup_repo(github_repo_id)
{:ok, %GithubEvent{} = event} = Handler.handle_supported("pull_request", "abc-123", payload)
assert event.action == "reopened"
assert event.github_delivery_id == "abc-123"
assert event.payload == payload
assert event.status == "processed"
assert event.type == "pull_request"
end
end
describe "handle_supported/3 when there are errors" do
test "serializes error output" do
%{"repository" => %{"id" => github_repo_id}}
= opened_payload = load_event_fixture("issues_opened")
setup_repo(github_repo_id)
{:ok, %GithubEvent{}} = Handler.handle_supported("issues", "abc-123", opened_payload)
edited_payload = load_event_fixture("issues_edited")
edited_payload =
edited_payload
|> put_in(["issue", "updated_at"], "2006-05-05T23:40:28Z")
task = Repo.one(Task)
changeset = Task.update_changeset(task, %{title: "New title", updated_from: "codecorps"})
Repo.update!(changeset)
bypass = Bypass.open
Bypass.expect bypass, fn conn ->
{:ok, body, conn} = Plug.Conn.read_body(conn)
assert body =~ "GitHubEventError"
assert body =~ "CodeCorps"
assert conn.request_path == "/api/1/store/"
assert conn.method == "POST"
Plug.Conn.resp(conn, 200, ~s<{"id": "340"}>)
end
modify_env(:sentry, environment_name: :prod)
modify_env(:sentry, dsn: "http://public:secret@localhost:#{bypass.port}/1")
{:ok, %GithubEvent{} = event} = Handler.handle_supported("issues", "abc-456", edited_payload)
assert event.action == "edited"
assert event.github_delivery_id == "abc-456"
assert event.data == Repo.one(Task) |> Kernel.inspect(pretty: true)
assert event.error # This is difficult to test, so just assert presence
assert event.payload == edited_payload
assert event.status == "errored"
assert event.type == "issues"
end
end
describe "handle_unsupported/3" do
[
{"installation", "deleted"},
{"issues", "assigned"},
{"issues", "unassigned"},
{"issues", "labeled"},
{"issues", "unlabeled"},
{"issues", "milestoned"},
{"issues", "demilestoned"},
{"pull_request", "assigned"},
{"pull_request", "unassigned"},
{"pull_request", "labeled"},
{"pull_request", "unlabeled"},
{"pull_request", "milestoned"},
{"pull_request", "demilestoned"},
{"pull_request", "synchronize"}
] |> Enum.each(fn {type, action} ->
@event_type type
@action action
test "stores #{type} '#{action}' as ignored" do
{:ok, %GithubEvent{} = event} =
Handler.handle_unsupported(@event_type, "foo", %{"action" => @action})
assert event.status == "unsupported"
assert event.type == @event_type
assert event.action == @action
assert event.github_delivery_id == "foo"
assert event.payload == %{"action" => @action}
end
end)
end
end
<|start_filename|>lib/code_corps/github/utils/result_aggregator.ex<|end_filename|>
defmodule CodeCorps.GitHub.Utils.ResultAggregator do
@moduledoc ~S"""
Used for aggregating a list of results.
"""
@doc ~S"""
Aggregates a list of result tuples into a single result tuple.
A result tuple is a two-element tuple where the first element is `:ok`,
or `:error`, while the second element is the resulting data.
This function goes through a list of such tuples and aggregates the list into
a single tuple where
- if all tuples in the list are `:ok` tuples, returns `{:ok, results}`
- if any tuple is an `:error` tuple, returns `{:error, {results, errors}}`
- `results` and `errors` are lists of second tuple elements in their
respective tuples
"""
@spec aggregate(list) :: {:ok, list} | {:error, {list, list}}
def aggregate(results) when is_list(results) do
results |> collect() |> summarize()
end
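# Usage sketch (illustrative values, not from the original module):
#
#     aggregate([{:ok, 1}, {:ok, 2}])         #=> {:ok, [1, 2]}
#     aggregate([{:ok, 1}, {:error, :oops}])  #=> {:error, {[1], [:oops]}}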
@spec collect(list, list, list) :: tuple
defp collect(results, records \\ [], errors \\ [])
defp collect([{:ok, record} | tail], records, errors) do
collect(tail, records ++ [record], errors)
end
defp collect([{:error, error} | tail], records, errors) do
collect(tail, records, errors ++ [error])
end
defp collect([], records, errors), do: {records, errors}
@spec summarize(tuple) :: tuple
defp summarize({records, []}), do: {:ok, records}
defp summarize({records, errors}), do: {:error, {records, errors}}
end
<|start_filename|>test/lib/code_corps_web/controllers/github_repo_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.GithubRepoControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :github_repo
import CodeCorps.GitHub.TestHelpers
describe "index" do
test "lists all resources", %{conn: conn} do
[record_1, record_2] = insert_pair(:github_repo)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
test "filters resources by record id", %{conn: conn} do
[record_1, record_2 | _] = insert_list(3, :github_repo)
path = "github-repos/?filter[id]=#{record_1.id},#{record_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
record = insert(:github_repo)
conn
|> request_show(record)
|> json_response(200)
|> assert_id_from_response(record.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "update" do
@tag :authenticated
test "updates repo to add project", %{conn: conn, current_user: user} do
%{project: project} = record = setup_coderly_repo()
insert(:project_user, project: project, user: user, role: "admin")
attrs = %{project: project}
assert conn |> request_update(record, attrs) |> json_response(200)
user_id = user.id
tracking_properties = %{
id: record.id,
github_account_login: record.github_account_login,
github_account_type: record.github_account_type,
github_id: record.github_id,
github_repo_name: record.name,
project: project.title,
project_id: project.id
}
assert_received {:track, ^user_id, "Connected GitHub Repo to Project", ^tracking_properties}
end
@tag :authenticated
test "updates repo to remove project", %{conn: conn, current_user: user} do
%{project: project} = record = setup_coderly_repo()
insert(:project_user, project: project, user: user, role: "admin")
attrs = %{project_id: nil}
assert conn |> request_update(record, attrs) |> json_response(200)
user_id = user.id
tracking_properties = %{
id: record.id,
github_account_login: record.github_account_login,
github_account_type: record.github_account_type,
github_id: record.github_id,
github_repo_name: record.name,
project: "",
project_id: nil
}
assert_received {:track, ^user_id, "Disconnected GitHub Repo from Project", ^tracking_properties}
end
test "doesn't update and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "doesn't update and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_update |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent on update", %{conn: conn} do
assert conn |> request_update(:not_found) |> json_response(404)
end
end
end
<|start_filename|>test/support/github/success_api.ex<|end_filename|>
defmodule CodeCorps.GitHub.SuccessAPI do
@moduledoc ~S"""
A mocked GitHub API layer which returns a default successful response for all
GitHub API requests.
All tests in the test environment use this module as a mock for GitHub API
requests by default.
If certain tests explicitly depend on the data returned by GitHub, they can be
mocked individually using the `CodeCorps.GitHub.TestHelpers.with_mock_api`
macro.
As support for new GitHub endpoints is added, defaults for those endpoints
should be added here.
To assert a request has been made to GitHub as a result of an action, the
`assert_received` test helper can be used:
```
assert_received({:get, "https://api.github.com/user", body, headers, options})
```
"""
import CodeCorps.GitHub.TestHelpers
defmodule UnhandledGitHubEndpointError do
defexception message: "You have a GitHub API endpoint that's unhandled in tests."
end
defmodule GitHubMockResponseError do
defexception message: "There was a problem in building a response for your mocked GitHub API."
end
def request(method, url, body, headers, options) do
send(self(), {method, url, body, headers, options})
with {:ok, body} = get_body(method, url, body, headers, options) |> Poison.encode,
{:ok, code} = method |> success_code()
do
response = %HTTPoison.Response{body: body, request_url: url, status_code: code}
{:ok, response}
end
end
defp get_body(:head, _, _, _, _), do: ""
defp get_body(:post, "https://github.com/login/oauth/access_token", _, _, _) do
%{"access_token" => "foo_auth_token"}
end
defp get_body(method, "https://api.github.com/" <> endpoint, body, headers, options) do
get_body(method, endpoint |> String.split("/"), body, headers, options)
end
defp get_body(:get, ["user"], _, _, _), do: load_endpoint_fixture("user")
defp get_body(_method, ["installation", "repositories"], _, _, _) do
load_endpoint_fixture("installation_repositories")
end
defp get_body(:post, ["installations", _id, "access_tokens"], _, _, _) do
%{
"token" => "<PASSWORD>",
"expires_at" => Timex.now() |> Timex.shift(hours: 1) |> DateTime.to_iso8601
}
end
defp get_body(:get, ["repos", _owner, _repo, "issues", "comments"], _, _, _) do
load_endpoint_fixture("issues_comments")
end
defp get_body(:get, ["repos", _owner, _repo, "issues", _number], _, _, _) do
load_endpoint_fixture("issue")
end
defp get_body(:post, ["repos", _owner, _repo, "issues"], _, _, _) do
load_endpoint_fixture("issue")
end
defp get_body(:patch, ["repos", _owner, _repo, "issues", _number], _, _, _) do
load_endpoint_fixture("issue")
end
defp get_body(:post, ["repos", _owner, _repo, "issues", _number, "comments"], _, _, _) do
load_endpoint_fixture("issue_comment")
end
defp get_body(:patch, ["repos", _owner, _repo, "issues", "comments", _id], _, _, _) do
load_endpoint_fixture("issue_comment")
end
defp get_body(:get, ["repos", _owner, _repo, "issues"], _, _, _) do
load_endpoint_fixture("issues")
end
defp get_body(:get, ["repos", _owner, _repo, "pulls"], _, _, _) do
load_endpoint_fixture("pulls")
end
defp get_body(:get, ["repos", _owner, _repo, "pulls", _number], _, _, _) do
load_endpoint_fixture("pull_request")
end
defp get_body(method, endpoint, _, _, _) when is_binary(endpoint) do
raise UnhandledGitHubEndpointError, message: "You have an unhandled :#{method} request to #{endpoint}"
end
defp get_body(method, uri_parts, _, _, _) when is_list uri_parts do
endpoint = uri_parts |> Enum.join("/")
raise UnhandledGitHubEndpointError, message: "You have an unhandled API :#{method} request to #{endpoint}"
end
@spec success_code(atom) :: {:ok, integer} | {:error, :unexpected_code}
defp success_code(:get), do: {:ok, 200}
defp success_code(:post), do: {:ok, 201}
defp success_code(:patch), do: {:ok, 202}
defp success_code(:put), do: {:ok, 202}
defp success_code(:delete), do: {:ok, 204}
defp success_code(:head), do: {:ok, 204}
defp success_code(_), do: {:error, :unexpected_code}
end
<|start_filename|>lib/code_corps/emails/organization_invite_email.ex<|end_filename|>
defmodule CodeCorps.Emails.OrganizationInviteEmail do
import Bamboo.Email, only: [to: 2]
import Bamboo.PostmarkHelper
alias CodeCorps.{Emails.BaseEmail, OrganizationInvite, WebClient}
@spec create(OrganizationInvite.t) :: Bamboo.Email.t
def create(%OrganizationInvite{} = invite) do
BaseEmail.create
|> to(invite.email)
|> template(template_id(), build_model(invite))
end
@spec build_model(OrganizationInvite.t) :: map
defp build_model(%OrganizationInvite{} = invite) do
%{
organization_name: invite.organization_name,
invite_url: invite_url(invite.code, invite.organization_name),
subject: "Create your first project on Code Corps"
}
end
@spec invite_url(String.t, String.t) :: String.t
defp invite_url(code, organization_name) do
query_params = set_params(code, organization_name)
WebClient.url()
|> URI.merge("/organizations/new" <> "?" <> query_params)
|> URI.to_string
end
@spec set_params(String.t, String.t) :: binary
defp set_params(code, organization_name) do
%{code: code, organization_name: organization_name}
|> URI.encode_query
end
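# Illustrative sketch of the generated invite URL; the host comes from
# `WebClient.url/0` and is assumed here, and the code/name values are
# hypothetical:
#
#     invite_url("abc123", "Code Corps")
#     #=> "https://example.com/organizations/new?code=abc123&organization_name=Code+Corps"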
@spec template_id :: String.t
defp template_id, do: Application.get_env(:code_corps, :postmark_organization_invite_email_template)
end
<|start_filename|>test/lib/code_corps/emails/message_initiated_by_project_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.MessageInitiatedByProjectEmailTest do
use CodeCorps.DbAccessCase
use Bamboo.Test
alias CodeCorps.Emails.MessageInitiatedByProjectEmail
test "email works" do
%{message: message} = conversation =
insert(:conversation)
|> Repo.preload([[message: :project], :user])
email = MessageInitiatedByProjectEmail.create(message, conversation)
assert email.from == "Code Corps<<EMAIL>>"
assert email.to == conversation.user.email
template_model = email.private.template_model
assert template_model == %{
conversation_url: "http://localhost:4200/conversations/#{conversation.id}",
name: conversation.user.first_name,
project_title: message.project.title,
subject: "You have a new message from #{message.project.title}"
}
end
end
<|start_filename|>test/lib/code_corps/model/organization_test.exs<|end_filename|>
defmodule CodeCorps.OrganizationTest do
use CodeCorps.ModelCase
alias CodeCorps.Organization
describe "changeset" do
@valid_attrs %{description: "Building a better future.", name: "Code Corps"}
@invalid_attrs %{}
test "with valid attributes" do
changeset = Organization.changeset(%Organization{}, @valid_attrs)
assert changeset.valid?
end
test "with invalid attributes" do
changeset = Organization.changeset(%Organization{}, @invalid_attrs)
refute changeset.valid?
end
end
describe "create_changeset" do
@valid_attrs %{
cloudinary_public_id: "foo",
description: "Building a better future.",
name: "Code Corps",
owner_id: 1
}
@invalid_attrs %{}
test "with valid attributes" do
changeset = Organization.create_changeset(%Organization{}, @valid_attrs)
assert changeset.valid?
assert changeset.changes.slug == "code-corps"
end
test "with invalid attributes" do
changeset = Organization.create_changeset(%Organization{}, @invalid_attrs)
refute changeset.valid?
end
test "ensures owner (user) exists" do
changeset = Organization.create_changeset(%Organization{}, @valid_attrs)
{result, changeset} = changeset |> Repo.insert
assert result == :error
changeset |> assert_error_message(:owner, "does not exist")
end
test "sets approved to false" do
changeset = Organization.create_changeset(%Organization{}, @valid_attrs)
assert changeset |> get_field(:approved) == false
end
test "generates slug if none provided" do
changeset = Organization.create_changeset(%Organization{}, @valid_attrs)
assert changeset |> get_field(:slug) == "code-corps"
end
test "leaves out slug generation if slug is provided" do
attrs = @valid_attrs |> Map.put(:slug, "custom-slug")
changeset = Organization.create_changeset(%Organization{}, attrs)
assert changeset |> get_field(:slug) == "custom-slug"
end
end
describe "update_changeset" do
@valid_struct %Organization{
description: "Building a better future.",
name: "Code Corps"
}
@valid_attrs %{
approved: true
}
test "with valid struct and attributes" do
changeset = Organization.update_changeset(@valid_struct, @valid_attrs)
assert changeset.valid?
end
test "with invalid struct" do
changeset = Organization.update_changeset(%Organization{}, %{})
refute changeset.valid?
end
end
end
<|start_filename|>test/lib/code_corps_web/views/github_pull_request_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.GithubPullRequestViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
github_repo = insert(:github_repo)
github_pull_request = insert(:github_pull_request, github_repo: github_repo)
rendered_json = render(CodeCorpsWeb.GithubPullRequestView, "show.json-api", data: github_pull_request)
expected_json = %{
"data" => %{
"id" => github_pull_request.id |> Integer.to_string,
"type" => "github-pull-request",
"attributes" => %{
"github-created-at" => github_pull_request.github_created_at,
"github-updated-at" => github_pull_request.github_updated_at,
"html-url" => github_pull_request.html_url,
"merged" => github_pull_request.merged,
"number" => github_pull_request.number,
"state" => github_pull_request.state
},
"relationships" => %{
"github-repo" => %{
"data" => %{"id" => github_repo.id |> Integer.to_string, "type" => "github-repo"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>priv/repo/migrations/20171201073818_add_approval_requested_to_projects.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddApprovalRequestedToProjects do
use Ecto.Migration
def change do
alter table(:projects) do
add :approval_requested, :boolean, default: false
end
create index(:projects, [:approval_requested])
end
end
<|start_filename|>lib/code_corps/policy/user_category.ex<|end_filename|>
defmodule CodeCorps.Policy.UserCategory do
alias CodeCorps.UserCategory
alias CodeCorps.User
def create?(%User{admin: true}, %UserCategory{}), do: true
def create?(%User{id: id}, %{"user_id" => user_id}), do: id == user_id
def create?(%User{}, %{}), do: false
def delete?(%User{admin: true}, %UserCategory{}), do: true
def delete?(%User{id: id}, %UserCategory{user_id: user_id}), do: id == user_id
def delete?(%User{}, %{}), do: false
end
<|start_filename|>test/lib/code_corps_web/views/role_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.RoleViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
role = insert(:role)
role_skill = insert(:role_skill, role: role)
role = CodeCorpsWeb.RoleController.preload(role)
rendered_json = render(CodeCorpsWeb.RoleView, "show.json-api", data: role)
expected_json = %{
"data" => %{
"attributes" => %{
"ability" => role.ability,
"inserted-at" => role.inserted_at,
"kind" => role.kind,
"name" => role.name,
"updated-at" => role.updated_at,
},
"id" => role.id |> Integer.to_string,
"relationships" => %{
"role-skills" => %{
"data" => [
%{"id" => role_skill.id |> Integer.to_string, "type" => "role-skill"}
]
}
},
"type" => "role",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/lib/code_corps_web/views/password_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.PasswordViewTest do
use CodeCorpsWeb.ViewCase
test "renders show" do
email = "<EMAIL>"
rendered_json = render(CodeCorpsWeb.PasswordView, "show.json", %{email: email})
expected_json = %{
email: email
}
assert expected_json == rendered_json
refute Map.has_key?(expected_json, :token)
end
end
<|start_filename|>test/lib/code_corps_web/views/skill_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.SkillViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
skill = insert(:skill)
role_skill = insert(:role_skill, skill: skill)
skill = CodeCorpsWeb.SkillController.preload(skill)
rendered_json = render(CodeCorpsWeb.SkillView, "show.json-api", data: skill)
expected_json = %{
"data" => %{
"attributes" => %{
"description" => skill.description,
"inserted-at" => skill.inserted_at,
"title" => skill.title,
"updated-at" => skill.updated_at,
},
"id" => skill.id |> Integer.to_string,
"relationships" => %{
"role-skills" => %{
"data" => [
%{"id" => role_skill.id |> Integer.to_string, "type" => "role-skill"}
]
}
},
"type" => "skill",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps/emails/forgot_password_email.ex<|end_filename|>
defmodule CodeCorps.Emails.ForgotPasswordEmail do
import Bamboo.Email, only: [to: 2]
import Bamboo.PostmarkHelper
alias CodeCorps.{Emails.BaseEmail, User, WebClient}
@spec create(User.t, String.t) :: Bamboo.Email.t
def create(%User{} = user, token) do
BaseEmail.create
|> to(user.email)
|> template(template_id(), %{link: link(token)})
end
@spec template_id :: String.t
defp template_id, do: Application.get_env(:code_corps, :postmark_forgot_password_template)
@spec link(String.t) :: String.t
defp link(token) do
WebClient.url()
|> URI.merge("password/reset?token=#{token}")
|> URI.to_string
end
end
<|start_filename|>test/lib/code_corps/adapter/map_transformer_test.exs<|end_filename|>
defmodule CodeCorps.Adapter.MapTransformerTest do
use ExUnit.Case
alias CodeCorps.Adapter.MapTransformer
@mapping [{:id, ["id"]}, {:user_id, ["user", "id"]}]
describe "transform/2" do
test "transforms map correctly" do
map = %{"id" => 1, "user" => %{"id" => 1}}
assert MapTransformer.transform(map, @mapping) == %{id: 1, user_id: 1}
end
end
describe "transform_inverse/2" do
test "inverse transforms map correctly" do
map = %{id: 1, user_id: 1}
assert MapTransformer.transform_inverse(map, @mapping) == %{"id" => 1, "user" => %{"id" => 1}}
end
end
end
<|start_filename|>lib/code_corps/policy/skill.ex<|end_filename|>
defmodule CodeCorps.Policy.Skill do
alias CodeCorps.User
def create?(%User{admin: true}), do: true
def create?(%User{admin: false}), do: false
end
<|start_filename|>lib/code_corps/processor/processor.ex<|end_filename|>
defmodule CodeCorps.Processor do
@processor Application.get_env(:code_corps, :processor)
@type result :: {:ok, pid} | any
@callback process(fun :: (() -> any)) :: result
@spec process((() -> any)) :: result
def process(fun) do
@processor.process(fun)
end
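# Usage sketch (the work function is hypothetical); the configured :processor
# module decides how the function runs, returning `{:ok, pid}` for
# asynchronous processors or the raw result otherwise, per the `result` type
# above:
#
#     CodeCorps.Processor.process(fn -> do_some_work() end)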
end
<|start_filename|>priv/repo/migrations/20171106103153_add_unique_constraints_to_specific_task_lists.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddUniqueConstraintsToSpecificTaskLists do
@moduledoc false
use Ecto.Migration
def change do
# There is already a "task_lists_project_id_index", so we name explicitly
create unique_index(
"task_lists", [:project_id],
where: "done = true", name: "task_lists_project_id_done_index")
create unique_index(
"task_lists", [:project_id],
where: "pull_requests = true", name: "task_lists_project_id_pull_requests_index")
create unique_index(
"task_lists", [:project_id],
where: "inbox = true", name: "task_lists_project_id_inbox_index")
end
end
<|start_filename|>test/lib/code_corps/model/stripe_file_upload_test.exs<|end_filename|>
defmodule CodeCorps.StripeFileUploadTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeFileUpload
@valid_attrs %{
id_from_stripe: "abc123"
}
@invalid_attrs %{}
describe "create_changeset/2" do
test "reports as valid when attributes are valid" do
changeset = StripeFileUpload.create_changeset(%StripeFileUpload{}, @valid_attrs)
assert changeset.valid?
end
test "reports as invalid when attributes are invalid" do
changeset = StripeFileUpload.create_changeset(%StripeFileUpload{}, @invalid_attrs)
refute changeset.valid?
assert_error_message(changeset, :id_from_stripe, "can't be blank")
end
test "can optionally belong to a StripeConnectAccount" do
stripe_connect_account_id = insert(:stripe_connect_account).id
changes = Map.merge(@valid_attrs, %{stripe_connect_account_id: stripe_connect_account_id})
changeset = StripeFileUpload.create_changeset(%StripeFileUpload{}, changes)
assert changeset.valid?
end
test "existing StripeConnectAccount association is required" do
stripe_connect_account_id = "abc456"
changes = Map.merge(@valid_attrs, %{stripe_connect_account_id: stripe_connect_account_id})
changeset = StripeFileUpload.create_changeset(%StripeFileUpload{}, changes)
refute changeset.valid?
assert_error_message(changeset, :stripe_connect_account_id, "is invalid")
end
end
end
<|start_filename|>test/lib/code_corps/github/sync/comment/changeset_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Comment.ChangesetTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.GitHub.Sync
alias Ecto.Changeset
describe "create_changeset/3" do
test "assigns proper changes to the comment" do
task = insert(:task)
user = insert(:user)
github_comment = insert(:github_comment)
changeset =
github_comment
|> Sync.Comment.Changeset.create_changeset(task, user)
expected_body =
github_comment.body
|> Earmark.as_html!(%Earmark.Options{code_class_prefix: "language-"})
assert changeset |> Changeset.get_change(:created_at) == github_comment.github_created_at
assert changeset |> Changeset.get_change(:markdown) == github_comment.body
assert changeset |> Changeset.get_change(:modified_at) == github_comment.github_updated_at
assert changeset |> Changeset.get_change(:created_from) == "github"
assert changeset |> Changeset.get_change(:modified_from) == "github"
assert changeset |> Changeset.get_change(:body) == expected_body
assert changeset.changes.github_comment.action == :update
assert changeset.changes.github_comment.data == github_comment
assert changeset.changes.task.action == :update
assert changeset.changes.task.data == task
assert changeset.changes.user.action == :update
assert changeset.changes.user.data == user
assert changeset.valid?
end
end
describe "update_changeset/2" do
test "assigns proper changes to the comment" do
comment = insert(:comment)
github_comment = insert(:github_comment)
changeset =
comment
|> Sync.Comment.Changeset.update_changeset(github_comment)
expected_body =
github_comment.body
|> Earmark.as_html!(%Earmark.Options{code_class_prefix: "language-"})
assert changeset |> Changeset.get_change(:markdown) == github_comment.body
assert changeset |> Changeset.get_change(:modified_at) == github_comment.github_updated_at
assert changeset |> Changeset.get_field(:created_from) == "code_corps"
assert changeset |> Changeset.get_change(:modified_from) == "github"
assert changeset |> Changeset.get_change(:body) == expected_body
refute changeset |> Changeset.get_change(:task)
refute changeset |> Changeset.get_change(:github_comment)
refute changeset |> Changeset.get_change(:user)
assert changeset.valid?
end
test "validates that modified_at has not already happened" do
# Set the modified_at in the future
github_comment = insert(:github_comment)
modified_at =
github_comment.github_updated_at |> Timex.shift(days: 1)
comment =
:comment
|> insert(modified_at: modified_at, github_comment: github_comment)
changeset =
comment
|> Sync.Comment.Changeset.update_changeset(github_comment)
refute changeset.valid?
assert changeset.errors[:modified_at] ==
{"cannot be before the last recorded time", []}
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/comment_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.CommentControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :comment
@valid_attrs %{markdown: "I love elixir!"}
@invalid_attrs %{markdown: ""}
describe "index" do
test "lists all entries on index", %{conn: conn} do
path = conn |> comment_path(:index)
conn = conn |> get(path)
assert json_response(conn, 200)["data"] == []
end
test "filters resources on index", %{conn: conn} do
first_comment = insert(:comment)
second_comment = insert(:comment)
insert(:comment)
path = "comments/?filter[id]=#{first_comment.id},#{second_comment.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([first_comment.id, second_comment.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
comment = insert(:comment)
conn
|> request_show(comment)
|> json_response(200)
|> assert_id_from_response(comment.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
task = insert(:task)
attrs = @valid_attrs |> Map.merge(%{task: task, user: current_user})
json = conn |> request_create(attrs) |> json_response(201)
assert json
user_id = current_user.id
tracking_properties = %{
comment_id: String.to_integer(json["data"]["id"]),
task: task.title,
task_id: task.id,
project_id: task.project_id
}
assert_received {:track, ^user_id, "Created Comment", ^tracking_properties}
end
@tag :authenticated
test "does not create resource and renders errors when data is invalid", %{conn: conn, current_user: current_user} do
attrs = @invalid_attrs |> Map.merge(%{user: current_user})
json = conn |> request_create(attrs) |> json_response(422)
assert json["errors"] != %{}
end
test "does not create resource and renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_create(@valid_attrs) |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create(@valid_attrs) |> json_response(403)
end
end
describe "update" do
@tag :authenticated
test "updates and renders chosen resource when data is valid", %{conn: conn, current_user: current_user} do
comment = insert(:comment, user: current_user)
attrs = @valid_attrs |> Map.merge(%{user: current_user})
assert conn |> request_update(comment, attrs) |> json_response(200)
user_id = current_user.id
task = comment.task
tracking_properties = %{
comment_id: comment.id,
task: task.title,
task_id: task.id,
project_id: task.project_id
}
assert_received {:track, ^user_id, "Edited Comment", ^tracking_properties}
end
@tag :authenticated
test "does not update chosen resource and renders errors when data is invalid", %{conn: conn, current_user: current_user} do
comment = insert(:comment, user: current_user)
attrs = @invalid_attrs |> Map.merge(%{user: current_user})
json = conn |> request_update(comment, attrs) |> json_response(422)
assert json["errors"] != %{}
end
test "does not update resource and renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_update(@valid_attrs) |> json_response(401)
end
@tag :authenticated
test "does not update resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_update(@valid_attrs) |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_update(:not_found) |> json_response(404)
end
end
end
<|start_filename|>lib/code_corps/github/sync/github_user/github_user.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubUser do
@moduledoc ~S"""
In charge of syncing to a `GithubUser` record given a GitHub API payload
containing the user.
"""
alias CodeCorps.{GitHub.Adapters, GitHub.Sync, GithubUser, Repo}
@doc ~S"""
Finds or creates a `GithubUser` record using information in the GitHub API
payload.
"""
@spec create_or_update_github_user(map) :: {:ok, GithubUser.t}
def create_or_update_github_user(%{"user" => %{"id" => github_id} = params}) do
attrs = params |> Adapters.User.to_github_user()
case GithubUser |> Repo.get_by(github_id: github_id) do
nil ->
%GithubUser{}
|> Sync.GithubUser.Changeset.changeset(attrs)
|> Repo.insert()
%GithubUser{} = record ->
record
|> Sync.GithubUser.Changeset.changeset(attrs)
|> Repo.update()
end
end
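# Payload sketch (field values are hypothetical); only a nested "user" map
# carrying an "id" is required by the pattern match above, and the remaining
# keys are adapted by `Adapters.User.to_github_user/1`:
#
#     create_or_update_github_user(%{"user" => %{"id" => 123, "login" => "octocat"}})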
end
<|start_filename|>test/lib/code_corps_web/views/error_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ErrorViewTest do
use CodeCorpsWeb.ViewCase
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.json-api" do
rendered_json = render(CodeCorpsWeb.ErrorView, "404.json-api", [])
expected_json = %{
"errors" => [%{title: "404 Not Found", detail: "404 Not Found", status: "404"}],
"jsonapi" => %{"version" => "1.0"}
}
assert rendered_json == expected_json
end
test "renders 500.json-api" do
rendered_json = render(CodeCorpsWeb.ErrorView, "500.json-api", [])
expected_json = %{
"errors" => [%{title: "500 Internal Server Error", detail: "500 Internal Server Error", status: "500"}],
"jsonapi" => %{"version" => "1.0"}
}
assert rendered_json == expected_json
end
test "render any other" do
string = render_to_string(CodeCorpsWeb.ErrorView, "505.json-api", [])
assert String.contains? string, "Internal Server Error"
end
end
<|start_filename|>lib/code_corps/stripe_service/adapters/stripe_platform_card.ex<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripePlatformCardAdapter do
import CodeCorps.MapUtils, only: [rename: 3, keys_to_string: 1]
@stripe_attributes [:brand, :customer, :cvc_check, :exp_month, :exp_year, :id, :last4, :name, :user_id]
@spec to_params(Stripe.Card.t, map) :: {:ok, map}
def to_params(%Stripe.Card{} = stripe_card, %{} = attributes) do
result =
stripe_card
|> Map.from_struct
|> Map.take(@stripe_attributes)
|> rename(:id, :id_from_stripe)
|> rename(:customer, :customer_id_from_stripe)
|> keys_to_string
|> add_non_stripe_attributes(attributes)
{:ok, result}
end
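# Transformation sketch (values are hypothetical; other taken fields omitted
# for brevity): the card's :id is renamed to "id_from_stripe", :customer to
# "customer_id_from_stripe", keys become strings, and "user_id" is merged in
# from the extra attributes map:
#
#     to_params(%Stripe.Card{id: "card_123", customer: "cus_456", last4: "4242"}, %{"user_id" => 1})
#     #=> {:ok, %{"id_from_stripe" => "card_123", "customer_id_from_stripe" => "cus_456",
#     #           "last4" => "4242", "user_id" => 1, ...}}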
@non_stripe_attributes ["user_id"]
@spec add_non_stripe_attributes(map, map) :: map
defp add_non_stripe_attributes(%{} = params, %{} = attributes) do
attributes
|> Map.take(@non_stripe_attributes)
|> Map.merge(params)
end
end
<|start_filename|>test/lib/code_corps/helpers/url_test.exs<|end_filename|>
defmodule CodeCorps.Helpers.URLTest do
use ExUnit.Case, async: true
import CodeCorps.Helpers.URL
alias Ecto.Changeset
test "returns nil when nil" do
changeset = create_prefixed_changeset(nil)
assert Changeset.get_change(changeset, :url) == nil
end
test "returns the original when starts with http://" do
original = "http://www.google.com"
changeset = create_prefixed_changeset(original)
assert Changeset.get_change(changeset, :url) == original
end
test "returns the original when starts with https://" do
original = "https://www.google.com"
changeset = create_prefixed_changeset(original)
assert Changeset.get_change(changeset, :url) == original
end
test "returns prefixed with http:// in every other case" do
changeset = create_prefixed_changeset("www.google.com")
assert Changeset.get_change(changeset, :url) == "http://www.google.com"
end
defp create_prefixed_changeset(value) do
%Changeset{changes: %{url: value}} |> prefix_url(:url)
end
end
<|start_filename|>test/lib/code_corps_web/controllers/organization_invite_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.OrganizationInviteControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :organization_invite
use Bamboo.Test
@valid_attrs %{email: "<EMAIL>", organization_name: "<NAME>"}
@invalid_attrs %{email: "code", organization_name: ""}
describe "show" do
test "shows chosen resource", %{conn: conn} do
organization_invite = insert(:organization_invite)
conn
|> request_show(organization_invite)
|> json_response(200)
|> assert_id_from_response(organization_invite.id)
end
test "filters resources on index", %{conn: conn} do
[organization_invite_1, organization_invite_2 | _] = insert_list(3, :organization_invite)
path = "organization-invites/?filter[id]=#{organization_invite_1.id},#{organization_invite_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([organization_invite_1.id, organization_invite_2.id])
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag authenticated: :admin
test "creates and renders resource when data is valid, sends valid email", %{conn: conn} do
assert conn |> request_create(@valid_attrs) |> json_response(201)
organization_invite_email =
CodeCorps.OrganizationInvite
|> first()
|> Repo.one()
|> CodeCorps.Emails.OrganizationInviteEmail.create()
assert_delivered_email organization_invite_email
end
@tag authenticated: :admin
test "renders 422 error when data is invalid", %{conn: conn} do
assert conn |> request_create(@invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
end
describe "update" do
@tag authenticated: :admin
test "updates chosen resource", %{conn: conn} do
assert conn |> request_update(@valid_attrs) |> json_response(200)
end
test "renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag authenticated: :admin
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_update(:not_found) |> json_response(404)
end
end
end
<|start_filename|>lib/code_corps/auth/error_handler.ex<|end_filename|>
defmodule CodeCorps.Auth.ErrorHandler do
use CodeCorpsWeb, :controller
def auth_error(conn, {type, _reason}, _opts) do
conn
|> put_status(401)
|> render(CodeCorpsWeb.TokenView, "401.json", message: to_string(type))
end
end
<|start_filename|>lib/code_corps_web/channels/conversation_channel.ex<|end_filename|>
defmodule CodeCorpsWeb.ConversationChannel do
use Phoenix.Channel
alias CodeCorps.{Conversation, Policy, Repo, User}
alias Phoenix.Socket
@spec join(String.t, map, Socket.t) :: {:ok, Socket.t} | {:error, map}
def join("conversation:" <> id, %{}, %Socket{} = socket) do
with %Conversation{} = conversation <- Conversation |> Repo.get(id),
%User{} = current_user <- socket.assigns[:current_user],
{:ok, :authorized} <- current_user |> Policy.authorize(:show, conversation, %{}) do
{:ok, socket}
else
nil -> {:error, %{reason: "unauthenticated"}}
{:error, :not_authorized} -> {:error, %{reason: "unauthorized"}}
end
end
def event("new:conversation-part", socket, message) do
broadcast socket, "new:conversation-part", message
{:ok, socket}
end
def broadcast_new_conversation_part(conversation_part) do
channel = "conversation:#{conversation_part.conversation_id}"
event = "new:conversation-part"
payload = %{
author_id: conversation_part.author_id,
id: conversation_part.id
}
CodeCorpsWeb.Endpoint.broadcast(channel, event, payload)
end
end
<|start_filename|>lib/code_corps/stripe_testing/fixtures/account.json<|end_filename|>
{
"business_name": "<NAME>",
"business_primary_color": null,
"business_url": "codecorps.org",
"charges_enabled": true,
"country": "US",
"default_currency": "usd",
"details_submitted": true,
"display_name": "<NAME>",
"email": "<EMAIL>",
"external_accounts": {
"object": "list",
"data": [],
"has_more": false,
"total_count": 0,
"url": "/v1/accounts/acct_123/external_accounts"
},
"id": "acct_123",
"object": "account",
"metadata": {},
"statement_descriptor": "CODECORPS.ORG",
"support_email": null,
"support_phone": "1234567890",
"support_url": null,
"timezone": "America/Los_Angeles",
"type": "custom",
"payouts_enabled": true
}
<|start_filename|>lib/code_corps/github/github.ex<|end_filename|>
defmodule CodeCorps.GitHub do
alias CodeCorps.GitHub.{
API,
API.Headers
}
defmodule APIErrorObject do
@moduledoc """
Represents an error object from the GitHub API.
Used in some `APIError`s when the API's JSON response contains an
`errors` key.
The full details of error objects can be found in the
[GitHub API documentation](https://developer.github.com/v3/#client-errors).
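    For illustration only, a minimal sketch of building one of these structs by
    hand (the `code`, `field` and `resource` values here are hypothetical):
        CodeCorps.GitHub.APIErrorObject.new(code: "missing_field", field: "title", resource: "Issue")
        # => %CodeCorps.GitHub.APIErrorObject{code: "missing_field", field: "title", resource: "Issue"}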
"""
@type t :: %__MODULE__{}
defstruct [:code, :field, :resource]
def new(opts) do
struct(__MODULE__, opts)
end
end
defmodule APIError do
@moduledoc """
Represents a client error from the GitHub API.
You can read more about client errors in the
[GitHub API documentation](https://developer.github.com/v3/#client-errors).
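    As a rough sketch (the status code and message are hypothetical), a plain
    client error without an `errors` list would be built like this:
        error = CodeCorps.GitHub.APIError.new({404, %{"message" => "Not Found"}})
        # error.status_code => 404; error.message => "Not Found"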
"""
defstruct [:documentation_url, :errors, :message, :status_code]
@type t :: %__MODULE__{
documentation_url: String.t | nil,
errors: list | nil,
message: String.t | nil,
status_code: pos_integer | nil
}
@spec new({integer, map}) :: t
def new({status_code, %{"message" => message, "errors" => errors}}) do
errors = Enum.into(errors, [], fn error -> convert_error(error) end)
%__MODULE__{
errors: errors,
message: message,
status_code: status_code
}
end
def new({status_code, %{"message" => message, "documentation_url" => documentation_url}}) do
%__MODULE__{
documentation_url: documentation_url,
message: message,
status_code: status_code
}
end
def new({status_code, %{"message" => message}}) do
%__MODULE__{
message: message,
status_code: status_code
}
end
@spec convert_error(map) :: APIErrorObject.t
defp convert_error(%{"code" => code, "field" => field, "resource" => resource}) do
APIErrorObject.new([code: code, field: field, resource: resource])
end
end
defmodule HTTPClientError do
defstruct [:reason, message: """
The GitHub HTTP client encountered an error while communicating with
the GitHub API.
"""]
@type t :: %__MODULE__{}
def new(opts) do
struct(__MODULE__, opts)
end
end
@type method :: :get | :post | :put | :delete | :patch | :head
@type body :: {:multipart, list} | map
@type headers :: %{String.t => String.t} | %{}
@type response :: {:ok, map} | {:error, api_error_struct}
@type api_error_struct :: APIError.t | HTTPClientError.t
@typedoc ~S"""
Potential errors which can happen when retrieving data from a paginated
endpoint.
If a new access token is required, then it is regenerated and stored into an
installation, which can result in any of
- `Ecto.Changeset.t`
- `CodeCorps.GitHub.APIError.t`
- `CodeCorps.GitHub.HTTPClientError.t`
Once that is done, the actual request is made, which can error out with
- `CodeCorps.GitHub.Errors.PaginationError.t`
"""
@type paginated_endpoint_error :: Ecto.Changeset.t | APIError.t | HTTPClientError.t | API.Errors.PaginationError.t
@doc """
A low level utility function to make a direct request to the GitHub API.
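  As a rough sketch (the endpoint is hypothetical; the empty map body, headers
  and options mirror how `CodeCorps.GitHub.API.PullRequest.from_url/2` calls
  this function, with authentication details normally supplied via `options`):
      {:ok, response_body} =
        CodeCorps.GitHub.request(:get, "repos/octocat/hello-world/issues", %{}, %{}, [])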
"""
@spec request(method, String.t, body, headers, list) :: response
def request(method, endpoint, body, headers, options) do
with {:ok, encoded_body} <- body |> Poison.encode do
API.request(
method,
api_url_for(endpoint),
encoded_body,
headers |> Headers.user_request(options),
options
)
else
_ -> {:error, HTTPClientError.new(reason: :body_encoding_error)}
end
end
@doc ~S"""
A low level utility function to make an authenticated request to a GitHub API
endpoint which supports pagination, and fetch all the pages from that endpoint
at once, by making parallel requests to each page and aggregating the results.
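  For example (sketch only; the endpoint mirrors the one used by
  `CodeCorps.GitHub.API.Installation.repositories/1`, while the empty headers
  and options stand in for real authentication details):
      {:ok, responses} = CodeCorps.GitHub.get_all("installation/repositories", %{}, [])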
"""
@spec get_all(String.t, headers, list) :: {:ok, list(map)} | {:error, API.Errors.PaginationError.t} | {:error, api_error_struct}
def get_all(endpoint, headers, options) do
API.get_all(
api_url_for(endpoint),
headers |> Headers.user_request(options),
options
)
end
@doc """
A low level utility function to make an authenticated request to the
GitHub API on behalf of a GitHub App or integration
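  A hypothetical sketch (the installation id `12345` is made up; the endpoint
  shape follows the GitHub App installation access token endpoint):
      CodeCorps.GitHub.integration_request(:post, "installations/12345/access_tokens", %{}, %{}, [])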
"""
@spec integration_request(method, String.t, body, headers, list) :: response
def integration_request(method, endpoint, body, headers, options) do
with {:ok, encoded_body} <- body |> Poison.encode do
API.request(
method,
api_url_for(endpoint),
encoded_body,
headers |> Headers.integration_request,
options
)
else
_ -> {:error, HTTPClientError.new(reason: :body_encoding_error)}
end
end
@token_url "https://github.com/login/oauth/access_token"
@doc """
A low level utility function to fetch a GitHub user's OAuth access token
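  Both arguments come from GitHub's OAuth callback; the values below are
  placeholders:
      CodeCorps.GitHub.user_access_token_request("some-oauth-code", "some-state")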
"""
@spec user_access_token_request(String.t, String.t) :: response
def user_access_token_request(code, state) do
with {:ok, encoded_body} <- code |> build_access_token_params(state) |> Poison.encode do
API.request(
:post,
@token_url,
encoded_body,
Headers.access_token_request,
[]
)
else
_ -> {:error, HTTPClientError.new(reason: :body_encoding_error)}
end
end
@api_url "https://api.github.com/"
@spec api_url_for(String.t) :: String.t
defp api_url_for(endpoint) when is_binary(endpoint) do
@api_url |> URI.merge(endpoint) |> URI.to_string
end
@spec build_access_token_params(String.t, String.t) :: map
defp build_access_token_params(code, state) do
%{
client_id: Application.get_env(:code_corps, :github_app_client_id),
client_secret: Application.get_env(:code_corps, :github_app_client_secret),
code: code,
state: state
}
end
end
<|start_filename|>lib/code_corps/github/sync/github_repo/github_repo.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubRepo do
import Ecto.Query
alias CodeCorps.{
GitHub.Adapters,
GitHub.API.Installation,
GitHub.Utils.ResultAggregator,
GithubAppInstallation,
GithubRepo,
Repo
}
alias Ecto.{Changeset, Multi}
@typep commit_result :: {:ok, GithubRepo.t()} | {:error, Changeset.t()}
@typep aggregated_result ::
{:ok, list(GithubRepo.t())} |
{:error, {list(GithubRepo.t()), list(Changeset.t())}}
@spec sync_installation(GithubAppInstallation.t(), map) :: aggregated_result()
def sync_installation(
%GithubAppInstallation{} = installation,
%{"action" => "added", "repositories_added" => repositories}) do
do_sync_installation(installation, [], repositories)
end
def sync_installation(
%GithubAppInstallation{} = installation,
%{"action" => "removed", "repositories_removed" => repositories}) do
github_ids = repositories |> Enum.map(&Map.get(&1, "id"))
do_sync_installation(installation, github_ids, [])
end
@spec sync_installation(GithubAppInstallation.t()) ::
aggregated_result() | {:error, struct}
def sync_installation(%GithubAppInstallation{} = installation) do
with {:ok, payloads} <- installation |> Installation.repositories() do
%GithubAppInstallation{github_repos: repos} = installation =
installation |> Repo.preload(:github_repos)
master_id_list = payloads |> Enum.map(&Map.get(&1, "id"))
ids_to_delete =
repos
|> Enum.filter(fn repo -> not(repo.github_id in master_id_list) end)
|> Enum.map(&Map.get(&1, :github_id))
do_sync_installation(installation, ids_to_delete, payloads)
else
{:error, api_error} -> {:error, {:api_error, api_error}}
end
end
@spec do_sync_installation(GithubAppInstallation.t(), list, list) ::
aggregated_result()
defp do_sync_installation(
%GithubAppInstallation{} = installation, ids_to_delete, payloads_to_sync)
when is_list(ids_to_delete) and is_list(payloads_to_sync) do
Multi.new
|> Multi.run(:delete, fn _ -> ids_to_delete |> delete_all() end)
|> Multi.run(:sync, fn _ -> installation |> sync_all(payloads_to_sync) end)
|> Multi.run(:mark_processed, fn _ -> installation |> mark_processed() end)
|> Repo.transaction()
|> marshall_result()
end
@spec sync_all(GithubAppInstallation.t(), list) :: aggregated_result()
defp sync_all(%GithubAppInstallation{} = installation, payloads)
when is_list(payloads) do
payloads
|> Enum.map(&find_or_create(installation, &1))
|> ResultAggregator.aggregate()
end
@spec delete_all(list) :: {:ok, list(GithubRepo.t)}
defp delete_all(github_ids) when is_list(github_ids) do
GithubRepo
|> where([r], r.github_id in ^github_ids)
|> Repo.delete_all(returning: true)
|> (fn {_count, records} -> {:ok, records} end).()
end
@spec find_or_create(GithubAppInstallation.t(), map) :: {:ok, GithubRepo.t()} | {:error, Changeset.t()}
defp find_or_create(%GithubAppInstallation{} = installation, %{} = payload) do
case find_repo(payload) do
nil -> create_repo(installation, payload)
%GithubRepo{} = repo -> repo |> update_repo(payload)
end
end
@spec find_repo(map) :: GithubRepo.t() | nil
defp find_repo(%{"id" => github_id}) do
GithubRepo
|> Repo.get_by(github_id: github_id)
|> Repo.preload(:github_app_installation)
end
@spec create_repo(GithubAppInstallation.t(), map) :: commit_result()
defp create_repo(%GithubAppInstallation{} = installation, %{} = payload) do
attrs =
payload
|> Adapters.Repo.from_api()
|> Map.merge(installation |> Adapters.AppInstallation.to_github_repo_attrs())
%GithubRepo{}
|> GithubRepo.changeset(attrs)
|> Changeset.put_assoc(:github_app_installation, installation)
|> Repo.insert()
end
@spec update_repo(GithubRepo.t(), map) :: commit_result()
defp update_repo(%GithubRepo{} = github_repo, %{} = payload) do
github_repo
|> Changeset.change(payload |> Adapters.Repo.from_api())
|> Repo.update()
end
@spec mark_processed(GithubAppInstallation.t()) :: {:ok, GithubAppInstallation.t()}
defp mark_processed(%GithubAppInstallation{} = installation) do
installation
|> Changeset.change(%{state: "processed"})
|> Repo.update()
end
@spec marshall_result(tuple) :: tuple
defp marshall_result({:ok, %{sync: synced_repos, delete: deleted_repos}}) do
{:ok, {synced_repos, deleted_repos}}
end
defp marshall_result({:error, errored_step, error_response, _steps}) do
{:error, {errored_step, error_response}}
end
end
<|start_filename|>test/lib/code_corps_web/controllers/stripe_connect_subscription_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectSubscriptionControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :stripe_connect_subscription
defp build_payload(user, project, quantity) do
%{
"data" => %{
"attributes" => %{
"quantity" => quantity,
"project-id" => project.id
},
"relationships" => %{
"user" => %{"data" => %{"id" => user.id}}
}
}
}
end
defp make_create_request(conn, payload) do
path = conn |> stripe_connect_subscription_path(:create)
conn |> post(path, payload)
end
describe "create" do
@tag :authenticated
test "creates and renders resource when user is authenticated and authorized", %{conn: conn, current_user: current_user} do
# make project ready to accept donations
organization = insert(:organization)
insert(:stripe_connect_account, organization: organization, charges_enabled: true)
project = insert(:project, organization: organization)
insert(:stripe_connect_plan, project: project)
# make user ready to donate
insert(:stripe_platform_customer, user: current_user)
insert(:stripe_platform_card, user: current_user)
payload = build_payload(current_user, project, 10)
assert conn |> make_create_request(payload) |> json_response(201)
user_id = current_user.id
assert_received {:track, ^user_id, "Created Stripe Connect Subscription", %{}}
end
test "does not create resource and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "show" do
@tag :authenticated
test "shows resource when authenticated and authorized", %{conn: conn, current_user: current_user} do
stripe_connect_plan = insert(:stripe_connect_plan)
stripe_connect_subscription =
insert(:stripe_connect_subscription, user: current_user, stripe_connect_plan: stripe_connect_plan)
conn
|> request_show(stripe_connect_subscription)
|> json_response(200)
|> assert_id_from_response(stripe_connect_subscription.id)
end
test "renders 401 when unauthenticated", %{conn: conn} do
stripe_connect_subscription = insert(:stripe_connect_subscription)
assert conn |> request_show(stripe_connect_subscription) |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
stripe_connect_subscription = insert(:stripe_connect_subscription)
assert conn |> request_show(stripe_connect_subscription) |> json_response(403)
end
@tag :authenticated
test "renders 404 when record not found", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
end
<|start_filename|>lib/code_corps/policy/policy.ex<|end_filename|>
defmodule CodeCorps.Policy do
@moduledoc ~S"""
Handles authorization for various API actions performed on objects in the database.
"""
alias CodeCorps.{
Category,
Comment,
Conversation,
ConversationPart,
DonationGoal,
GithubAppInstallation,
GithubEvent,
GithubRepo,
Message,
Organization,
OrganizationInvite,
OrganizationGithubAppInstallation,
Policy,
Preview,
Project,
ProjectCategory,
ProjectSkill,
ProjectUser,
Role,
RoleSkill,
Skill,
StripeConnectAccount,
StripeConnectPlan,
StripeConnectSubscription,
StripePlatformCard,
StripePlatformCustomer,
Task,
TaskSkill,
User,
UserCategory,
UserRole,
UserSkill,
UserTask
}
@doc ~S"""
Determines if the specified user can perform the specified action on the
specified resource.
The resource can be a record, when performing an action on an existing record,
or it can be a map of parameters, when creating a new record.
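  As a sketch of typical usage (`current_user` and `task` are assumed to be
  records loaded elsewhere):
      CodeCorps.Policy.authorize(current_user, :update, task)
      # => {:ok, :authorized} or {:error, :not_authorized}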
"""
@spec authorize(User.t, atom, struct, map) :: {:ok, :authorized} | {:error, :not_authorized}
def authorize(%User{} = user, action, struct, %{} = params \\ %{}) do
case user |> can?(action, struct, params) do
true -> {:ok, :authorized}
false -> {:error, :not_authorized}
end
end
@doc ~S"""
Scopes a queryable so it's only able to return those records the specified
user is authorized to view.
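  For example, a controller might narrow a conversation query like this
  (sketch only; `current_user` is assumed to be set by authentication):
      CodeCorps.Conversation
      |> CodeCorps.Policy.scope(current_user)
      |> CodeCorps.Repo.all()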
"""
@spec scope(module, User.t) :: Ecto.Queryable.t
def scope(Message, %User{} = current_user), do: Message |> Policy.Message.scope(current_user)
def scope(Conversation, %User{} = current_user), do: Conversation |> Policy.Conversation.scope(current_user)
def scope(ConversationPart, %User{} = current_user), do: ConversationPart |> Policy.ConversationPart.scope(current_user)
@spec can?(User.t, atom, struct, map) :: boolean
# Category
defp can?(%User{} = current_user, :create, %Category{}, %{}), do: Policy.Category.create?(current_user)
defp can?(%User{} = current_user, :update, %Category{}, %{}), do: Policy.Category.update?(current_user)
# Comment
defp can?(%User{} = current_user, :create, %Comment{}, %{} = params), do: Policy.Comment.create?(current_user, params)
defp can?(%User{} = current_user, :update, %Comment{} = comment, %{}), do: Policy.Comment.update?(current_user, comment)
# Conversation
defp can?(%User{} = current_user, :show, %Conversation{} = conversation, %{}), do: Policy.Conversation.show?(current_user, conversation)
defp can?(%User{} = current_user, :update, %Conversation{} = conversation, %{}), do: Policy.Conversation.update?(current_user, conversation)
# ConversationPart
defp can?(%User{} = current_user, :create, %ConversationPart{}, %{} = params), do: Policy.ConversationPart.create?(current_user, params)
defp can?(%User{} = current_user, :show, %ConversationPart{} = conversation_part, %{}), do: Policy.ConversationPart.show?(current_user, conversation_part)
# DonationGoal
defp can?(%User{} = current_user, :create, %DonationGoal{}, %{} = params), do: Policy.DonationGoal.create?(current_user, params)
defp can?(%User{} = current_user, :update, %DonationGoal{} = donation_goal, %{}), do: Policy.DonationGoal.update?(current_user, donation_goal)
defp can?(%User{} = current_user, :delete, %DonationGoal{} = donation_goal, %{}), do: Policy.DonationGoal.delete?(current_user, donation_goal)
# GithubAppInstallation
defp can?(%User{} = current_user, :create, %GithubAppInstallation{}, %{} = params), do: Policy.GithubAppInstallation.create?(current_user, params)
# GithubEvent
defp can?(%User{} = current_user, :show, %GithubEvent{}, %{}), do: Policy.GithubEvent.show?(current_user)
defp can?(%User{} = current_user, :index, %GithubEvent{}, %{}), do: Policy.GithubEvent.index?(current_user)
defp can?(%User{} = current_user, :update, %GithubEvent{}, %{}), do: Policy.GithubEvent.update?(current_user)
# GithubRepo
defp can?(%User{} = current_user, :update, %GithubRepo{} = github_repo, %{} = params), do: Policy.GithubRepo.update?(current_user, github_repo, params)
# Message
defp can?(%User{} = current_user, :show, %Message{} = message, %{}), do: Policy.Message.show?(current_user, message)
defp can?(%User{} = current_user, :create, %Message{}, %{} = params), do: Policy.Message.create?(current_user, params)
# Organization
defp can?(%User{} = current_user, :create, %Organization{}, %{} = params), do: Policy.Organization.create?(current_user, params)
defp can?(%User{} = current_user, :update, %Organization{} = organization, %{} = params), do: Policy.Organization.update?(current_user, organization, params)
# OrganizationGithubAppInstallation
defp can?(%User{} = current_user, :create, %OrganizationGithubAppInstallation{}, %{} = params), do: Policy.OrganizationGithubAppInstallation.create?(current_user, params)
defp can?(%User{} = current_user, :delete, %OrganizationGithubAppInstallation{} = organization_github_app_installation, %{}),
do: Policy.OrganizationGithubAppInstallation.delete?(current_user, organization_github_app_installation)
# OrganizationInvite
defp can?(%User{} = current_user, :create, %OrganizationInvite{}, %{}), do: Policy.OrganizationInvite.create?(current_user)
defp can?(%User{} = current_user, :update, %OrganizationInvite{}, %{}), do: Policy.OrganizationInvite.update?(current_user)
# Preview
defp can?(%User{} = current_user, :create, %Preview{}, %{} = params), do: Policy.Preview.create?(current_user, params)
# Project
defp can?(%User{} = current_user, :create, %Project{}, %{} = params), do: Policy.Project.create?(current_user, params)
defp can?(%User{} = current_user, :update, %Project{} = project, params = %{}), do: Policy.Project.update?(current_user, project, params)
# ProjectCategory
defp can?(%User{} = current_user, :create, %ProjectCategory{}, %{} = params), do: Policy.ProjectCategory.create?(current_user, params)
defp can?(%User{} = current_user, :delete, %ProjectCategory{} = project_category, %{}), do: Policy.ProjectCategory.delete?(current_user, project_category)
# ProjectSkill
defp can?(%User{} = current_user, :create, %ProjectSkill{}, %{} = params), do: Policy.ProjectSkill.create?(current_user, params)
defp can?(%User{} = current_user, :delete, %ProjectSkill{} = project_skill, %{}), do: Policy.ProjectSkill.delete?(current_user, project_skill)
# ProjectUser
defp can?(%User{} = current_user, :create, %ProjectUser{}, %{} = params), do: Policy.ProjectUser.create?(current_user, params)
defp can?(%User{} = current_user, :update, %ProjectUser{} = project_user, %{} = params), do: Policy.ProjectUser.update?(current_user, project_user, params)
defp can?(%User{} = current_user, :delete, %ProjectUser{} = project_user, %{}), do: Policy.ProjectUser.delete?(current_user, project_user)
# Role
defp can?(%User{} = current_user, :create, %Role{}, %{}), do: Policy.Role.create?(current_user)
# RoleSkill
defp can?(%User{} = current_user, :create, %RoleSkill{}, %{}), do: Policy.RoleSkill.create?(current_user)
defp can?(%User{} = current_user, :delete, %RoleSkill{}, %{}), do: Policy.RoleSkill.delete?(current_user)
# Skill
defp can?(%User{} = current_user, :create, %Skill{}, %{}), do: Policy.Skill.create?(current_user)
# StripeConnectAccount
defp can?(%User{} = current_user, :show, %StripeConnectAccount{} = stripe_connect_account, %{}),
do: Policy.StripeConnectAccount.show?(current_user, stripe_connect_account)
defp can?(%User{} = current_user, :create, %StripeConnectAccount{}, %{} = params),
do: Policy.StripeConnectAccount.create?(current_user, params)
defp can?(%User{} = current_user, :update, %StripeConnectAccount{} = stripe_connect_account, %{}),
do: Policy.StripeConnectAccount.update?(current_user, stripe_connect_account)
# StripeConnectPlan
defp can?(%User{} = current_user, :show, %StripeConnectPlan{} = stripe_connect_plan, %{}),
do: Policy.StripeConnectPlan.show?(current_user, stripe_connect_plan)
defp can?(%User{} = current_user, :create, %StripeConnectPlan{}, %{} = params),
do: Policy.StripeConnectPlan.create?(current_user, params)
# StripeConnectSubscription
defp can?(%User{} = current_user, :show, %StripeConnectSubscription{} = stripe_connect_subscription, %{}),
do: Policy.StripeConnectSubscription.show?(current_user, stripe_connect_subscription)
defp can?(%User{} = current_user, :create, %StripeConnectSubscription{}, %{} = params),
do: Policy.StripeConnectSubscription.create?(current_user, params)
# StripePlatformCard
defp can?(%User{} = current_user, :show, %StripePlatformCard{} = stripe_platform_card, %{}),
do: Policy.StripePlatformCard.show?(current_user, stripe_platform_card)
defp can?(%User{} = current_user, :create, %StripePlatformCard{}, %{} = params),
do: Policy.StripePlatformCard.create?(current_user, params)
# StripePlatformCustomer
defp can?(%User{} = current_user, :create, %StripePlatformCustomer{}, %{} = params),
do: Policy.StripePlatformCustomer.create?(current_user, params)
defp can?(%User{} = current_user, :show, %StripePlatformCustomer{} = stripe_platform_customer, %{}),
do: Policy.StripePlatformCustomer.show?(current_user, stripe_platform_customer)
# Task
defp can?(%User{} = current_user, :create, %Task{}, %{} = params), do: Policy.Task.create?(current_user, params)
defp can?(%User{} = current_user, :update, %Task{} = task, %{}), do: Policy.Task.update?(current_user, task)
# TaskSkill
defp can?(%User{} = current_user, :create, %TaskSkill{}, %{} = params), do: Policy.TaskSkill.create?(current_user, params)
defp can?(%User{} = current_user, :delete, %TaskSkill{} = task_skill, %{}), do: Policy.TaskSkill.delete?(current_user, task_skill)
# User
defp can?(%User{} = current_user, :update, %User{} = user, %{}), do: Policy.User.update?(current_user, user)
# UserCategory
defp can?(%User{} = current_user, :create, %UserCategory{}, %{} = params), do: Policy.UserCategory.create?(current_user, params)
defp can?(%User{} = current_user, :delete, %UserCategory{} = user_category, %{}), do: Policy.UserCategory.delete?(current_user, user_category)
# UserRole
defp can?(%User{} = current_user, :create, %UserRole{}, %{} = params), do: Policy.UserRole.create?(current_user, params)
defp can?(%User{} = current_user, :delete, %UserRole{} = user_role, %{}), do: Policy.UserRole.delete?(current_user, user_role)
# UserSkill
defp can?(%User{} = current_user, :create, %UserSkill{}, %{} = params), do: Policy.UserSkill.create?(current_user, params)
defp can?(%User{} = current_user, :delete, %UserSkill{} = user_skill, %{}), do: Policy.UserSkill.delete?(current_user, user_skill)
# UserTask
defp can?(%User{} = current_user, :create, %UserTask{}, %{} = params), do: Policy.UserTask.create?(current_user, params)
defp can?(%User{} = current_user, :update, %UserTask{} = user_task, %{}), do: Policy.UserTask.update?(current_user, user_task)
defp can?(%User{} = current_user, :delete, %UserTask{} = user_task, %{}), do: Policy.UserTask.delete?(current_user, user_task)
end
<|start_filename|>test/lib/code_corps/services/project_test.exs<|end_filename|>
defmodule CodeCorps.Services.ProjectServiceTest do
use CodeCorps.ModelCase
alias CodeCorps.Project
alias CodeCorps.Repo
alias CodeCorps.Services.ProjectService
describe "update_project_totals/1" do
test "updates the project totals when has active subscriptions" do
project = insert(:project)
plan = insert(:stripe_connect_plan, project: project)
insert(:stripe_connect_subscription, stripe_connect_plan: plan, quantity: 1000, status: "active")
insert(:stripe_connect_subscription, stripe_connect_plan: plan, quantity: 1000, status: "active")
repo_project =
Project
|> Repo.get(project.id)
|> Repo.preload([:stripe_connect_plan])
{:ok, result} = ProjectService.update_project_totals(repo_project)
assert result.id == project.id
assert result.total_monthly_donated == 2000
end
test "updates the project totals when has no active subscriptions" do
project = insert(:project)
insert(:stripe_connect_plan, project: project)
repo_project =
Project
|> Repo.get(project.id)
|> Repo.preload([:stripe_connect_plan])
{:ok, result} = ProjectService.update_project_totals(repo_project)
assert result.id == project.id
assert result.total_monthly_donated == 0
end
end
end
<|start_filename|>lib/code_corps/github/api/pull_request.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.PullRequest do
@moduledoc ~S"""
Functions for working with pull requests on GitHub.
"""
alias CodeCorps.{
GitHub,
GithubAppInstallation,
GithubRepo
}
@doc """
Fetches a pull request from the GitHub API, given the API URL for the pull
request and the `CodeCorps.GithubRepo` record that points to its GitHub
repository.
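  A rough sketch (the URL is hypothetical; the `CodeCorps.GithubRepo` record
  must have its `github_app_installation` association preloaded, as the
  function head below requires):
      github_repo = CodeCorps.Repo.preload(github_repo, :github_app_installation)
      {:ok, payload} =
        CodeCorps.GitHub.API.PullRequest.from_url(
          "https://api.github.com/repos/octocat/hello-world/pulls/1347",
          github_repo
        )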
"""
def from_url(url, %GithubRepo{github_app_installation: %GithubAppInstallation{} = installation}) do
"https://api.github.com/" <> endpoint = url
with opts when is_list(opts) <- GitHub.API.opts_for(installation) do
GitHub.request(:get, endpoint, %{}, %{}, opts)
else
{:error, github_error} -> {:error, github_error}
end
end
end
<|start_filename|>lib/code_corps_web/controllers/password_reset_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.PasswordResetController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{AuthToken, User}
action_fallback CodeCorpsWeb.FallbackController
@doc """
Requires a `token`, `password`, and `password_confirmation` and checks:
1. The token exists in an `AuthToken` record, verified with
`Phoenix.Token.verify`
2. The `password` and `password_confirmation` match, and the auth token
exists:
      - If yes, a `201` response will return the email.
- If no, a `422` response will return the error.
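  As an illustrative sketch, the incoming params are expected to look like this
  (all values are placeholders):
      %{
        "token" => "value-of-an-existing-auth-token",
        "password" => "new-password",
        "password_confirmation" => "new-password"
      }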
"""
  def reset_password(conn, %{"token" => reset_token, "password" => _password, "password_confirmation" => _password_confirmation} = params) do
with %AuthToken{user: user} = auth_token <- AuthToken |> Repo.get_by(%{value: reset_token}) |> Repo.preload(:user),
{:ok, _} <- Phoenix.Token.verify(conn, "user", reset_token, max_age: Application.get_env(:code_corps, :password_reset_timeout)),
{:ok, %User{} = updated_user} <- user |> User.reset_password_changeset(params) |> Repo.update(),
{:ok, _auth_token} <- auth_token |> Repo.delete(),
{:ok, auth_token, _claims} = updated_user |> CodeCorps.Guardian.encode_and_sign()
do
conn
|> Plug.Conn.assign(:current_user, updated_user)
|> put_status(:created)
|> render("show.json", token: auth_token, user_id: updated_user.id, email: updated_user.email)
end
end
end
<|start_filename|>test/lib/code_corps/policy/stripe_connect_subscription_test.exs<|end_filename|>
defmodule CodeCorps.Policy.StripeConnectSubscriptionTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.StripeConnectSubscription, only: [create?: 2, show?: 2]
describe "create?" do
test "returns true if user is creating their own record" do
user = insert(:user)
stripe_connect_subscription = insert(:stripe_connect_subscription, user: user)
params = %{"id" => stripe_connect_subscription.id, "user_id" => user.id}
assert create?(user, params)
end
test "returns false if user is creating someone else's record" do
user = build(:user)
stripe_connect_subscription = insert(:stripe_connect_subscription)
params = %{"id" => stripe_connect_subscription.id, "user_id" => -1}
refute create?(user, params)
end
end
describe "show?" do
test "returns true if user is viewing their own record" do
user = insert(:user)
stripe_connect_subscription = insert(:stripe_connect_subscription, user: user)
assert show?(user, stripe_connect_subscription)
end
test "returns false if user is viewing someone else's record" do
user = insert(:user)
stripe_connect_subscription = insert(:stripe_connect_subscription)
refute show?(user, stripe_connect_subscription)
end
end
end
<|start_filename|>lib/code_corps_web/views/page_view.ex<|end_filename|>
defmodule CodeCorpsWeb.PageView do
@moduledoc false
use CodeCorpsWeb, :view
@dialyzer :no_match
end
<|start_filename|>test/lib/code_corps/github/sync/comment/comment_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Comment.CommentTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.{Comment, GithubComment, GitHub.Sync, Repo}
describe "sync/4" do
test "creates missing comments for each project associated with the github repo" do
user = insert(:user)
project = insert(:project)
github_repo = insert(:github_repo, project: project)
github_issue = insert(:github_issue, github_repo: github_repo)
github_comment = insert(:github_comment, github_issue: github_issue)
task = insert(:task, project: project, user: user, github_issue: github_issue, github_repo: github_repo)
# update will fail unless source is newer than target
github_comment =
github_comment
|> Map.update!(:github_updated_at, &Timex.shift(&1, minutes: 1))
{:ok, comment} =
task |> Sync.Comment.sync(github_comment, user)
assert comment.user_id == user.id
assert comment.markdown == github_comment.body
assert comment.github_comment_id == github_comment.id
assert comment.task_id == task.id
assert Repo.one(Comment)
end
test "updates existing comments for each project associated with the github repo" do
user = insert(:user)
project = insert(:project)
github_repo = insert(:github_repo, project: project)
github_issue = insert(:github_issue, github_repo: github_repo)
github_comment = insert(:github_comment, github_issue: github_issue)
task = insert(:task, project: project, user: user, github_issue: github_issue, github_repo: github_repo)
existing_comment = insert(:comment, task: task, user: user, github_comment: github_comment)
# update will fail unless source is newer than target
github_comment =
github_comment
|> Map.update!(:github_updated_at, &Timex.shift(&1, minutes: 1))
{:ok, comment} =
task |> Sync.Comment.sync(github_comment, user)
assert comment.user_id == user.id
assert comment.markdown == github_comment.body
assert comment.github_comment_id == github_comment.id
assert comment.task_id == task.id
assert comment.id == existing_comment.id
end
test "fails on validation errors" do
user = insert(:user)
project = insert(:project)
github_repo = insert(:github_repo, project: project)
github_issue = insert(:github_issue, github_repo: github_repo)
# body will trigger validation error
github_comment = insert(:github_comment, github_issue: github_issue, body: nil)
task = insert(:task, project: project, user: user, github_issue: github_issue, github_repo: github_repo)
# update will fail either way unless source is newer than target
# we do not want to test for that problem in this test
github_comment =
github_comment
|> Map.update!(:github_updated_at, &Timex.shift(&1, minutes: 1))
%{user: user} = insert(:comment, task: task, github_comment: github_comment)
{:error, changeset} =
task |> Sync.Comment.sync(github_comment, user)
refute changeset.valid?
end
end
describe "delete/1" do
test "deletes the Comment record for a GithubComment" do
github_comment = insert(:github_comment)
comments = insert_list(2, :comment, github_comment: github_comment)
insert(:comment)
comment_ids = Enum.map(comments, &Map.get(&1, :id))
{:ok, deleted_comments} =
github_comment.github_id
|> Sync.Comment.delete()
assert Enum.count(deleted_comments) == 2
assert Repo.aggregate(Comment, :count, :id) == 1
assert Repo.aggregate(GithubComment, :count, :id) == 1
for deleted_comment <- deleted_comments do
assert deleted_comment.id in comment_ids
end
end
test "works when there is no associated Comment record" do
github_comment = insert(:github_comment)
{:ok, deleted_comments} =
github_comment.github_id
|> Sync.Comment.delete()
assert Enum.count(deleted_comments) == 0
end
end
end
<|start_filename|>test/lib/code_corps/model/stripe_connect_plan_test.exs<|end_filename|>
defmodule CodeCorps.StripeConnectPlanTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeConnectPlan
@valid_attrs %{
id_from_stripe: "abc123"
}
@invalid_attrs %{}
describe "create_changeset/2" do
test "reports as valid when attributes are valid" do
project_id = insert(:project).id
changes = Map.merge(@valid_attrs, %{project_id: project_id})
changeset = StripeConnectPlan.create_changeset(%StripeConnectPlan{}, changes)
assert changeset.valid?
end
test "reports as invalid when attributes are invalid" do
changeset = StripeConnectPlan.create_changeset(%StripeConnectPlan{}, @invalid_attrs)
refute changeset.valid?
assert_error_message(changeset, :id_from_stripe, "can't be blank")
assert_error_message(changeset, :project_id, "can't be blank")
end
test "ensures associations link to records that exist" do
attrs = @valid_attrs |> Map.merge(%{project_id: -1})
{result, changeset} =
StripeConnectPlan.create_changeset(%StripeConnectPlan{}, attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :project, "does not exist")
end
end
end
<|start_filename|>test/lib/code_corps/model/stripe_invoice_test.exs<|end_filename|>
defmodule CodeCorps.StripeInvoiceTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeInvoice
@valid_attrs %{
charge_id_from_stripe: "ch_123",
customer_id_from_stripe: "cus_123",
id_from_stripe: "in_123",
subscription_id_from_stripe: "sub_123"
}
test "changeset with valid attributes" do
user_id = insert(:user).id
stripe_connect_subscription_id = insert(:stripe_connect_subscription).id
relationships = %{user_id: user_id, stripe_connect_subscription_id: stripe_connect_subscription_id}
attrs = Map.merge(@valid_attrs, relationships)
changeset =
%StripeInvoice{}
|> StripeInvoice.create_changeset(attrs)
assert changeset.valid?
end
test "changeset requires user_id" do
stripe_connect_subscription_id = insert(:stripe_connect_subscription).id
relationships = %{stripe_connect_subscription_id: stripe_connect_subscription_id}
attrs = Map.merge(@valid_attrs, relationships)
changeset =
%StripeInvoice{}
|> StripeInvoice.create_changeset(attrs)
refute changeset.valid?
assert_error_message(changeset, :user_id, "can't be blank")
end
test "changeset requires stripe_connect_subscription_id" do
user_id = insert(:user).id
relationships = %{user_id: user_id}
attrs = Map.merge(@valid_attrs, relationships)
changeset =
%StripeInvoice{}
|> StripeInvoice.create_changeset(attrs)
refute changeset.valid?
assert_error_message(changeset, :stripe_connect_subscription_id, "can't be blank")
end
test "changeset requires id of actual user" do
user_id = -1
stripe_connect_subscription_id = insert(:stripe_connect_subscription).id
relationships = %{user_id: user_id, stripe_connect_subscription_id: stripe_connect_subscription_id}
attrs = Map.merge(@valid_attrs, relationships)
{result, changeset} =
%StripeInvoice{}
|> StripeInvoice.create_changeset(attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :user, "does not exist")
end
test "changeset requires id of actual stripe_connect_subscription" do
user_id = insert(:user).id
stripe_connect_subscription_id = -1
relationships = %{user_id: user_id, stripe_connect_subscription_id: stripe_connect_subscription_id}
attrs = Map.merge(@valid_attrs, relationships)
{result, changeset} =
%StripeInvoice{}
|> StripeInvoice.create_changeset(attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :stripe_connect_subscription, "does not exist")
end
end
<|start_filename|>test/lib/code_corps_web/controllers/organization_github_app_installation_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.OrganizationGithubAppInstallationControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :organization_github_app_installation
use Bamboo.Test
@attrs %{role: "contributor"}
describe "index" do
test "lists all resources", %{conn: conn} do
[record_1, record_2] = insert_pair(:organization_github_app_installation)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
test "filters resources by record id", %{conn: conn} do
[record_1, record_2 | _] = insert_list(3, :organization_github_app_installation)
path = "organization-github-app-installations/?filter[id]=#{record_1.id},#{record_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
record = insert(:organization_github_app_installation)
conn
|> request_show(record)
|> json_response(200)
|> assert_id_from_response(record.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: user} do
github_app_installation = insert(:github_app_installation)
organization = insert(:organization, owner: user)
attrs = @attrs |> Map.merge(%{github_app_installation: github_app_installation, organization: organization})
assert conn |> request_create(attrs) |> json_response(201)
end
@tag :authenticated
test "does not create resource and renders 422 when data is invalid", %{conn: conn, current_user: user} do
      # only way to trigger a validation error is to provide a non-existent github_app_installation or organization
# anything else will fail on authorization level
github_app_installation = build(:github_app_installation)
organization = insert(:organization, owner: user)
attrs = @attrs |> Map.merge(%{github_app_installation: github_app_installation, organization: organization})
assert conn |> request_create(attrs) |> json_response(422)
end
test "does not create resource and renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "delete" do
@tag :authenticated
test "deletes resource", %{conn: conn, current_user: user} do
organization = insert(:organization, owner: user)
record = insert(:organization_github_app_installation, organization: organization)
assert conn |> request_delete(record) |> response(204)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent on delete", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/adapters/stripe_external_account_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.Adapters.StripeExternalAccountTest do
use CodeCorps.ModelCase
import CodeCorps.StripeService.Adapters.StripeExternalAccountAdapter, only: [to_params: 2]
@stripe_external_account %Stripe.BankAccount{
id: "ba_19SSZG2eZvKYlo2CXnmzYU5H",
object: "bank_account",
account: "acct_1032D82eZvKYlo2C",
account_holder_name: "<NAME>",
account_holder_type: "individual",
bank_name: "STRIPE TEST BANK",
country: "US",
currency: "usd",
default_for_currency: false,
fingerprint: "1JWtPxqbdX5Gamtc",
last4: "6789",
    metadata: %{},
routing_number: "110000000",
status: "new"
}
@local_map %{
id_from_stripe: "ba_19SSZG2eZvKYlo2CXnmzYU5H",
account_holder_name: "<NAME>",
account_holder_type: "individual",
bank_name: "STRIPE TEST BANK",
country: "US",
currency: "usd",
default_for_currency: false,
fingerprint: "1JWtPxqbdX5Gamtc",
last4: "6789",
routing_number: "110000000",
status: "new"
}
describe "to_params/2" do
test "converts from stripe map to local properly" do
connect_account = insert(:stripe_connect_account)
attrs_from_connect_account = %{
stripe_connect_account_id: connect_account.id,
account_id_from_stripe: connect_account.id_from_stripe
}
expected_result = @local_map |> Map.merge(attrs_from_connect_account)
{:ok, result} = to_params(@stripe_external_account, connect_account)
assert result == expected_result
end
end
end
<|start_filename|>config/timber.exs<|end_filename|>
use Mix.Config
# Update the instrumenters so that we can structure Phoenix logs
config :code_corps, CodeCorpsWeb.Endpoint,
instrumenters: [Timber.Integrations.PhoenixInstrumenter]
# Structure Ecto logs
config :code_corps, CodeCorps.Repo,
loggers: [{Timber.Integrations.EctoLogger, :log, []}]
# Sets the Logger application to use the `:console` backend with UTC-oriented
# timestamps
config :logger,
backends: [:console],
utc_log: true
# Configures the `:console` backend to:
# - Use Timber.Formatter.format/4 to format log lines
# - Pass _all_ metadata for every log line into formatters
config :logger, :console,
format: {Timber.Formatter, :format},
metadata: :all
# Configures the Timber.Formatter to:
# - Colorize the log level
# - Format metadata using logfmt (if metadata printing is enabled)
# - Print the log level
# - Print the timestamp
# Note: print_metadata is false, so the format key will be ignored
config :timber, Timber.Formatter,
colorize: true,
format: :logfmt,
print_log_level: true,
print_metadata: false,
print_timestamps: true
# Compiling the configuration from the following Mix environments will result
# in the Timber.Formatter using a "production friendly" configuration.
environments_to_include = [
:prod,
:staging
]
if Enum.member?(environments_to_include, Mix.env()) do
# Configures the Timber.Formatter for outputting to Heroku Logplex
# - Removes log level colorization (since the colorization codes are not machine-friendly)
# - Formats the data using the JSON formatter
# - Removes the log level (this is in the metadata)
# - Prints the metadata at the end of the line
# - Removes the timestamp (this is in the metadata and Heroku will also add its own)
config :timber, Timber.Formatter,
colorize: false,
format: :json,
print_log_level: false,
print_metadata: true,
print_timestamps: false
end
# Need help?
# Email us: <EMAIL>
# Or, file an issue: https://github.com/timberio/timber-elixir/issues
<|start_filename|>test/lib/code_corps/github/api/installation_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.API.InstallationTest do
@moduledoc false
use CodeCorpsWeb.ApiCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GithubAppInstallation,
GitHub.API.Installation
}
@access_token "v<PASSWORD>"
@expires_at Timex.now() |> Timex.shift(hours: 1) |> DateTime.to_iso8601()
@installation_repositories load_endpoint_fixture("installation_repositories")
describe "repositories/1" do
test "makes a request to get the repositories for the authenticated installation" do
installation = %GithubAppInstallation{access_token: @access_token, access_token_expires_at: @expires_at}
assert Installation.repositories(installation) == {:ok, @installation_repositories |> Map.get("repositories")}
end
defmodule PaginatedRepositoriesAPI do
@url "https://api.github.com/installation/repositories"
defp build_repo(id), do: %{github_id: id}
def request(:head, @url, _, _, _) do
next = '<#{@url}?page=2>; rel="next"'
last = '<#{@url}?page=2>; rel="last"'
headers = [{"Link", [next, last] |> Enum.join(", ")}]
{:ok, %HTTPoison.Response{body: "", headers: headers, status_code: 200}}
end
def request(:get, @url, _, _, opts) do
body = case opts[:params][:page] do
1 -> %{"repositories" => 1..100 |> Enum.map(&build_repo/1)}
2 -> %{"repositories" => 1..50 |> Enum.map(&build_repo/1)}
end
{:ok, %HTTPoison.Response{body: body |> Poison.encode!, status_code: 200}}
end
def request(method, url, body, headers, opts) do
CodeCorps.GitHub.SuccessAPI.request(method, url, body, headers, opts)
end
end
test "supports pagination" do
installation = %GithubAppInstallation{access_token: @access_token, access_token_expires_at: @expires_at}
with_mock_api(PaginatedRepositoriesAPI) do
{:ok, issues} = installation |> Installation.repositories
end
assert issues |> Enum.count == 150
end
end
describe "get_access_token/1" do
test "returns current token if expires time has not passed" do
expires_at =
Timex.now()
|> Timex.shift(hours: 1)
installation = %GithubAppInstallation{access_token: @access_token, access_token_expires_at: expires_at}
assert Installation.get_access_token(installation) == {:ok, @access_token}
end
test "returns a new token if expires time has passed" do
expires_at =
Timex.now()
|> Timex.shift(hours: -1)
installation = insert(
:github_app_installation,
access_token: "<PASSWORD>", access_token_expires_at: expires_at,
github_id: 1)
assert Installation.get_access_token(installation) == {:ok, @access_token}
end
test "returns a new token if token and expires time are nil" do
installation = insert(
:github_app_installation,
access_token: nil, access_token_expires_at: nil,
github_id: 1)
assert Installation.get_access_token(installation) == {:ok, @access_token}
end
end
describe "token_expired?/1" do
test "returns false for a future ISO8601 timestamp" do
time = Timex.now() |> Timex.shift(days: 14) |> DateTime.to_iso8601()
refute Installation.token_expired?(time)
end
test "returns false for a current ISO8601 timestamp" do
time = Timex.now() |> DateTime.to_iso8601()
assert Installation.token_expired?(time)
end
test "returns true for a past ISO8601 timestamp" do
time = Timex.now() |> Timex.shift(days: -14) |> DateTime.to_iso8601()
assert Installation.token_expired?(time)
end
test "returns true for a nil value" do
assert Installation.token_expired?(nil)
end
end
end
<|start_filename|>test/lib/code_corps/policy/task_skill_test.exs<|end_filename|>
defmodule CodeCorps.Policy.TaskSkillTest do
@moduledoc false
use CodeCorps.PolicyCase
import CodeCorps.Policy.TaskSkill, only: [create?: 2, delete?: 2]
describe "create?" do
test "returns false when user is not member of project" do
user = insert(:user)
task = insert(:task)
params = %{"task_id" => task.id}
refute create?(user, params)
end
test "returns false when user is pending member of project" do
%{project: project, user: user} = insert(:project_user, role: "pending")
task = insert(:task, project: project)
params = %{"task_id" => task.id}
refute create?(user, params)
end
test "returns true when user is contributor of project" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
task = insert(:task, project: project)
params = %{"task_id" => task.id}
assert create?(user, params)
end
test "returns true when user is admin of project" do
%{project: project, user: user} = insert(:project_user, role: "admin")
task = insert(:task, project: project)
params = %{"task_id" => task.id}
assert create?(user, params)
end
test "returns true when user is owner of project" do
%{project: project, user: user} = insert(:project_user, role: "owner")
task = insert(:task, project: project)
params = %{"task_id" => task.id}
assert create?(user, params)
end
test "returns true when user is author of task" do
user = insert(:user)
task = insert(:task, user: user)
params = %{"task_id" => task.id}
assert create?(user, params)
end
end
describe "delete?" do
test "returns false when user is not member of project" do
user = insert(:user)
task = insert(:task)
task_skill = insert(:task_skill, task: task)
refute delete?(user, task_skill)
end
test "returns false when user is pending member of project" do
%{project: project, user: user} = insert(:project_user, role: "pending")
task = insert(:task, project: project)
task_skill = insert(:task_skill, task: task)
refute delete?(user, task_skill)
end
test "returns true when user is contributor of project" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
task = insert(:task, project: project)
task_skill = insert(:task_skill, task: task)
assert delete?(user, task_skill)
end
test "returns true when user is admin of project" do
%{project: project, user: user} = insert(:project_user, role: "admin")
task = insert(:task, project: project)
task_skill = insert(:task_skill, task: task)
assert delete?(user, task_skill)
end
test "returns true when user is owner of project" do
%{project: project, user: user} = insert(:project_user, role: "owner")
task = insert(:task, project: project)
task_skill = insert(:task_skill, task: task)
assert delete?(user, task_skill)
end
test "returns true when user is author of task" do
user = insert(:user)
task = insert(:task, user: user)
task_skill = insert(:task_skill, task: task)
assert delete?(user, task_skill)
end
end
end
<|start_filename|>lib/code_corps_web/controllers/role_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.RoleController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Role, User, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
roles = Role |> Query.id_filter(params) |> Repo.all |> preload()
conn |> render("index.json-api", data: roles)
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %Role{} = role <- Role |> Repo.get(id) |> preload() do
conn |> render("show.json-api", data: role)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %Role{}, params),
{:ok, %Role{} = role} <- %Role{} |> Role.changeset(params) |> Repo.insert,
role = preload(role)
do
conn |> put_status(:created) |> render("show.json-api", data: role)
end
end
@preloads [:role_skills]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>test/lib/code_corps/model/auth_token_test.exs<|end_filename|>
defmodule CodeCorps.AuthTokenTest do
use CodeCorps.ModelCase
alias CodeCorps.AuthToken
test "changeset with valid attributes" do
user = insert(:user)
changeset = AuthToken.changeset(%AuthToken{}, user)
assert changeset.valid?
assert changeset.changes.value
end
end
<|start_filename|>test/lib/code_corps/model/github_issue_assignee_test.exs<|end_filename|>
defmodule CodeCorps.GithubIssueAssigneeTest do
@moduledoc false
use CodeCorps.ModelCase
alias CodeCorps.GithubIssueAssignee
describe "changeset/2" do
@required_attrs ~w(github_issue_id github_user_id)
test "requires #{@required_attrs}" do
changeset = GithubIssueAssignee.changeset(%GithubIssueAssignee{}, %{})
assert_validation_triggered(changeset, :github_issue_id, :required)
assert_validation_triggered(changeset, :github_user_id, :required)
end
test "ensures associated GithubIssue record exists" do
github_user = insert(:github_user)
changeset = GithubIssueAssignee.changeset(%GithubIssueAssignee{}, %{github_issue_id: -1, github_user_id: github_user.id})
{:error, response_changeset} = Repo.insert(changeset)
assert_error_message(response_changeset, :github_issue, "does not exist")
end
test "ensures associated GithubUser record exists" do
github_issue = insert(:github_issue)
changeset = GithubIssueAssignee.changeset(%GithubIssueAssignee{}, %{github_issue_id: github_issue.id, github_user_id: -1})
{:error, response_changeset} = Repo.insert(changeset)
assert_error_message(response_changeset, :github_user, "does not exist")
end
test "ensures uniqueness of GithubUser/GithubIssue combination" do
github_issue = insert(:github_issue)
github_user = insert(:github_user)
insert(:github_issue_assignee, github_issue: github_issue, github_user: github_user)
changeset = GithubIssueAssignee.changeset(%GithubIssueAssignee{}, %{github_issue_id: github_issue.id, github_user_id: github_user.id})
{:error, response_changeset} = Repo.insert(changeset)
assert_error_message(response_changeset, :github_user, "has already been taken")
end
end
end
<|start_filename|>lib/code_corps_web/controllers/github_repo_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubRepoController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{
Analytics.SegmentTracker,
GithubRepo,
Helpers.Query,
Processor,
User
}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with github_repos <- GithubRepo |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: github_repos)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %GithubRepo{} = github_repo <- GithubRepo |> Repo.get(id) do
conn |> render("show.json-api", data: github_repo)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %GithubRepo{} = github_repo <- GithubRepo |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, github_repo, params),
{:ok, %GithubRepo{} = github_repo} <- github_repo |> GithubRepo.update_changeset(params) |> Repo.update()
do
github_repo |> postprocess()
current_user |> track(github_repo)
conn |> render("show.json-api", data: github_repo)
end
end
@spec postprocess(GithubRepo.t) :: any
defp postprocess(%GithubRepo{project_id: nil}), do: nil
defp postprocess(%GithubRepo{} = github_repo) do
Processor.process(fn -> CodeCorps.GitHub.Sync.sync_repo(github_repo) end)
end
@spec track(User.t, GithubRepo.t) :: any
defp track(%User{id: user_id}, %GithubRepo{project_id: nil} = github_repo) do
user_id |> SegmentTracker.track("Disconnected GitHub Repo from Project", github_repo)
end
defp track(%User{id: user_id}, %GithubRepo{} = github_repo) do
user_id |> SegmentTracker.track("Connected GitHub Repo to Project", github_repo)
end
end
<|start_filename|>lib/code_corps/model/message.ex<|end_filename|>
defmodule CodeCorps.Message do
@moduledoc """
A message sent from a project to a user or from a user to a project.
The author does not need to be a member of the project in order to send a
message to the project.
No recipient will be defined for the message. The recipient is defined at the
level of the `CodeCorps.Conversation`.
A message may be used as a broadcast to a number of users. A message MAY
therefore have many conversations associated with it.
"""
use CodeCorps.Model
alias CodeCorps.Message
@type t :: %__MODULE__{}
schema "messages" do
field :body, :string
field :initiated_by, :string
field :subject, :string
belongs_to :author, CodeCorps.User
belongs_to :project, CodeCorps.Project
has_many :conversations, CodeCorps.Conversation
timestamps()
end
@doc false
@spec changeset(Message.t, map) :: Ecto.Changeset.t
def changeset(%Message{} = message, attrs) do
message
|> cast(attrs, [:body, :initiated_by, :subject])
|> validate_required([:body, :initiated_by])
|> validate_inclusion(:initiated_by, initiated_by_sources())
|> require_subject_if_admin()
end
# validate subject only if initiated_by "admin"
@spec require_subject_if_admin(Ecto.Changeset.t) :: Ecto.Changeset.t
defp require_subject_if_admin(changeset) do
initiated_by = changeset |> Ecto.Changeset.get_field(:initiated_by)
changeset |> do_require_subject_if_admin(initiated_by)
end
defp do_require_subject_if_admin(changeset, "admin") do
changeset |> validate_required(:subject)
end
defp do_require_subject_if_admin(changeset, _), do: changeset
@spec initiated_by_sources :: list(String.t)
defp initiated_by_sources do
~w{ admin user }
end
end
<|start_filename|>lib/code_corps/github/event/pull_request/pull_request.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.PullRequest do
@moduledoc ~S"""
In charge of handling a GitHub Webhook payload for the PullRequest event type
[https://developer.github.com/v3/activity/events/types/#pullrequestevent](https://developer.github.com/v3/activity/events/types/#pullrequestevent)
"""
@behaviour CodeCorps.GitHub.Event.Handler
alias CodeCorps.{
GitHub.Sync,
GitHub.Event.PullRequest.Validator
}
@doc ~S"""
Handles the "PullRequest" GitHub webhook
The process is as follows:
- validate the payload is structured as expected
- validate the action is properly supported
- sync the pull request using `CodeCorps.GitHub.Sync.PullRequest`
"""
@impl CodeCorps.GitHub.Event.Handler
@spec handle(map) ::
Sync.pull_request_event_outcome() | {:error, :unexpected_payload}
def handle(payload) do
with {:ok, :valid} <- validate_payload(payload) do
Sync.pull_request_event(payload)
else
{:error, error} -> {:error, error}
end
end
@spec validate_payload(map) :: {:ok, :valid} | {:error, :unexpected_payload}
defp validate_payload(%{} = payload) do
if Validator.valid?(payload) do
{:ok, :valid}
else
{:error, :unexpected_payload}
end
end
end
<|start_filename|>test/lib/code_corps/model/github_repo_test.exs<|end_filename|>
defmodule CodeCorps.GithubRepoTest do
use CodeCorps.ModelCase
alias CodeCorps.GithubRepo
@valid_attrs %{
github_account_avatar_url: "https://avatars.githubusercontent.com/u/6752317?v=3",
github_account_id: 6752317,
github_account_login: "baxterthehacker",
github_account_type: "User",
github_id: 35129377,
name: "public-repo",
}
@invalid_attrs %{}
describe "changeset/2" do
test "with valid attributes" do
changeset = GithubRepo.changeset(%GithubRepo{}, @valid_attrs)
assert changeset.valid?
end
test "with invalid attributes" do
changeset = GithubRepo.changeset(%GithubRepo{}, @invalid_attrs)
refute changeset.valid?
end
end
describe "update_changeset/2" do
test "when project added" do
attrs = @valid_attrs |> Map.put(:project_id, 1)
changeset = GithubRepo.update_changeset(%GithubRepo{}, attrs)
assert changeset.valid?
end
test "when project removed" do
attrs = @valid_attrs |> Map.put(:project_id, nil)
changeset = GithubRepo.update_changeset(%GithubRepo{sync_state: "synced"}, attrs)
assert changeset.valid?
assert changeset.changes[:sync_state] == "unsynced"
end
end
describe "update_sync_changeset/2" do
test "with valid attributes" do
GithubRepo.sync_states |> Enum.each(fn state ->
attrs = @valid_attrs |> Map.put(:sync_state, state)
changeset = GithubRepo.update_sync_changeset(%GithubRepo{}, attrs)
assert changeset.valid?
end)
end
test "with invalid attributes" do
attrs = @valid_attrs |> Map.put(:sync_state, "not_a_valid_sync_state")
changeset = GithubRepo.update_sync_changeset(%GithubRepo{}, attrs)
refute changeset.valid?
assert changeset.errors[:sync_state] == {"is invalid", [validation: :inclusion]}
end
end
end
<|start_filename|>lib/code_corps_web/plugs/set_timber_user_context.ex<|end_filename|>
defmodule CodeCorpsWeb.Plug.SetTimberUserContext do
@moduledoc """
Captures the current user and adds it to the Timber logging context.
"""
@behaviour Plug
alias CodeCorps.User
@impl true
def init(opts), do: opts
@impl true
def call(%{assigns: %{current_user: user}} = conn, _), do: add_context(conn, user)
def call(conn, _), do: conn
@impl false
def add_context(conn, %User{} = user) do
%Timber.Contexts.UserContext{
id: user.id, email: user.email, name: User.full_name(user)
} |> Timber.add_context()
conn
end
def add_context(conn, _), do: conn
end
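# Hypothetical wiring sketch, not part of the original source: a plug like
# this is typically placed after authentication so that :current_user has
# already been assigned, for example in a router pipeline:
#
#     pipeline :api do
#       # ...authentication plugs that assign :current_user...
#       plug CodeCorpsWeb.Plug.SetTimberUserContext
#     end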
<|start_filename|>test/support/github/failure_api.ex<|end_filename|>
defmodule CodeCorps.GitHub.FailureAPI do
@moduledoc ~S"""
A basic GitHub API mock which returns a 401 error response (the "forbidden"
fixture) for all requests.
Should be good enough for any tests that simply assert a piece of code is able
to recover from a generic request error.
For any tests that cover handling of specific errors, a non-default API should
be defined inline.
Since our GitHub requests are often forced to start with an installation
access token request, that one is set to succeed here as well.
"""
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.SuccessAPI
def request(method, url, body, headers, options) do
case {method, url} |> for_access_token?() do
true -> SuccessAPI.request(method, url, body, headers, options)
false ->
send(self(), {method, url, body, headers, options})
{:ok, body} = load_endpoint_fixture("forbidden") |> Poison.encode
{:ok, %HTTPoison.Response{status_code: 401, body: body}}
end
end
defp for_access_token?({:post, url}), do: url |> access_token_url?()
defp for_access_token?({_method, _url}), do: false
defp access_token_url?("https://api.github.com/" <> path), do: path |> String.split("/") |> access_token_parts?()
defp access_token_url?(_), do: false
defp access_token_parts?(["installations", _, "access_tokens"]), do: true
defp access_token_parts?(_), do: false
end
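# Hypothetical test setup sketch, not part of the original source: a test
# that wants every GitHub call except the access token request to fail could
# point the :github config key (see config/config.exs) at this module:
#
#     Application.put_env(:code_corps, :github, CodeCorps.GitHub.FailureAPI)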
<|start_filename|>priv/repo/migrations/20171114232534_remove_project_github_repos.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.RemoveProjectGithubRepos do
use Ecto.Migration
def up do
drop table(:project_github_repos)
end
def down do
create table(:project_github_repos) do
add :project_id, references(:projects, on_delete: :delete_all)
add :github_repo_id, references(:github_repos, on_delete: :delete_all)
add :sync_state, :string, default: "unsynced"
timestamps()
end
create unique_index(:project_github_repos, [:project_id, :github_repo_id])
create index(:project_github_repos, [:sync_state])
end
end
<|start_filename|>emails/styles.css<|end_filename|>
/* Base ------------------------------ */
*:not(br):not(tr):not(html) {
font-family: Arial, 'Helvetica Neue', Helvetica, sans-serif;
box-sizing: border-box;
}
body {
width: 100% !important;
height: 100%;
margin: 0;
line-height: 1.4;
color: #333;
-webkit-text-size-adjust: none;
}
p,
ul,
ol,
blockquote {
line-height: 1.4;
text-align: left;
}
a {
color: #08A8FC;
text-decoration: none;
}
a img {
border: none;
}
td {
word-break: break-word;
}
/* Layout ------------------------------ */
.email-wrapper {
width: 100%;
margin: 0;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.email-content {
width: 100%;
margin: 0;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
/* Masthead ----------------------- */
.email-masthead_inner {
padding: 20px 0;
text-align: center;
}
.email-masthead_logo {
width: 161px;
height: 35px;
}
/* Body ------------------------------ */
.email-body {
width: 100%;
margin: 0;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #FFFFFF;
}
.email-body_inner {
width: 570px;
margin: 0 auto;
padding: 0;
border-top: 2px solid #08A8FC;
border-right: 2px solid #EEE;
border-bottom: 2px solid #EEE;
border-left: 2px solid #EEE;
-premailer-width: 570px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #FFFFFF;
}
.email-footer {
width: 570px;
margin: 0 auto;
padding: 20px 10px;
-premailer-width: 570px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
text-align: center;
}
.email-footer p {
color: #777;
font-size: 12px;
margin: 0;
}
.email-footer__cell {
padding: 10px 0;
}
.body-action {
width: 100%;
margin: 30px auto;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
text-align: center;
}
.body-signature {
margin-top: 16px;
width: 100%;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.body-signature p:last-child {
margin-bottom: 0;
}
.body-sub {
width: 100%;
margin-top: 25px;
padding-top: 25px;
border-top: 1px solid #EDEFF2;
}
.content-cell {
padding: 35px;
}
.preheader {
display: none !important;
}
/* Attribute list ------------------------------ */
.attributes {
margin: 0 0 21px;
}
.attributes_content {
background-color: #EDEFF2;
padding: 16px;
}
.attributes_item {
padding: 0;
}
/* Related Items ------------------------------ */
.related {
width: 100%;
margin: 0;
padding: 25px 0 0 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.related_item {
padding: 10px 0;
color: #333;
font-size: 15px;
line-height: 18px;
}
.related_item-title {
display: block;
margin: .5em 0 0;
}
.related_item-thumb {
display: block;
padding-bottom: 10px;
}
.related_heading {
border-top: 1px solid #EDEFF2;
text-align: center;
padding: 25px 0 10px;
}
/* Social Icons ------------------------------ */
.social {
width: auto;
}
.social td {
padding: 0;
width: auto;
}
.social_icon {
height: 20px;
margin: 0 8px 10px 8px;
padding: 0;
}
/* Donation -------------------------------- */
.donation_image {
margin-bottom: 30px;
}
.donation_text {
margin-bottom: 30px;
}
.donation_goal_header {
border-top: 1px solid #EEE;
padding: 20px 10px 0px 10px;
font-size: 12px;
text-align: center;
text-transform: uppercase;
color: #999;
font-weight: bold;
}
.donation_goal_footer {
font-size: 14px;
border-bottom: 1px solid #EEE;
padding: 0px 10px 20px 10px;
text-align: center;
}
/* Project joining -------------------------------- */
.joined_images {
height: 70px;
margin: 0 0 30px 0;
padding: 0;
text-align: center;
width: 100%;
}
.joined_images li {
display: inline-block;
}
.joined_images li.icon {
margin: 0 5px;
width: 25px;
}
.joined_images li.photo {
width: 70px;
}
.joined_images li.photo img {
border-radius: 4px;
}
.joined_images li:first-child {
/*margin-left: 300px;*/
}
/* Data table ------------------------------ */
.purchase {
width: 100%;
margin: 0;
padding: 35px 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.charge_content {
font-size: 14px;
width: 100%;
margin: 0;
padding: 25px 0 0 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.charge_item {
padding: 10px 0;
color: #333;
font-size: 14px;
line-height: 18px;
}
.charge_heading {
padding-bottom: 8px;
border-bottom: 1px solid #EDEFF2;
}
.charge_heading p {
margin: 0;
color: #9BA2AB;
font-size: 12px;
}
.charge_footer {
padding-top: 15px;
border-top: 1px solid #EDEFF2;
}
.charge_total {
margin: 0;
text-align: right;
font-size: 14px;
font-weight: bold;
line-height: 18px;
color: #2F3133;
}
.charge_total--label {
padding: 0 15px 0 0;
font-size: 12px;
line-height: 18px;
}
/* Utilities ------------------------------ */
.align-right {
text-align: right;
}
.align-left {
text-align: left;
}
.align-center {
text-align: center;
}
/*Media Queries ------------------------------ */
@media only screen and (max-width: 600px) {
.email-body_inner,
.email-footer {
width: 100% !important;
}
}
@media only screen and (max-width: 500px) {
.button {
width: 100% !important;
}
}
/* Buttons ------------------------------ */
.button {
background-color: #08A8FC;
border-top: 10px solid #08A8FC;
border-right: 18px solid #08A8FC;
border-bottom: 10px solid #08A8FC;
border-left: 18px solid #08A8FC;
display: inline-block;
color: #FFF;
text-decoration: none;
border-radius: 3px;
box-shadow: 0 2px 3px rgba(0, 0, 0, 0.16);
-webkit-text-size-adjust: none;
}
/* Type ------------------------------ */
h1 {
margin-top: 0;
color: #2F3133;
font-size: 19px;
font-weight: bold;
text-align: left;
}
h2 {
margin-top: 0;
color: #2F3133;
font-size: 16px;
font-weight: bold;
text-align: left;
}
h3 {
margin-top: 0;
color: #2F3133;
font-size: 14px;
font-weight: bold;
text-align: left;
}
p {
margin-top: 0;
color: #333;
font-size: 16px;
line-height: 1.5em;
text-align: left;
}
p.small {
font-size: 14px;
}
p.sub {
color: #666;
font-size: 12px;
}
p.center {
text-align: center;
}
<|start_filename|>test/lib/code_corps/model/organization_invite_test.exs<|end_filename|>
defmodule CodeCorps.OrganizationInviteTest do
use CodeCorps.ModelCase
alias CodeCorps.OrganizationInvite
@valid_attrs %{email: "<EMAIL>", organization_name: "Code Corps"}
@invalid_attrs %{}
describe "changeset/2" do
test "with valid attributes" do
changeset = OrganizationInvite.changeset(%OrganizationInvite{}, @valid_attrs)
assert changeset.valid?
end
test "with invalid attributes" do
changeset = OrganizationInvite.changeset(%OrganizationInvite{}, @invalid_attrs)
refute changeset.valid?
end
end
describe "create_changeset/2" do
test "with valid attributes" do
changeset = OrganizationInvite.create_changeset(%OrganizationInvite{}, @valid_attrs)
assert changeset.valid?
end
test "with invalid attributes" do
changeset = OrganizationInvite.create_changeset(%OrganizationInvite{}, @invalid_attrs)
refute changeset.valid?
end
test "generates code" do
changeset = OrganizationInvite.create_changeset(%OrganizationInvite{}, @valid_attrs)
assert changeset.changes.code != nil
end
end
end
<|start_filename|>lib/code_corps/web_client.ex<|end_filename|>
defmodule CodeCorps.WebClient do
@moduledoc ~S"""
Builds URLs for the web client app routes
"""
alias CodeCorps.{
Comment,
Organization,
Project,
Task,
User
}
@doc ~S"""
Returns the web client site root URL
"""
@spec url :: String.t
def url, do: Application.get_env(:code_corps, :site_url)
@doc ~S"""
Returns the web client site URL for the specified record
"""
@spec url(User.t | Organization.t | Project.t | Task.t | Comment.t) :: String.t
def url(%User{username: username}), do: url() <> "/" <> username
def url(%Organization{slug: slug}), do: url() <> "/" <> slug
def url(%Project{slug: slug, organization: %Organization{} = organization}) do
(organization |> url()) <> "/" <> slug
end
def url(%Task{project: %Project{} = project, number: number}) do
(project |> url()) <> "/" <> (number |> Integer.to_string)
end
def url(%Comment{task: %Task{} = task}), do: task |> url()
end
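# Illustrative results, not part of the original source, assuming the
# :site_url setting is "https://www.codecorps.org":
#
#     CodeCorps.WebClient.url(%CodeCorps.User{username: "josh"})
#     #=> "https://www.codecorps.org/josh"
#
#     CodeCorps.WebClient.url(%CodeCorps.Project{
#       slug: "code-corps",
#       organization: %CodeCorps.Organization{slug: "code-corps"}
#     })
#     #=> "https://www.codecorps.org/code-corps/code-corps"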
<|start_filename|>test/lib/code_corps/organizations/organizations_test.exs<|end_filename|>
defmodule CodeCorps.OrganizationsTest do
use CodeCorps.DbAccessCase
alias CodeCorps.{Organization, Organizations, OrganizationInvite}
alias Ecto.Changeset
describe "create/1" do
test "creates an organization" do
%{id: owner_id} = insert(:user)
attrs = %{
"cloudinary_public_id" => "Baz",
"description" => "Bar",
"name" => "Foo",
"owner_id" => owner_id
}
{:ok, %Organization{} = organization} = Organizations.create(attrs)
assert organization.name == "Foo"
assert organization.description == "Bar"
assert organization.cloudinary_public_id == "Baz"
end
test "returns changeset tuple if there are validation errors"do
{:error, %Changeset{} = changeset} = Organizations.create(%{})
refute changeset.valid?
end
test "fulfills associated organization invite if invite code provided" do
%{code: invite_code, id: invite_id} = insert(:organization_invite)
%{id: owner_id} = insert(:user)
attrs = %{
"cloudinary_public_id" => "Baz",
"description" => "Bar",
"invite_code" => invite_code,
"name" => "Foo",
"owner_id" => owner_id
}
{:ok, %Organization{id: organization_id}} = Organizations.create(attrs)
associated_organization_id =
OrganizationInvite |> Repo.get(invite_id) |> Map.get(:organization_id)
assert associated_organization_id == organization_id
end
end
end
<|start_filename|>test/lib/code_corps/map_utils_test.exs<|end_filename|>
defmodule CodeCorps.MapUtilsTest do
use ExUnit.Case, async: true
import CodeCorps.MapUtils, only: [keys_to_string: 1, rename: 3]
test "&rename/3 renames old key in map to new key" do
assert %{"foo" => 2} |> rename("foo", "bar") == %{"bar" => 2}
end
test "&keys_to_string/1 stringifies any keys in map" do
assert %{:a => "one", :b => "two"} |> keys_to_string == %{"a" => "one", "b" => "two"}
assert %{} |> keys_to_string == %{}
end
end
<|start_filename|>test/lib/code_corps_web/views/github_repo_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.GithubRepoViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
project = insert(:project)
github_app_installation = insert(:github_app_installation)
github_repo = insert(:github_repo, github_app_installation: github_app_installation, project: project)
rendered_json = render(CodeCorpsWeb.GithubRepoView, "show.json-api", data: github_repo)
expected_json = %{
"data" => %{
"id" => github_repo.id |> Integer.to_string,
"type" => "github-repo",
"attributes" => %{
"github-account-avatar-url" => github_repo.github_account_avatar_url,
"github-account-id" => github_repo.github_account_id,
"github-account-login" => github_repo.github_account_login,
"github-account-type" => github_repo.github_account_type,
"github-id" => github_repo.github_id,
"inserted-at" => github_repo.inserted_at,
"name" => github_repo.name,
"syncing-comments-count" => github_repo.syncing_comments_count,
"syncing-issues-count" => github_repo.syncing_issues_count,
"syncing-pull-requests-count" => github_repo.syncing_pull_requests_count,
"sync-state" => github_repo.sync_state,
"updated-at" => github_repo.updated_at
},
"relationships" => %{
"github-app-installation" => %{
"data" => %{"id" => github_app_installation.id |> Integer.to_string, "type" => "github-app-installation"}
},
"project" => %{
"data" => %{"id" => project.id |> Integer.to_string, "type" => "project"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/lib/code_corps/model/organization_github_app_installation_test.exs<|end_filename|>
defmodule CodeCorps.OrganizationGithubAppInstallationTest do
use CodeCorps.ModelCase
alias CodeCorps.{OrganizationGithubAppInstallation, Repo}
describe "create_changeset/2" do
test "ensures organization record exists" do
github_app_installation = insert(:github_app_installation)
attrs = %{github_app_installation_id: github_app_installation.id, organization_id: -1}
changeset =
%OrganizationGithubAppInstallation{}
|> OrganizationGithubAppInstallation.create_changeset(attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :organization)
end
test "ensures github_app_installation record exists" do
organization = insert(:organization)
attrs = %{github_app_installation_id: -1, organization_id: organization.id}
changeset =
%OrganizationGithubAppInstallation{}
|> OrganizationGithubAppInstallation.create_changeset(attrs)
{:error, invalid_changeset} = changeset |> Repo.insert
refute invalid_changeset.valid?
assert assoc_constraint_triggered?(invalid_changeset, :github_app_installation)
end
end
end
<|start_filename|>lib/code_corps/policy/task_skill.ex<|end_filename|>
defmodule CodeCorps.Policy.TaskSkill do
@moduledoc """
Represents an authorization policy for performing actions on TaskSkill records.
Used to authorize a controller action.
"""
import CodeCorps.Policy.Helpers,
only: [
contributed_by?: 2,
get_project: 1,
get_task: 1,
task_authored_by?: 2
]
alias CodeCorps.{TaskSkill, User}
@spec create?(User.t, map) :: boolean
def create?(%User{} = user, %{} = params) do
cond do
params |> get_task |> task_authored_by?(user) -> true
params |> get_task |> get_project |> contributed_by?(user) -> true
true -> false
end
end
@spec delete?(User.t, TaskSkill.t) :: boolean
def delete?(%User{} = user, %TaskSkill{} = task_skill) do
cond do
task_skill |> get_task |> task_authored_by?(user) -> true
task_skill |> get_task |> get_project |> contributed_by?(user) -> true
true -> false
end
end
end
<|start_filename|>lib/code_corps/messages/conversation_query.ex<|end_filename|>
defmodule CodeCorps.Messages.ConversationQuery do
@moduledoc ~S"""
Holds helpers to query `CodeCorps.Conversation` records using a map of params.
"""
import Ecto.Query
alias CodeCorps.{Conversation, ConversationPart, Message, Repo}
alias Ecto.Queryable
@doc ~S"""
Narrows down a `CodeCorps.Conversation` query by `project_id` of the parent
`CodeCorps.Message`, if specified in a params map
"""
@spec project_filter(Queryable.t, map) :: Queryable.t
def project_filter(queryable, %{"project_id" => project_id}) do
queryable
|> join(:left, [c], m in Message, c.message_id == m.id)
|> where([_c, m], m.project_id == ^project_id)
end
def project_filter(queryable, %{}), do: queryable
@doc ~S"""
Narrows down a `CodeCorps.Conversation` query by `user_id`, if specified in a
params map
"""
@spec user_filter(Queryable.t, map) :: Queryable.t
def user_filter(queryable, %{"user_id" => user_id}) do
queryable
|> where([c], c.user_id == ^user_id)
end
def user_filter(queryable, %{}), do: queryable
@doc ~S"""
Filters `CodeCorps.Conversation` record queries to return only those
considered to be active.
Active conversations belong either:
- to a `CodeCorps.Message` initiated by the user
- to a `CodeCorps.Message` initiated by an admin, with at least one
conversation part
"""
@spec active_filter(Queryable.t, map) :: Queryable.t
def active_filter(queryable, %{"active" => true}) do
prefiltered_ids = queryable |> select([c], c.id) |> Repo.all
Conversation
|> where([c], c.id in ^prefiltered_ids)
|> join(:left, [c], m in Message, c.message_id == m.id)
|> join(:left, [c, _m], cp in ConversationPart, c.id == cp.conversation_id)
|> group_by([c, m, _cp], [c.id, m.initiated_by])
|> having([_c, m, _cp], m.initiated_by == "user")
|> or_having([c, m, cp], m.initiated_by == "admin" and count(cp.id) > 0)
end
def active_filter(query, %{}), do: query
@doc ~S"""
Filters `CodeCorps.Conversation` record queries by their status.
"""
@spec status_filter(Queryable.t, map) :: Queryable.t
def status_filter(queryable, %{"status" => status}) do
queryable
|> where([c], c.status == ^status)
end
def status_filter(query, _), do: query
end
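# A composition sketch, not part of the original source: each filter falls
# back to returning the query untouched when its key is absent from the
# params map, so they can be piped together:
#
#     CodeCorps.Conversation
#     |> CodeCorps.Messages.ConversationQuery.project_filter(params)
#     |> CodeCorps.Messages.ConversationQuery.user_filter(params)
#     |> CodeCorps.Messages.ConversationQuery.status_filter(params)
#     |> CodeCorps.Messages.ConversationQuery.active_filter(params)
#     |> CodeCorps.Repo.all()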
<|start_filename|>lib/code_corps/policy/message.ex<|end_filename|>
defmodule CodeCorps.Policy.Message do
@moduledoc """
Handles `User` authorization of actions on `Message` records
"""
import CodeCorps.Policy.Helpers, only: [administered_by?: 2, get_project: 1]
import Ecto.Query
alias CodeCorps.{Conversation, Message, Project, ProjectUser, Repo, User}
@spec scope(Ecto.Queryable.t, User.t) :: Ecto.Queryable.t
def scope(queryable, %User{admin: true}), do: queryable
def scope(queryable, %User{id: id}) do
projects_administered_by_user_ids =
Project
|> join(:inner, [p], pu in ProjectUser, pu.project_id == p.id)
|> where([_p, pu], pu.user_id == ^id)
|> where([_p, pu], pu.role in ~w(admin owner))
|> select([p], p.id)
|> Repo.all
messages_targeted_at_user_ids =
Message
|> join(:inner, [m], c in Conversation, c.message_id == m.id)
|> where([_m, c], c.user_id == ^id)
|> select([m, _c], m.id)
|> Repo.all
queryable
|> where([m], m.author_id == ^id)
|> or_where([m], m.id in ^messages_targeted_at_user_ids)
|> or_where([m], m.project_id in ^projects_administered_by_user_ids)
end
def show?(
%User{id: user_id},
%Message{initiated_by: "user", author_id: author_id})
when user_id == author_id do
true
end
def show?(%User{} = user, %Message{} = message) do
cond do
message |> get_project() |> administered_by?(user) -> true
message.conversations |> Enum.any?(fn c -> c.user_id == user.id end) -> true
true -> false
end
end
def show?(_, _), do: false
def create?(%User{id: id}, %{"initiated_by" => "user", "author_id" => author_id}) when id === author_id do
true
end
def create?(%User{} = user, %{"initiated_by" => "admin", "project_id" => _} = params) do
params |> get_project() |> administered_by?(user)
end
def create?(_, _), do: false
end
<|start_filename|>config/config.exs<|end_filename|>
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :code_corps,
ecto_repos: [CodeCorps.Repo]
# Configures the endpoint
config :code_corps, CodeCorpsWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "<KEY>",
render_errors: [view: CodeCorpsWeb.ErrorView, accepts: ~w(html json json-api)],
pubsub: [name: CodeCorps.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Configures JSON API encoding
config :phoenix, :format_encoders,
"json-api": Poison
# Configures JSON API mime type
config :mime, :types, %{
"application/vnd.api+json" => ["json-api"]
}
config :code_corps, CodeCorps.Guardian,
issuer: "CodeCorps",
ttl: { 30, :days },
verify_issuer: true, # optional
secret_key: System.get_env("GUARDIAN_SECRET_KEY")
# Configures ex_aws with credentials
config :ex_aws, :code_corps,
access_key_id: [System.get_env("AWS_ACCESS_KEY_ID"), :instance_role],
secret_access_key: [System.get_env("AWS_SECRET_ACCESS_KEY"), :instance_role]
config :code_corps,
asset_host: System.get_env("CLOUDFRONT_DOMAIN")
config :code_corps,
intercom_identity_secret_key: System.get_env("INTERCOM_IDENTITY_SECRET_KEY")
config :segment,
write_key: System.get_env("SEGMENT_WRITE_KEY")
config :code_corps, :cloudex, Cloudex
config :cloudex,
api_key: System.get_env("CLOUDEX_API_KEY"),
secret: System.get_env("CLOUDEX_SECRET"),
cloud_name: System.get_env("CLOUDEX_CLOUD_NAME")
# Configures random icon color generator
config :code_corps, :icon_color_generator, CodeCorps.RandomIconColor.Generator
# Set Corsica logging to output a console warning when rejecting a request
config :code_corps, :corsica_log_level, [rejected: :warn]
{:ok, pem} = (System.get_env("GITHUB_APP_PEM") || "") |> Base.decode64()
config :code_corps,
github: CodeCorps.GitHub.API.Gateway,
github_app_id: System.get_env("GITHUB_APP_ID"),
github_app_client_id: System.get_env("GITHUB_APP_CLIENT_ID"),
github_app_client_secret: System.get_env("GITHUB_APP_CLIENT_SECRET"),
github_app_pem: pem
config :stripity_stripe,
api_key: System.get_env("STRIPE_SECRET_KEY"),
connect_client_id: System.get_env("STRIPE_PLATFORM_CLIENT_ID")
config :sentry,
dsn: System.get_env("SENTRY_DSN"),
enable_source_code_context: true,
included_environments: ~w(prod staging)a
config :code_corps, :sentry, CodeCorps.Sentry.Async
config :code_corps, :processor, CodeCorps.Processor.Async
config :code_corps, password_reset_timeout: 3600
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
# Import Timber, structured logging
import_config "timber.exs"
import_config "scout_apm.exs"
config :code_corps, CodeCorps.Repo,
loggers: [{Ecto.LogEntry, :log, []},
{ScoutApm.Instruments.EctoLogger, :log, []}]
<|start_filename|>test/lib/code_corps_web/views/stripe_connect_account_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectAccountViewTest do
@moduledoc false
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
organization = insert(:organization)
account = insert(:stripe_connect_account,
organization: organization,
verification_disabled_reason: "fields_needed",
verification_fields_needed: ["legal_entity.first_name", "legal_entity.last_name"]
)
insert(:stripe_external_account,
stripe_connect_account: account,
bank_name: "<NAME>",
last4: "1234",
routing_number: "123456789"
)
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
expected_json = %{
"data" => %{
"attributes" => %{
"bank-account-bank-name" => "<NAME>",
"bank-account-last4" => "1234",
"bank-account-routing-number" => "123456789",
"bank-account-status" => "pending_requirement",
"business-name" => account.business_name,
"business-url" => account.business_url,
"can-accept-donations" => true,
"charges-enabled" => account.charges_enabled,
"country" => account.country,
"default-currency" => account.default_currency,
"details-submitted" => account.details_submitted,
"display-name" => account.display_name,
"email" => account.email,
"id-from-stripe" => account.id_from_stripe,
"inserted-at" => account.inserted_at,
"legal-entity-address-city" => account.legal_entity_address_city,
"legal-entity-address-country" => account.legal_entity_address_country,
"legal-entity-address-line1" => account.legal_entity_address_line1,
"legal-entity-address-line2" => account.legal_entity_address_line2,
"legal-entity-address-postal-code" => account.legal_entity_address_postal_code,
"legal-entity-address-state" => account.legal_entity_address_state,
"legal-entity-business-name" => account.legal_entity_business_name,
"legal-entity-business-tax-id" => account.legal_entity_business_tax_id,
"legal-entity-business-tax-id-provided" => account.legal_entity_business_tax_id_provided,
"legal-entity-business-vat-id" => account.legal_entity_business_vat_id,
"legal-entity-business-vat-id-provided" => account.legal_entity_business_vat_id_provided,
"legal-entity-dob-day" => account.legal_entity_dob_day,
"legal-entity-dob-month" => account.legal_entity_dob_month,
"legal-entity-dob-year" => account.legal_entity_dob_year,
"legal-entity-first-name" => account.legal_entity_first_name,
"legal-entity-last-name" => account.legal_entity_last_name,
"legal-entity-gender" => account.legal_entity_gender,
"legal-entity-maiden-name" => account.legal_entity_maiden_name,
"legal-entity-personal-address-city" => account.legal_entity_personal_address_city,
"legal-entity-personal-address-country" => account.legal_entity_personal_address_country,
"legal-entity-personal-address-line1" => account.legal_entity_personal_address_line1,
"legal-entity-personal-address-line2" => account.legal_entity_personal_address_line2,
"legal-entity-personal-address-postal-code" => account.legal_entity_personal_address_postal_code,
"legal-entity-personal-address-state" => account.legal_entity_personal_address_state,
"legal-entity-phone-number" => account.legal_entity_phone_number,
"legal-entity-personal-id-number" => account.legal_entity_personal_id_number,
"legal-entity-personal-id-number-provided" => account.legal_entity_personal_id_number_provided,
"legal-entity-ssn-last-4" => account.legal_entity_ssn_last_4,
"legal-entity-ssn-last-4-provided" => account.legal_entity_ssn_last_4_provided,
"legal-entity-type" => account.legal_entity_type,
"legal-entity-verification-details" => account.legal_entity_verification_details,
"legal-entity-verification-details-code" => account.legal_entity_verification_details_code,
"legal-entity-verification-document" => account.legal_entity_verification_document,
"legal-entity-verification-status" => account.legal_entity_verification_status,
"payouts-enabled" => account.payouts_enabled,
"personal-id-number-status" => "pending_requirement",
"recipient-status" => "required",
"support-email" => account.support_email,
"support-phone" => account.support_phone,
"support-url" => account.support_url,
"type" => account.type,
"updated-at" => account.updated_at,
"verification-disabled-reason" => account.verification_disabled_reason,
"verification-document-status" => "pending_requirement",
"verification-due-by" => account.verification_due_by,
"verification-fields-needed" => account.verification_fields_needed
},
"id" => account.id |> Integer.to_string,
"relationships" => %{
"organization" => %{
"data" => %{"id" => organization.id |> Integer.to_string, "type" => "organization"}
}
},
"type" => "stripe-connect-account",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
test "renders can-accept-donations as true in prod when charges-enabled is true" do
Application.put_env(:code_corps, :stripe_env, :prod)
organization = insert(:organization)
account = insert(:stripe_connect_account, organization: organization, charges_enabled: true)
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["can-accept-donations"] == true
assert rendered_json["data"]["attributes"]["charges-enabled"] == true
Application.put_env(:code_corps, :stripe_env, :test)
end
test "renders can-accept-donations as false in prod when charges-enabled is false" do
Application.put_env(:code_corps, :stripe_env, :prod)
organization = insert(:organization)
account = insert(:stripe_connect_account, organization: organization, charges_enabled: false)
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["can-accept-donations"] == false
assert rendered_json["data"]["attributes"]["charges-enabled"] == false
Application.put_env(:code_corps, :stripe_env, :test)
end
test "renders can-accept-donations as true in test when charges-enabled is false" do
organization = insert(:organization)
account = insert(:stripe_connect_account, organization: organization, charges_enabled: false)
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["can-accept-donations"] == true
assert rendered_json["data"]["attributes"]["charges-enabled"] == false
end
describe "recipient-status" do
test "renders as 'required' by default" do
account = insert(:stripe_connect_account, legal_entity_verification_status: "unverified")
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["recipient-status"] == "required"
end
test "renders as 'required' when fields_needed includes personal_id_number" do
account = insert(:stripe_connect_account, legal_entity_verification_status: "pending", verification_fields_needed: ["legal_entity.personal_id_number"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["recipient-status"] == "required"
end
test "renders as 'required' when fields_needed includes verification.document" do
account = insert(:stripe_connect_account, legal_entity_verification_status: "pending", verification_fields_needed: ["legal_entity.verification.document"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["recipient-status"] == "required"
end
test "renders as 'verified' when fields_needed does not include a legal_entity field" do
account = insert(:stripe_connect_account, legal_entity_verification_status: "pending", verification_fields_needed: [])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["recipient-status"] == "verified"
end
test "renders as 'verified' when verification status is 'verified'" do
account = insert(:stripe_connect_account, legal_entity_verification_status: "verified")
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["recipient-status"] == "verified"
end
end
describe "verification-document-status" do
test "renders as 'pending_requirement' by default" do
account = insert(:stripe_connect_account)
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["verification-document-status"] == "pending_requirement"
end
test "renders as 'pending_requirement' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_verification_document: nil,
verification_fields_needed: ["legal_entity.type"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["verification-document-status"] == "pending_requirement"
end
test "renders as 'required' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_verification_document: nil,
verification_fields_needed: ["legal_entity.verification.document"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["verification-document-status"] == "required"
end
test "renders as 'verifying' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_verification_document: "file_123",
legal_entity_verification_status: "pending",
verification_fields_needed: ["legal_entity.verification.document"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["verification-document-status"] == "verifying"
end
test "renders as 'verified' when the verification status is verified" do
account = insert(
:stripe_connect_account,
legal_entity_verification_status: "verified")
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["verification-document-status"] == "verified"
end
test "renders as 'verified' when there's a document and document is not required" do
account = insert(
:stripe_connect_account,
legal_entity_verification_document: "file_123",
verification_fields_needed: ["legal_entity.personal_id_number"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["verification-document-status"] == "verified"
end
test "renders as 'errored' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_verification_document: "file_123",
verification_fields_needed: ["legal_entity.verification.document"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["verification-document-status"] == "errored"
end
end
describe "personal-id-number-status" do
test "renders as 'pending_requirement' by default" do
account = insert(:stripe_connect_account)
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["personal-id-number-status"] == "pending_requirement"
end
test "renders as 'pending_requirement' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_personal_id_number_provided: false,
verification_fields_needed: ["legal_entity.type"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["personal-id-number-status"] == "pending_requirement"
end
test "renders as 'required' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_personal_id_number_provided: false,
verification_fields_needed: ["legal_entity.personal_id_number"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["personal-id-number-status"] == "required"
end
test "renders as 'verifying' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_personal_id_number_provided: true,
legal_entity_verification_status: "pending")
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["personal-id-number-status"] == "verifying"
end
test "renders as 'verified' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_personal_id_number_provided: true,
verification_fields_needed: ["external_account"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["personal-id-number-status"] == "verified"
end
end
describe "bank-account-status" do
test "renders as 'pending_requirement' by default" do
account = insert(:stripe_connect_account)
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["bank-account-status"] == "pending_requirement"
end
test "renders as 'pending_requirement' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_verification_status: "pending",
verification_fields_needed: ["legal_entity.personal_id_number"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["bank-account-status"] == "pending_requirement"
end
test "renders as 'required' when appropriate" do
account = insert(
:stripe_connect_account,
legal_entity_verification_status: "verified",
verification_fields_needed: ["external_account"])
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["bank-account-status"] == "required"
end
test "renders as 'verified' when appropriate" do
account = insert(
:stripe_connect_account,
external_account: "ba_123")
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["bank-account-status"] == "verified"
end
end
describe "external account fields" do
test "render if there is an associated external account" do
account = insert(:stripe_connect_account)
insert(:stripe_external_account, last4: "ABCD", routing_number: "123456", stripe_connect_account: account)
account = CodeCorpsWeb.StripeConnectAccountController.preload(account)
rendered_json = render(CodeCorpsWeb.StripeConnectAccountView, "show.json-api", data: account)
assert rendered_json["data"]["attributes"]["bank-account-last4"] == "ABCD"
assert rendered_json["data"]["attributes"]["bank-account-routing-number"] == "123456"
end
end
end
<|start_filename|>test/lib/code_corps/services/user_service_test.exs<|end_filename|>
defmodule CodeCorps.Services.UserServiceTest do
use ExUnit.Case, async: true
use CodeCorps.ModelCase
alias CodeCorps.StripePlatformCustomer
alias CodeCorps.Services.UserService
describe "update/1" do
test "it just updates the user if there is nothing associated to update" do
user = insert(:user, email: "<EMAIL>", first_name: "Joe")
{:ok, user, nil, nil}
= UserService.update(user, %{email: "<EMAIL>"})
assert user.email == "<EMAIL>"
assert user.first_name == "Joe"
end
test "it returns an {:error, changeset} if there are validation errors with the user" do
user = insert(:user, email: "<EMAIL>")
{:error, changeset} = UserService.update(user, %{email: ""})
refute changeset.valid?
end
test "it just updates the user if the changeset does not contain an email" do
user = insert(:user, email: "<EMAIL>")
stripe_platform_customer = insert(:stripe_platform_customer, email: "<EMAIL>", user: user)
{:ok, user, nil, nil}
= UserService.update(user, %{first_name: "Mark"})
assert user.first_name == "Mark"
assert user.email == "<EMAIL>"
stripe_platform_customer = Repo.get(StripePlatformCustomer, stripe_platform_customer.id)
assert stripe_platform_customer.email == "<EMAIL>"
end
test "it also updates the associated platform customer if there is one" do
user = insert(:user, email: "<EMAIL>")
platform_customer = insert(:stripe_platform_customer, user: user)
{:ok, user, %StripePlatformCustomer{}, nil}
= UserService.update(user, %{email: "<EMAIL>"})
assert user.email == "<EMAIL>"
platform_customer = Repo.get(StripePlatformCustomer, platform_customer.id)
assert platform_customer.email == "<EMAIL>"
end
test "it also updates the associated connect customers if there are any" do
user = insert(:user, email: "<EMAIL>")
platform_customer = %{id_from_stripe: platform_customer_id}
= insert(:stripe_platform_customer, user: user)
[connect_customer_1, connect_customer_2] =
insert_pair(:stripe_connect_customer, stripe_platform_customer: platform_customer)
{:ok, user, %StripePlatformCustomer{}, connect_updates} = UserService.update(user, %{email: "<EMAIL>"})
assert user.email == "<EMAIL>"
platform_customer = Repo.get_by(StripePlatformCustomer, id_from_stripe: platform_customer_id)
assert platform_customer.email == "<EMAIL>"
[
{:ok, %Stripe.Customer{} = stripe_record_1},
{:ok, %Stripe.Customer{} = stripe_record_2}
] = connect_updates
original_ids_from_stripe =
[connect_customer_1, connect_customer_2]
|> Enum.map(&Map.get(&1, :id_from_stripe))
|> Enum.sort
result_ids_from_stripe =
[stripe_record_1, stripe_record_2]
|> Enum.map(&Map.get(&1, :id))
|> Enum.sort
assert result_ids_from_stripe == original_ids_from_stripe
assert stripe_record_1.email == "<EMAIL>"
assert stripe_record_2.email == "<EMAIL>"
end
end
end
<|start_filename|>lib/code_corps/github/api/gateway.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.Gateway do
@moduledoc ~S"""
The gateway through which all communication with the GitHub API must go.
The purpose of this module is to centralize the most basic GitHub API request,
so the module can be injected into tests easily, giving full control over the
tested response.
"""
alias CodeCorps.GitHub
@spec request(GitHub.method, String.t, GitHub.body, GitHub.headers, list) :: GitHub.response
def request(method, url, body, headers, options) do
HTTPoison.request(method, url, body, headers, options)
end
end
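# Hypothetical test double sketch, not part of the original source: the
# module configured under config :code_corps, :github only needs to expose
# request/5 with this signature, so a mock could look like:
#
#     defmodule MyFakeGitHubAPI do
#       def request(_method, _url, _body, _headers, _options) do
#         {:ok, %HTTPoison.Response{status_code: 200, body: "{}"}}
#       end
#     end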
<|start_filename|>lib/code_corps/emails/project_user_acceptance_email.ex<|end_filename|>
defmodule CodeCorps.Emails.ProjectUserAcceptanceEmail do
import Bamboo.Email, only: [to: 2]
import Bamboo.PostmarkHelper
alias CodeCorps.{Project, ProjectUser, Repo, User, WebClient}
alias CodeCorps.Emails.BaseEmail
alias CodeCorps.Presenters.ImagePresenter
@spec create(ProjectUser.t) :: Bamboo.Email.t
def create(%ProjectUser{project: project, user: user}) do
BaseEmail.create
|> to(user.email)
|> template(template_id(), build_model(project, user))
end
@spec build_model(Project.t, User.t) :: map
defp build_model(%Project{} = project, %User{} = user) do
%{
project_logo_url: ImagePresenter.large(project),
project_title: project.title,
project_url: project |> preload() |> url(),
subject: "#{project.title} just added you as a contributor",
user_first_name: user.first_name,
user_image_url: ImagePresenter.large(user)
}
end
@spec preload(Project.t) :: Project.t
defp preload(%Project{} = project), do: project |> Repo.preload(:organization)
@spec url(Project.t) :: String.t
defp url(project) do
WebClient.url()
|> URI.merge(project.organization.slug <> "/" <> project.slug)
|> URI.to_string
end
@spec template_id :: String.t
defp template_id, do: Application.get_env(:code_corps, :postmark_project_user_acceptance_template)
end
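# Hypothetical usage sketch, not part of the original source. It assumes the
# %ProjectUser{} is preloaded with :project and :user and that the app's
# Bamboo mailer module is named CodeCorps.Mailer:
#
#     project_user
#     |> CodeCorps.Repo.preload([:project, :user])
#     |> CodeCorps.Emails.ProjectUserAcceptanceEmail.create()
#     |> CodeCorps.Mailer.deliver_now()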
<|start_filename|>test/lib/code_corps/model/stripe_external_account_test.exs<|end_filename|>
defmodule CodeCorps.StripeExternalAccountTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeExternalAccount
@valid_attrs %{account_id_from_stripe: "some content", id_from_stripe: "some content"}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = StripeExternalAccount.changeset(%StripeExternalAccount{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = StripeExternalAccount.changeset(%StripeExternalAccount{}, @invalid_attrs)
refute changeset.valid?
end
end
<|start_filename|>lib/code_corps/emails/base_email.ex<|end_filename|>
defmodule CodeCorps.Emails.BaseEmail do
import Bamboo.Email, only: [from: 2, new_email: 0]
alias CodeCorps.User
@spec create :: Bamboo.Email.t
def create do
new_email()
|> from("Code Corps<<EMAIL>>")
end
@spec get_name(User.t) :: String.t
def get_name(%User{first_name: nil}), do: "there"
def get_name(%User{first_name: name}), do: name
end
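# Behavior of the name fallback above, shown for illustration (not part of
# the original source):
#
#     CodeCorps.Emails.BaseEmail.get_name(%CodeCorps.User{first_name: nil})
#     #=> "there"
#
#     CodeCorps.Emails.BaseEmail.get_name(%CodeCorps.User{first_name: "Josh"})
#     #=> "Josh"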
<|start_filename|>priv/repo/migrations/20171123065902_remove_github_repo_and_owner_from_project.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.RemoveGithubRepoAndOwnerFromProject do
use Ecto.Migration
def change do
alter table(:projects) do
remove(:github_repo)
remove(:github_owner)
end
end
end
<|start_filename|>test/lib/code_corps/policy/message_test.exs<|end_filename|>
defmodule CodeCorps.Policy.MessageTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.Message, only: [create?: 2, show?: 2, scope: 2]
alias CodeCorps.{Message, Repo}
defp params_for(initiated_by, project_id, author_id) do
%{
"initiated_by" => initiated_by,
"author_id" => author_id,
"project_id" => project_id
}
end
describe "scope" do
test "returns all records for admin user" do
insert_list(3, :message)
user = insert(:user, admin: true)
assert Message |> scope(user) |> Repo.all |> Enum.count == 3
end
test "returns records where user is the author or they administer the project, or they are the target of the conversation" do
user = insert(:user, admin: false)
%{project: project_user_applied_to} =
insert(:project_user, user: user, role: "pending")
%{project: project_user_contributes_to} =
insert(:project_user, user: user, role: "contributor")
%{project: project_user_administers} =
insert(:project_user, user: user, role: "admin")
%{project: other_project_user_administers} =
insert(:project_user, user: user, role: "admin")
%{project: project_user_owns} =
insert(:project_user, user: user, role: "owner")
message_authored_by = insert(:message, author: user)
some_other_message = insert(:message)
message_from_project_applied_to =
insert(:message, project: project_user_applied_to)
message_from_contributing_project =
insert(:message, project: project_user_contributes_to)
message_from_administered_project =
insert(:message, project: project_user_administers)
message_from_other_administered_project =
insert(:message, project: other_project_user_administers)
message_from_owned_project =
insert(:message, project: project_user_owns)
%{message: message_where_user_is_target} =
insert(:conversation, user: user)
result_ids =
Message
|> scope(user)
|> Repo.all
|> Enum.map(&Map.get(&1, :id))
assert message_authored_by.id in result_ids
refute message_from_project_applied_to.id in result_ids
refute message_from_contributing_project.id in result_ids
assert message_from_administered_project.id in result_ids
assert message_from_other_administered_project.id in result_ids
assert message_from_owned_project.id in result_ids
refute some_other_message.id in result_ids
assert message_where_user_is_target.id in result_ids
end
end
describe "show?" do
test "returns true when initiated by user and user is the author" do
author = insert(:user)
message = insert(:message, initiated_by: "user", author: author)
assert show?(author, message)
end
test "returns false when initiated by user and user is not the author" do
user = insert(:user)
message = insert(:message, initiated_by: "user")
refute show?(user, message |> Repo.preload(:conversations))
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
message = insert(:message, initiated_by: "user", project: project)
refute show?(user, message |> Repo.preload(:conversations))
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
message = insert(:message, initiated_by: "user", project: project)
refute show?(user, message |> Repo.preload(:conversations))
end
test "returns true when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
message = insert(:message, initiated_by: "user", project: project)
assert show?(user, message |> Repo.preload(:conversations))
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
message = insert(:message, initiated_by: "user", project: project)
assert show?(user, message |> Repo.preload(:conversations))
end
test "returns true when message conversation is targeted at user" do
user = insert(:user)
%{message: message} = insert(:conversation, user: user)
assert show?(user, message |> Repo.preload(:conversations))
end
end
describe "create?" do
test "returns true when initiated by user and user is the author" do
author = insert(:user)
params = params_for("user", 1, author.id)
assert create?(author, params)
end
test "returns false when initiated by user and user is not the author" do
user = insert(:user)
params = params_for("user", 1, -1)
refute create?(user, params)
end
test "returns false when initiated by admin and user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
author = insert(:user)
params = params_for("admin", project.id, author.id)
refute create?(user, params)
end
test "returns false when initiated by admin and user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
author = insert(:user)
params = params_for("admin", project.id, author.id)
refute create?(user, params)
end
test "returns true when initiated by admin and user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
author = insert(:user)
params = params_for("admin", project.id, author.id)
assert create?(user, params)
end
test "returns true when initiated by admin and user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
author = insert(:user)
params = params_for("admin", project.id, author.id)
assert create?(user, params)
end
end
end
<|start_filename|>lib/code_corps_web/controllers/user_task_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.UserTaskController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{
Analytics.SegmentTracker,
UserTask,
User,
Helpers.Query
}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with user_tasks <- UserTask |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: user_tasks)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %UserTask{} = user_task <- UserTask |> Repo.get(id) do
conn |> render("show.json-api", data: user_task)
end
end
@spec create(Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %UserTask{}, params),
{:ok, %UserTask{} = user_task} <- %UserTask{} |> UserTask.create_changeset(params) |> Repo.insert
do
current_user |> track_assigned(user_task)
conn |> put_status(:created) |> render("show.json-api", data: user_task)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{"id" => id} = params) do
with %UserTask{} = user_task <- UserTask |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, user_task),
{:ok, %UserTask{} = user_task} <- user_task |> UserTask.update_changeset(params) |> Repo.update
do
current_user |> track_assigned(user_task)
conn |> render("show.json-api", data: user_task)
end
end
@spec delete(Conn.t, map) :: Conn.t
def delete(%Conn{} = conn, %{"id" => id} = _params) do
with %UserTask{} = user_task <- UserTask |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:delete, user_task),
{:ok, %UserTask{} = _user_task} <- user_task |> Repo.delete
do
current_user |> track_unassigned(user_task)
conn |> send_resp(:no_content, "")
end
end
@spec track_assigned(User.t, UserTask.t) :: any
defp track_assigned(%User{id: user_id}, %UserTask{user_id: assigned_user_id} = user_task)
when user_id == assigned_user_id, do: SegmentTracker.track(user_id, "Assigned Task to Self", user_task)
defp track_assigned(%User{id: user_id}, %UserTask{} = user_task),
do: SegmentTracker.track(user_id, "Assigned Task to Someone Else", user_task)
@spec track_unassigned(User.t, UserTask.t) :: any
defp track_unassigned(%User{id: user_id}, %UserTask{user_id: assigned_user_id} = user_task)
when user_id == assigned_user_id, do: SegmentTracker.track(user_id, "Unassigned Task from Self", user_task)
defp track_unassigned(%User{id: user_id}, %UserTask{} = user_task),
do: SegmentTracker.track(user_id, "Unassigned Task from Someone Else", user_task)
end
<|start_filename|>lib/code_corps/policy/task.ex<|end_filename|>
defmodule CodeCorps.Policy.Task do
@moduledoc ~S"""
Authorization policy in charge of determining if a `User` is authorized to
perform an action on a `Task`.
"""
import CodeCorps.Policy.Helpers,
only: [get_project: 1, administered_by?: 2, task_authored_by?: 2]
alias CodeCorps.{Task, User}
def create?(%User{id: user_id}, %{"user_id" => author_id})
when user_id == author_id and not is_nil(user_id), do: true
def create?(%User{}, %{}), do: false
def update?(%User{} = user, %Task{} = task) do
case task |> task_authored_by?(user) do
true -> true
false -> task |> get_project |> administered_by?(user)
end
end
end
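# Illustrative outcomes, not part of the original source (note that the
# "user_id" comparison uses ==, so a string id would not match an integer
# user id):
#
#     CodeCorps.Policy.Task.create?(%CodeCorps.User{id: 1}, %{"user_id" => 1})
#     #=> true
#
#     CodeCorps.Policy.Task.create?(%CodeCorps.User{id: 1}, %{"user_id" => 2})
#     #=> false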
<|start_filename|>test/lib/code_corps/policy/github_event_test.exs<|end_filename|>
defmodule CodeCorps.GithubEventPolicyTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.GithubEvent, only: [index?: 1, show?: 1, update?: 1]
describe "index" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
assert index?(user)
end
test "returns false when user is not an admin" do
user = build(:user, admin: false)
refute index?(user)
end
end
describe "show" do
test "returns true when user is an admin" do
user = insert(:user, admin: true)
assert show?(user)
end
test "returns false when user is not an admin" do
user = insert(:user, admin: false)
refute show?(user)
end
end
describe "update" do
test "returns true when user is an admin" do
user = insert(:user, admin: true)
assert update?(user)
end
test "returns false when user is not an admin" do
user = insert(:user, admin: false)
refute update?(user)
end
end
end
<|start_filename|>lib/code_corps/github/event/issue_comment/issue_comment.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.IssueComment do
@moduledoc ~S"""
In charge of handling a GitHub Webhook payload for the IssueComment event type
[https://developer.github.com/v3/activity/events/types/#issuecommentevent](https://developer.github.com/v3/activity/events/types/#issuecommentevent)
"""
@behaviour CodeCorps.GitHub.Event.Handler
alias CodeCorps.{
GitHub.Sync,
GitHub.Event.IssueComment.Validator
}
@doc ~S"""
Handles the "IssueComment" GitHub webhook
The process is as follows:
- validate the payload is structured as expected
- validate the action is properly supported
- sync the comment using `CodeCorps.GitHub.Sync.Comment`
"""
@impl CodeCorps.GitHub.Event.Handler
@spec handle(map) ::
Sync.issue_comment_event_outcome() | {:error, :unexpected_payload}
def handle(payload) do
with {:ok, :valid} <- validate_payload(payload) do
Sync.issue_comment_event(payload)
else
{:error, error} -> {:error, error}
end
end
@spec validate_payload(map) :: {:ok, :valid} | {:error, :unexpected_payload}
defp validate_payload(%{} = payload) do
if Validator.valid?(payload) do
{:ok, :valid}
else
{:error, :unexpected_payload}
end
end
end
<|start_filename|>lib/code_corps/sentry/sync.ex<|end_filename|>
defmodule CodeCorps.Sentry.Sync do
def capture_exception(exception, opts \\ []) do
exception
|> Sentry.capture_exception(opts |> Keyword.put(:result, :sync))
end
end
<|start_filename|>lib/code_corps/github/adapters/issue.ex<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.Issue do
@moduledoc """
Used to adapt a GitHub Issue payload into attributes for creating or updating
a `CodeCorps.GithubIssue` and vice-versa.
"""
alias CodeCorps.{
Adapter.MapTransformer,
GitHub.Adapters.Utils.BodyDecorator,
GithubIssue,
Task
}
@github_payload_to_github_issue_mapping [
{:body, ["body"]},
{:closed_at, ["closed_at"]},
{:comments_url, ["comments_url"]},
{:events_url, ["events_url"]},
{:github_created_at, ["created_at"]},
{:github_id, ["id"]},
{:github_updated_at, ["updated_at"]},
{:html_url, ["html_url"]},
{:labels_url, ["labels_url"]},
{:locked, ["locked"]},
{:number, ["number"]},
{:state, ["state"]},
{:title, ["title"]},
{:url, ["url"]}
]
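  # Each `{attribute, path}` pair above maps the value found at `path` in the
  # GitHub payload to the named `GithubIssue` attribute; for example,
  # `{:github_id, ["id"]}` stores the payload's "id" under `:github_id`.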
@doc ~S"""
Converts a GitHub Issue payload into a set of attributes used to create or
update a `GithubIssue` record.
"""
@spec to_issue(map) :: map
def to_issue(%{} = payload) do
payload |> MapTransformer.transform(@github_payload_to_github_issue_mapping)
end
@github_payload_to_task_mapping [
{:created_at, ["created_at"]},
{:markdown, ["body"]},
{:modified_at, ["updated_at"]},
{:status, ["state"]},
{:title, ["title"]}
]
@github_issue_to_task_mapping [
{:created_at, [:github_created_at]},
{:markdown, [:body]},
{:modified_at, [:github_updated_at]},
{:status, [:state]},
{:title, [:title]}
]
@doc ~S"""
Converts a GitHub Issue payload into a set of attributes used to create or
update a `Task` record.
"""
@spec to_task(GithubIssue.t) :: map
def to_task(%GithubIssue{} = github_issue) do
github_issue
|> Map.from_struct
|> MapTransformer.transform(@github_issue_to_task_mapping)
end
@autogenerated_github_keys ~w(closed_at comments_url created_at events_url html_url id labels_url number updated_at url)
@doc ~S"""
Converts a `GithubIssue` or `Task` into a set of attributes used to create or
update an associated GitHub Issue on the GitHub API.
"""
@spec to_api(GithubIssue.t | Task.t) :: map
def to_api(%GithubIssue{} = github_issue) do
github_issue
|> Map.from_struct
|> MapTransformer.transform_inverse(@github_payload_to_github_issue_mapping)
|> Map.drop(@autogenerated_github_keys)
|> BodyDecorator.add_code_corps_header(github_issue)
end
def to_api(%Task{} = task) do
task
|> Map.from_struct
|> MapTransformer.transform_inverse(@github_payload_to_task_mapping)
|> Map.drop(@autogenerated_github_keys)
|> BodyDecorator.add_code_corps_header(task)
end
end
<|start_filename|>test/lib/code_corps_web/controllers/user_role_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.UserRoleControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :user_role
describe "index" do
test "lists all entries on index", %{conn: conn} do
[user_role_1, user_role_2] = insert_pair(:user_role)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([user_role_1.id, user_role_2.id])
end
test "filters resources on index", %{conn: conn} do
[user_role_1, user_role_2 | _] = insert_list(3, :user_role)
path = "user-roles/?filter[id]=#{user_role_1.id},#{user_role_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([user_role_1.id, user_role_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
user_role = insert(:user_role)
conn
|> request_show(user_role)
|> json_response(200)
|> assert_id_from_response(user_role.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
role = insert(:role)
attrs = (%{user: current_user, role: role})
assert conn |> request_create(attrs) |> json_response(201)
user_id = current_user.id
tracking_properties = %{
role: role.name,
role_id: role.id
}
assert_received {:track, ^user_id, "Added User Role", ^tracking_properties}
end
@tag :authenticated
test "renders 422 when data is invalid", %{conn: conn, current_user: current_user} do
role = build(:role)
invalid_attrs = %{role: role, user: current_user}
assert conn |> request_create(invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
role = insert(:role)
user = insert(:user)
attrs = %{role: role, user: user}
assert conn |> request_create(attrs) |> json_response(403)
end
end
describe "delete" do
@tag authenticated: :admin
test "deletes resource", %{conn: conn, current_user: current_user} do
user_role = insert(:user_role)
assert conn |> request_delete(user_role.id) |> response(204)
user_id = current_user.id
tracking_properties = %{
role: user_role.role.name,
role_id: user_role.role.id
}
assert_received {:track, ^user_id, "Removed User Role", ^tracking_properties}
end
test "does not delete resource and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag :authenticated
test "renders page not found when id is nonexistent on delete", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>test/lib/code_corps/github/webhook/event_support_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Webhook.EventSupportTest do
@moduledoc false
use ExUnit.Case, async: true
alias CodeCorps.GitHub.Webhook.EventSupport
test "supported_events/0 returns a list of supported events" do
assert EventSupport.supported_events |> is_list
end
test "unsupported_events/0 returns a list of unsupported events" do
assert EventSupport.unsupported_events |> is_list
end
describe "status/1" do
test "returns :supported for all supported events" do
EventSupport.supported_events |> Enum.each(fn {type, action} ->
assert EventSupport.status(type, action) == :supported
end)
end
test "returns :unsupported for all unsupported events" do
EventSupport.unsupported_events |> Enum.each(fn {type, action} ->
assert EventSupport.status(type, action) == :unsupported
end)
end
test "returns :ignored for any other event" do
assert EventSupport.status("foo", "bar") == :ignored
end
end
end
<|start_filename|>lib/code_corps_web/controllers/project_category_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.ProjectCategoryController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{ProjectCategory, User, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with project_categories <- ProjectCategory |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: project_categories)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %ProjectCategory{} = project_category <- ProjectCategory |> Repo.get(id) do
conn |> render("show.json-api", data: project_category)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %ProjectCategory{}, params),
{:ok, %ProjectCategory{} = project_category} <- %ProjectCategory{} |> ProjectCategory.create_changeset(params) |> Repo.insert do
conn |> put_status(:created) |> render("show.json-api", data: project_category)
end
end
@spec delete(Conn.t, map) :: Conn.t
def delete(%Conn{} = conn, %{"id" => id} = _params) do
with %ProjectCategory{} = project_category <- ProjectCategory |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:delete, project_category),
{:ok, %ProjectCategory{} = _project_category} <- project_category |> Repo.delete
do
conn |> Conn.assign(:project_category, project_category) |> send_resp(:no_content, "")
end
end
end
<|start_filename|>lib/code_corps/github/event.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event do
@moduledoc ~S"""
In charge of marking `GithubEvent` records as "processing", "processed" or
"errored", based on the outcome of processing a webhook event payload.
"""
alias CodeCorps.{GithubEvent, Repo}
alias Ecto.Changeset
defmodule GitHubEventError do
defexception [:reason]
def exception(reason),
do: %__MODULE__{reason: reason}
def message(%__MODULE__{reason: reason}),
do: reason
end
@type error :: atom | Changeset.t
@type result :: {:ok, any} | {:error, atom} | {:error, atom, any}
@doc ~S"""
Sets record status to "processing", marking it as being processed at this
moment. Our webhook handling should skip processing payloads for events which
are already being processed.
"""
@spec start_processing(GithubEvent.t) :: {:ok, GithubEvent.t}
def start_processing(%GithubEvent{} = event) do
event
|> Changeset.change(%{status: "processing"})
|> Repo.update()
end
@doc ~S"""
  Sets the record status to "processed" or "errored" based on the first element
  of the first argument, which is the result tuple. The first element of the
  result tuple should always be either `:ok` or `:error`. Tuples with any
  number of elements are supported.
"""
@spec stop_processing(result, GithubEvent.t) :: {:ok, GithubEvent.t}
def stop_processing({:ok, _data}, %GithubEvent{} = event) do
event
|> Changeset.change(%{status: "processed"})
|> Repo.update()
end
def stop_processing({:error, reason}, %GithubEvent{} = event) do
stop_processing({:error, reason, %{}}, event)
end
def stop_processing({:error, reason, error}, %GithubEvent{} = event) do
%GitHubEventError{reason: error}
|> CodeCorps.Sentry.capture_exception([stacktrace: System.stacktrace()])
changes = %{
status: "errored",
data: error |> format_data_if_exists(),
error: error |> Kernel.inspect(pretty: true),
failure_reason: reason |> Atom.to_string()
}
event
|> Changeset.change(changes)
|> Repo.update()
end
defp format_data_if_exists(%Ecto.Changeset{data: data}) do
data |> Kernel.inspect(pretty: true)
end
defp format_data_if_exists(_error), do: nil
end
<|start_filename|>test/support/stripe_case.ex<|end_filename|>
defmodule CodeCorps.StripeCase do
@moduledoc """
  This module defines the test case to be used by
  tests involving the Stripe service.
  Basically a stripped-down `CodeCorps.ModelCase`.
"""
use ExUnit.CaseTemplate
using do
quote do
alias CodeCorps.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import CodeCorps.Factories
import CodeCorps.StripeCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(CodeCorps.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(CodeCorps.Repo, {:shared, self()})
end
:ok
end
end
<|start_filename|>lib/code_corps_web/plugs/ids_to_integers.ex<|end_filename|>
defmodule CodeCorpsWeb.Plug.IdsToIntegers do
@moduledoc ~S"""
Converts `id` values in a `conn` parameters map into integers, if applicable.
The JSON API specification expects `id` values in resource objects to be
strings.
See http://jsonapi.org/format/#document-resource-object-identification
"""
alias Plug.Conn
@spec init(Keyword.t) :: Keyword.t
def init(opts), do: opts
@spec call(Conn.t, Keyword.t) :: Plug.Conn.t
def call(%Conn{params: %{} = params} = conn, _opts) do
converted_params =
params
|> Enum.map(&convert_key_value/1)
|> Enum.into(%{})
conn |> Map.put(:params, converted_params)
end
def call(%Conn{} = conn, _opts), do: conn
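  # Illustrative example (hypothetical params): a conn with params
  # %{"id" => "1", "task_id" => "2", "title" => "foo"} leaves this plug with
  # params %{"id" => 1, "task_id" => 2, "title" => "foo"}, since only the
  # whitelisted keys below are converted.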
@spec convert_key_value(tuple) :: tuple
defp convert_key_value({key, value}) do
case convert?(key) do
true -> {key, value |> ensure_integer()}
false -> {key, value}
end
end
@spec convert?(any) :: boolean
defp convert?("id"), do: true
defp convert?("auth_token_id"), do: true
defp convert?("category_id"), do: true
defp convert?("comment_id"), do: true
defp convert?("donation_goal_id"), do: true
defp convert?("github_app_installation_id"), do: true
defp convert?("github_repo_id"), do: true
defp convert?("organization_github_app_installation_id"), do: true
defp convert?("organization_invite_id"), do: true
defp convert?("organization_id"), do: true
defp convert?("preview_id"), do: true
defp convert?("project_id"), do: true
defp convert?("project_category_id"), do: true
defp convert?("project_skill_id"), do: true
defp convert?("project_user_id"), do: true
defp convert?("role_id"), do: true
defp convert?("role_skill_id"), do: true
defp convert?("skill_id"), do: true
defp convert?("slugged_route_id"), do: true
defp convert?("stripe_connect_account_id"), do: true
defp convert?("stripe_connect_card_id"), do: true
defp convert?("stripe_connect_charge_id"), do: true
defp convert?("stripe_connect_customer_id"), do: true
defp convert?("stripe_connect_plan_id"), do: true
defp convert?("stripe_connect_subscription_id"), do: true
defp convert?("stripe_event_id"), do: true
defp convert?("stripe_external_account_id"), do: true
defp convert?("stripe_file_upload_id"), do: true
defp convert?("stripe_invoice_id"), do: true
defp convert?("stripe_platform_card_id"), do: true
defp convert?("stripe_platform_customer_id"), do: true
defp convert?("task_id"), do: true
defp convert?("task_list_id"), do: true
defp convert?("task_skill_id"), do: true
defp convert?("user_id"), do: true
defp convert?("user_category_id"), do: true
defp convert?("user_role_id"), do: true
defp convert?("user_skill_id"), do: true
defp convert?("user_task_id"), do: true
defp convert?(_other), do: false
defp ensure_integer(value) when is_binary(value) do
value |> String.to_integer
end
defp ensure_integer(value), do: value
end
<|start_filename|>test/fixtures/github/endpoints/forbidden.json<|end_filename|>
{
"message": "Maximum number of login attempts exceeded. Please try again later.",
"documentation_url": "https://developer.github.com/v3"
}
<|start_filename|>lib/code_corps/policy/stripe_platform_customer.ex<|end_filename|>
defmodule CodeCorps.Policy.StripePlatformCustomer do
alias CodeCorps.StripePlatformCustomer
alias CodeCorps.User
def create?(%User{id: current_user_id}, %{"user_id" => user_id}), do: current_user_id == user_id
def create?(%User{}, %{}), do: false
def show?(%User{admin: true}, %StripePlatformCustomer{}), do: true
def show?(%User{id: current_user_id}, %StripePlatformCustomer{user_id: user_id}), do: current_user_id == user_id
def show?(%User{}, %StripePlatformCustomer{}), do: false
end
<|start_filename|>lib/code_corps_web/controllers/project_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.ProjectController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Project, Projects, User}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with projects <- Project.Query.list(params) |> preload do
conn |> render("index.json-api", data: projects)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{} = params) do
with %Project{} = project <- Project.Query.find(params) |> preload do
conn |> render("show.json-api", data: project)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %Project{}, params),
{:ok, %Project{} = project} <- params |> Projects.create(current_user)
do
conn |> put_status(:created) |> render("show.json-api", data: project)
end
end
@spec update(Conn.t, map) :: Conn.t
def update(%Conn{} = conn, %{} = params) do
with %Project{} = project <- Project.Query.find(params),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:update, project),
{:ok, %Project{} = updated_project} <- project |> Projects.update(params, current_user)
do
conn |> render("show.json-api", data: updated_project)
end
end
@preloads [
:categories, :donation_goals, :github_repos,
[organization: :stripe_connect_account], :project_categories,
:project_skills, :project_users, :skills, :stripe_connect_plan,
:task_lists, :tasks
]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>priv/repo/migrations/20170727052644_create_organization_invite.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.CreateOrganizationInvite do
use Ecto.Migration
def change do
create table(:organization_invites) do
add :code, :string, null: false
add :email, :string, null: false
add :title, :string, null: false
add :fulfilled, :boolean, default: false, null: false
timestamps()
end
create index(:organization_invites, [:code], unique: true)
create index(:organization_invites, [:email])
end
end
<|start_filename|>lib/code_corps/github/sync/user/record_linker.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.User.RecordLinker do
@moduledoc ~S"""
  In charge of finding a user to link with a given record, based on a GitHub
  API payload containing the user.
The only entry point is `link_to/2`.
"""
import Ecto.Query
alias CodeCorps.{
Accounts,
Comment,
GithubComment,
GithubIssue,
Repo,
Task,
User
}
@type result :: {:ok, User.t} |
{:error, Ecto.Changeset.t} |
{:error, :multiple_users}
@doc ~S"""
Finds or creates a user using information in the resource and the GitHub API
payload.
The process is as follows:
- Find all affected records and extract their user data.
- Search for the user in our database.
- If we match a single user, then the resource should be related to that
user.
- If there are no matching users, then the resource was created on GitHub by
someone who does not have a matching GitHub-connected Code Corps account.
We create a placeholder user account until that GitHub user is claimed by
a Code Corps user.
- If there are multiple matching users, this is an unexpected scenario and
should error out.
"""
@spec link_to(GithubComment.t() | GithubIssue.t(), map) :: result
def link_to(%GithubComment{} = comment, %{"user" => user}), do: do_link_to(comment, user)
def link_to(%GithubIssue{} = issue, %{"user" => user}), do: do_link_to(issue, user)
defp do_link_to(record, user_attrs) do
record |> match_users() |> marshall_response(user_attrs)
end
@spec match_users(GithubComment.t() | GithubIssue.t()) :: list(User.t())
defp match_users(%GithubComment{github_id: github_id}) do
query = from u in User,
distinct: u.id,
join: c in Comment, on: u.id == c.user_id,
join: gc in GithubComment, on: gc.id == c.github_comment_id, where: gc.github_id == ^github_id
query |> Repo.all()
end
defp match_users(%GithubIssue{id: github_issue_id}) do
query = from u in User,
distinct: u.id,
join: t in Task, on: u.id == t.user_id, where: t.github_issue_id == ^github_issue_id
query |> Repo.all()
end
@spec marshall_response(list, map) :: result
defp marshall_response([%User{} = single_user], %{}), do: {:ok, single_user}
defp marshall_response([], %{} = user_attrs) do
user_attrs |> find_or_create_disassociated_user()
end
defp marshall_response([_head | _tail], %{}), do: {:error, :multiple_users}
@spec find_or_create_disassociated_user(map) :: {:ok, User.t}
def find_or_create_disassociated_user(%{"id" => github_id} = attrs) do
case User |> Repo.get_by(github_id: github_id) do
nil -> attrs |> Accounts.create_from_github()
%User{} = user -> {:ok, user}
end
end
end
<|start_filename|>test/lib/code_corps/accounts/users_test.exs<|end_filename|>
defmodule CodeCorps.Accounts.UsersTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.TestHelpers, only: [assert_ids_from_query: 2]
alias CodeCorps.{Accounts, User}
describe "project_filter/2" do
test "filters users by project filter" do
user_1 = insert(:user)
user_2 = insert(:user)
project = insert(:project)
insert(:project_user, user: user_1, project: project)
insert(:project_user, user: user_2, project: project)
insert(:project_user)
result =
User
|> Accounts.Users.project_filter(%{"project_id" => project.id})
|> Repo.all()
assert_ids_from_query(result, [user_1.id, user_2.id])
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/conversation_part_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ConversationPartControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :conversation_part
@valid_attrs %{
body: "Test body."
}
@invalid_attrs %{
body: nil
}
describe "index" do
@tag :authenticated
test "lists all entries user is authorized to view", %{conn: conn, current_user: user} do
%{project: project} = insert(:project_user, role: "admin", user: user)
message_on_user_administered_project = insert(:message, project: project)
conversation_on_user_administered_project =
insert(:conversation, message: message_on_user_administered_project)
conversation_part_in_project =
insert(:conversation_part, conversation: conversation_on_user_administered_project)
conversation_by_user = insert(:conversation, user: user)
conversation_part_from_user =
insert(:conversation_part, conversation: conversation_by_user)
other_conversation = insert(:conversation)
_other_part = insert(:conversation_part, conversation: other_conversation)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([
conversation_part_in_project.id,
conversation_part_from_user.id
])
end
@tag authenticated: :admin
test "lists all entries if user is admin", %{conn: conn} do
[part_1, part_2] = insert_pair(:conversation_part)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([part_1.id, part_2.id])
end
end
describe "show" do
@tag :authenticated
test "shows chosen resource", %{conn: conn, current_user: user} do
conversation = insert(:conversation, user: user)
conversation_part = insert(:conversation_part, conversation: conversation)
conn
|> request_show(conversation_part)
|> json_response(200)
|> assert_id_from_response(conversation_part.id)
end
test "renders 401 when unauthenticated", %{conn: conn} do
conversation_part = insert(:conversation_part)
assert conn |> request_show(conversation_part) |> json_response(401)
end
@tag :authenticated
test "renders 403 when unauthorized", %{conn: conn} do
conversation_part = insert(:conversation_part)
assert conn |> request_show(conversation_part) |> json_response(403)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: user} do
conversation = insert(:conversation, user: user)
attrs = @valid_attrs |> Map.merge(%{author_id: user.id, conversation_id: conversation.id})
assert conn |> request_create(attrs) |> json_response(201)
end
@tag :authenticated
test "does not create resource and renders 422 when data is invalid", %{
conn: conn,
current_user: user
} do
conversation = insert(:conversation, user: user)
attrs = @invalid_attrs |> Map.merge(%{author_id: user.id, conversation_id: conversation.id})
assert conn |> request_create(attrs) |> json_response(422)
end
test "does not create resource and renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
end
<|start_filename|>priv/repo/migrations/20171006161407_change_organization_invite_title_to_organization_name.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.ChangeOrganizationInviteTitleToOrganizationName do
use Ecto.Migration
def change do
rename table(:organization_invites), :title, to: :organization_name
end
end
<|start_filename|>priv/repo/migrations/20171114225713_migrate_project_github_repos_to_github_repo.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.MigrateProjectGithubReposToGithubRepo do
use Ecto.Migration
import Ecto.Query
alias CodeCorps.Repo
def up do
project_github_repos = from(
pgr in "project_github_repos",
left_join:
gr in "github_repos",
on: gr.id == pgr.github_repo_id,
select: {pgr.project_id, pgr.sync_state, gr.id, gr.sync_state, pgr.id}
) |> Repo.all()
project_github_repos
|> Enum.each(fn {project_id, project_repo_state, repo_id, repo_state, project_repo_id} ->
sync_state = transform_sync_state(project_repo_state, repo_state)
from(
gr in "github_repos",
where: [id: ^repo_id],
inner_join:
pgr in "project_github_repos",
on: gr.id == pgr.github_repo_id,
where: pgr.id == ^project_repo_id,
update: [set: [project_id: ^project_id, sync_state: ^sync_state]]
) |> Repo.update_all([])
end)
end
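  # If the old project-repo association was still unsynced, syncing the repo,
  # or errored while syncing the repo, keep the repo's own sync_state;
  # otherwise the association's state wins. Illustrative values:
  # ("unsynced", "synced") -> "synced", but ("errored_syncing_tasks", "synced")
  # -> "errored_syncing_tasks".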
defp transform_sync_state("unsynced", repo_state), do: repo_state
defp transform_sync_state("syncing_github_repo", repo_state), do: repo_state
defp transform_sync_state("errored_syncing_github_repo", repo_state), do: repo_state
defp transform_sync_state(project_repo_state, _repo_state), do: project_repo_state
def down do
# unsupported
end
end
<|start_filename|>test/test_helper.exs<|end_filename|>
# Make sure all required plugins start before tests start running
# Needs to be called before ExUnit.start
{:ok, _} = Application.ensure_all_started(:ex_machina)
{:ok, _} = Application.ensure_all_started(:bypass)
ExUnit.configure exclude: [acceptance: true]
ExUnit.start
Ecto.Adapters.SQL.Sandbox.mode(CodeCorps.Repo, :manual)
<|start_filename|>lib/code_corps_web/controllers/user_role_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.UserRoleController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{UserRole, User, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with user_roles <- UserRole |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: user_roles)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %UserRole{} = user_role <- UserRole |> Repo.get(id) do
conn |> render("show.json-api", data: user_role)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %UserRole{}, params),
{:ok, %UserRole{} = user_role} <- %UserRole{} |> UserRole.create_changeset(params) |> Repo.insert do
conn |> put_status(:created) |> render("show.json-api", data: user_role)
end
end
@spec delete(Conn.t, map) :: Conn.t
def delete(%Conn{} = conn, %{"id" => id} = _params) do
with %UserRole{} = user_role <- UserRole |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:delete, user_role),
{:ok, %UserRole{} = _user_role} <- user_role |> Repo.delete
do
conn |> Conn.assign(:user_role, user_role) |> send_resp(:no_content, "")
end
end
end
<|start_filename|>test/lib/code_corps/stripe_service/stripe_connect_external_account_service_test.exs<|end_filename|>
defmodule CodeCorps.StripeService.StripeConnectExternalAccountServiceTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeService.StripeConnectExternalAccountService
describe "create" do
test "creates a StripeExternalAccount" do
api_external_account = %Stripe.BankAccount{id: "bnk_123"}
local_connect_account = insert(:stripe_connect_account)
{:ok, %CodeCorps.StripeExternalAccount{} = external_account} =
StripeConnectExternalAccountService.create(api_external_account, local_connect_account)
assert external_account.id_from_stripe == "bnk_123"
assert external_account.stripe_connect_account_id == local_connect_account.id
assert external_account.account_id_from_stripe == local_connect_account.id_from_stripe
end
end
end
<|start_filename|>test/lib/code_corps_web/views/project_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ProjectViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
organization = insert(:organization)
project = insert(:project, organization: organization, total_monthly_donated: 5000, default_color: "blue", approval_requested: true, approved: true)
donation_goal = insert(:donation_goal, project: project)
project_category = insert(:project_category, project: project)
github_repo = insert(:github_repo, project: project)
skill = insert(:skill)
project_skill = insert(:project_skill, project: project, skill: skill)
project_user = insert(:project_user, project: project)
stripe_connect_plan = insert(:stripe_connect_plan, project: project)
task_list = insert(:task_list, project: project)
task = insert(:task, task_list: task_list, project: project)
host = Application.get_env(:code_corps, :asset_host)
project = CodeCorpsWeb.ProjectController.preload(project)
rendered_json = render(CodeCorpsWeb.ProjectView, "show.json-api", data: project)
expected_json = %{
"data" => %{
"attributes" => %{
"approval-requested" => true,
"approved" => true,
"can-activate-donations" => false,
"cloudinary-public-id" => nil,
"description" => project.description,
"donations-active" => true,
"icon-large-url" => "#{host}/icons/project_default_large_blue.png",
"icon-thumb-url" => "#{host}/icons/project_default_thumb_blue.png",
"inserted-at" => project.inserted_at,
"long-description-body" => project.long_description_body,
"long-description-markdown" => project.long_description_markdown,
"should-link-externally" => project.should_link_externally,
"slug" => project.slug,
"title" => project.title,
"total-monthly-donated" => project.total_monthly_donated,
"updated-at" => project.updated_at,
"website" => project.website,
},
"id" => project.id |> Integer.to_string,
"relationships" => %{
"categories" => %{
"data" => [
%{"id" => project_category.category_id |> Integer.to_string, "type" => "category"}
]
},
"donation-goals" => %{"data" => [
%{
"id" => donation_goal.id |> Integer.to_string,
"type" => "donation-goal"
}
]},
"github-repos" => %{
"data" => [
%{
"id" => github_repo.id |> Integer.to_string,
"type" => "github-repo"
}
]
},
"organization" => %{
"data" => %{
"id" => organization.id |> Integer.to_string,
"type" => "organization"
}
},
"project-categories" => %{
"data" => [
%{
"id" => project_category.id |> Integer.to_string,
"type" => "project-category"
}
]
},
"project-skills" => %{
"data" => [
%{
"id" => project_skill.id |> Integer.to_string,
"type" => "project-skill"
}
]
},
"project-users" => %{
"data" => [
%{"id" => project_user.id |> Integer.to_string, "type" => "project-user"}
]
},
"skills" => %{
"data" => [
%{
"id" => skill.id |> Integer.to_string,
"type" => "skill"
}
]
},
"stripe-connect-plan" => %{
"data" => %{
"id" => stripe_connect_plan.id |> Integer.to_string,
"type" => "stripe-connect-plan"
}
},
"task-lists" => %{
"data" => [
%{
"id" => task_list.id |> Integer.to_string,
"type" => "task-list"
}
]
},
"tasks" => %{
"data" => [
%{
"id" => task.id |> Integer.to_string,
"type" => "task"
}
]
}
},
"type" => "project",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
test "renders can-activate-donations true when project has donations, no plan, payouts are enabled" do
organization = insert(:organization)
project = insert(:project, organization: organization)
insert(:donation_goal, project: project)
insert(:stripe_connect_account, organization: organization, charges_enabled: true, payouts_enabled: true)
conn = Phoenix.ConnTest.build_conn()
project = CodeCorpsWeb.ProjectController.preload(project)
rendered_json = render(CodeCorpsWeb.ProjectView, "show.json-api", data: project, conn: conn)
assert rendered_json["data"]["attributes"]["can-activate-donations"] == true
end
test "renders donations-active true when project has donations and a plan" do
project = insert(:project)
insert(:donation_goal, project: project)
insert(:stripe_connect_plan, project: project)
conn = Phoenix.ConnTest.build_conn()
project = CodeCorpsWeb.ProjectController.preload(project)
rendered_json = render(CodeCorpsWeb.ProjectView, "show.json-api", data: project, conn: conn)
assert rendered_json["data"]["attributes"]["donations-active"] == true
end
test "renders donations-active false when project has donations and no plan" do
project = insert(:project)
insert(:donation_goal, project: project)
conn = Phoenix.ConnTest.build_conn()
project = CodeCorpsWeb.ProjectController.preload(project)
rendered_json = render(CodeCorpsWeb.ProjectView, "show.json-api", data: project, conn: conn)
assert rendered_json["data"]["attributes"]["donations-active"] == false
end
test "renders donations-active false when project has no donations and no plan" do
project = insert(:project)
conn = Phoenix.ConnTest.build_conn()
project = CodeCorpsWeb.ProjectController.preload(project)
rendered_json = render(CodeCorpsWeb.ProjectView, "show.json-api", data: project, conn: conn)
assert rendered_json["data"]["attributes"]["donations-active"] == false
end
end
<|start_filename|>lib/code_corps/github/event/installation_repositories/installation_repositories.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.InstallationRepositories do
@moduledoc ~S"""
In charge of handling a GitHub Webhook payload for the
InstallationRepositories event type.
[https://developer.github.com/v3/activity/events/types/#installationrepositoriesevent](https://developer.github.com/v3/activity/events/types/#installationrepositoriesevent)
"""
@behaviour CodeCorps.GitHub.Event.Handler
alias CodeCorps.{
GitHub.Sync,
GitHub.Event.InstallationRepositories
}
@doc """
Handles an "InstallationRepositories" GitHub Webhook event. The event could be
of subtype "added" or "removed" and is handled differently based on that.
- the process of handling the "added" subtype is as follows
- try to match with `CodeCorps.GithubAppInstallation` record
- sync affected `CodeCorps.GithubRepo` records (update, create)
- the process of handling the "removed" subtype is as follows
- try to match with a `CodeCorps.GithubAppInstallation` record
- delete affected `CodeCorps.GithubRepo` records, respecting the rules
- if the GitHub payload for a repo is not matched with a record in our
database, just skip deleting it
"""
@impl CodeCorps.GitHub.Event.Handler
@spec handle(map) ::
Sync.installation_repositories_event_outcome() |
{:error, :unexpected_payload}
def handle(payload) do
with {:ok, :valid} <- payload |> validate_payload() do
Sync.installation_repositories_event(payload)
else
{:error, :invalid} -> {:error, :unexpected_payload}
end
end
@spec validate_payload(map) :: {:ok, :valid} | {:error, :invalid}
defp validate_payload(%{} = payload) do
case payload |> InstallationRepositories.Validator.valid? do
true -> {:ok, :valid}
false -> {:error, :invalid}
end
end
end
<|start_filename|>test/lib/code_corps/emails/project_approved_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.ProjectApprovedEmailTest do
use CodeCorps.ModelCase
use Bamboo.Test
alias CodeCorps.Emails.ProjectApprovedEmail
test "email has the correct data" do
project = insert(:project)
%{user: owner1} = insert(:project_user, project: project, role: "owner")
%{user: owner2} = insert(:project_user, project: project, role: "owner")
email = ProjectApprovedEmail.create(project)
assert email.from == "Code Corps<<EMAIL>>"
assert Enum.count(email.to) == 2
assert Enum.member?(email.to, owner1.email)
assert Enum.member?(email.to, owner2.email)
template_model = email.private.template_model
assert template_model == %{
project_title: project.title,
project_url: "http://localhost:4200/#{project.organization.slug}/#{project.slug}",
subject: "#{project.title} is approved!"
}
end
end
<|start_filename|>test/lib/code_corps/analytics/segment_traits_builder_test.exs<|end_filename|>
defmodule CodeCorps.Analytics.SegmentTraitsBuilderTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.Analytics.SegmentTraitsBuilder
describe "build/1" do
# NOTE: These tests only make sure there's a function clause for each
    # supported struct and do not assert the traits content. Simply put,
    # asserting that would mean practically re-implementing the builder
    # within the tests.
test "works for all supported struct types" do
assert :comment |> insert |> SegmentTraitsBuilder.build
assert :donation_goal |> insert |> SegmentTraitsBuilder.build
assert :github_app_installation |> insert |> SegmentTraitsBuilder.build
assert :github_repo |> insert |> SegmentTraitsBuilder.build
assert :project |> insert |> SegmentTraitsBuilder.build
assert :project_skill |> insert |> SegmentTraitsBuilder.build
assert :project_user |> insert |> SegmentTraitsBuilder.build
data = %{
acceptor: insert(:user),
project_user: insert(:project_user)
}
assert SegmentTraitsBuilder.build(data)
assert :stripe_connect_account |> insert |> SegmentTraitsBuilder.build
assert :stripe_connect_charge |> insert |> SegmentTraitsBuilder.build
assert :stripe_connect_plan |> insert |> SegmentTraitsBuilder.build
assert :stripe_connect_subscription |> insert |> SegmentTraitsBuilder.build
assert :stripe_platform_card |> insert |> SegmentTraitsBuilder.build
assert :stripe_platform_customer |> insert |> SegmentTraitsBuilder.build
assert :task |> insert |> SegmentTraitsBuilder.build
assert :task_skill |> insert |> SegmentTraitsBuilder.build
assert :user |> insert |> SegmentTraitsBuilder.build
assert :user_category |> insert |> SegmentTraitsBuilder.build
assert :user_role |> insert |> SegmentTraitsBuilder.build
assert :user_skill |> insert |> SegmentTraitsBuilder.build
assert :user_task |> insert |> SegmentTraitsBuilder.build
assert %{token: "foo", user_id: 1} |> SegmentTraitsBuilder.build
end
end
end
<|start_filename|>test/lib/code_corps_web/views/github_event_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.GithubEventViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
github_event = insert(:github_event, action: "created", github_delivery_id: "6c305920-c374-11e7-91e8-12f64fc6d596", payload: %{"key" => "value"}, status: "processed", type: "issue_comment")
rendered_json =
CodeCorpsWeb.GithubEventView
|> render("show.json-api", data: github_event)
expected_json = %{
"data" => %{
"id" => github_event.id |> Integer.to_string,
"type" => "github-event",
"attributes" => %{
"action" => github_event.action,
"event-type" => github_event.type,
"error" => github_event.error,
"failure-reason" => github_event.failure_reason,
"github-delivery-id" => github_event.github_delivery_id,
"inserted-at" => github_event.inserted_at,
"payload" => github_event.payload,
"record-data" => github_event.data,
"status" => github_event.status,
"updated-at" => github_event.updated_at
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps/github/api/api.ex<|end_filename|>
defmodule CodeCorps.GitHub.API do
alias CodeCorps.{
GithubAppInstallation,
GitHub,
GitHub.API.Errors.PaginationError,
GitHub.API.Pagination,
GitHub.APIError,
GitHub.HTTPClientError,
GitHub.Utils.ResultAggregator,
User
}
alias HTTPoison.{Error, Response}
def gateway(), do: Application.get_env(:code_corps, :github)
@typep raw_body :: String.t
@typep raw_headers :: list({String.t, String.t})
@typep raw_options :: Keyword.t
@spec request(GitHub.method, String.t, raw_body, raw_headers, raw_options) :: GitHub.response
def request(method, url, body, headers, options) do
gateway().request(method, url, body, headers, options)
|> marshall_response()
end
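  # Fetches a collection that may span multiple pages: a HEAD request first
  # determines the total page count from the response headers, then one GET is
  # issued per page and the resulting pages are flattened into a single list,
  # or wrapped in a `PaginationError` if any page request fails.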
@spec get_all(String.t, raw_headers, raw_options) :: {:ok, list(map)} | {:error, PaginationError.t} | {:error, GitHub.api_error_struct}
def get_all(url, headers, options) do
case gateway().request(:head, url, "", headers, options) do
{:ok, %Response{headers: response_headers, status_code: code}} when code in 200..399 ->
response_headers
|> Pagination.retrieve_total_pages()
|> Pagination.to_page_numbers()
|> Enum.map(&Pagination.add_page_param(options, &1))
|> Enum.map(&gateway().request(:get, url, "", headers, &1))
|> Enum.map(&marshall_response/1)
|> ResultAggregator.aggregate
|> marshall_paginated_response()
      other ->
        other |> marshall_response()
end
end
@doc """
Get access token headers for a given `CodeCorps.User` and
`CodeCorps.GithubAppInstallation`.
If the user does not have a `github_auth_token` (meaning they are not
connected to GitHub), then we default to the installation which will post on
behalf of the user as a bot.
"""
@spec opts_for(User.t, GithubAppInstallation.t) :: list
def opts_for(%User{github_auth_token: nil}, %GithubAppInstallation{} = installation) do
opts_for(installation)
end
def opts_for(%User{github_auth_token: token}, %GithubAppInstallation{}) do
[access_token: token]
end
@doc """
Get access token headers for a given `CodeCorps.GithubAppInstallation`.
This should only be used in instances where the full permissions of the
application are needed and there is no need for attribution to a user.
"""
@spec opts_for(GithubAppInstallation.t) :: list
def opts_for(%GithubAppInstallation{} = installation) do
with {:ok, token} <- installation |> GitHub.API.Installation.get_access_token do
[access_token: token]
else
{:error, github_error} -> {:error, github_error}
end
end
@typep http_success :: {:ok, Response.t}
@typep http_failure :: {:error, term}
@spec marshall_response(http_success | http_failure) :: GitHub.response
defp marshall_response({:ok, %Response{body: "", status_code: status}}) when status in 200..299 do
{:ok, %{}}
end
defp marshall_response({:ok, %Response{body: body, status_code: status}}) when status in 200..299 do
case body |> Poison.decode do
{:ok, json} ->
{:ok, json}
{:error, _value} ->
{:error, HTTPClientError.new(reason: :body_decoding_error)}
end
end
defp marshall_response({:ok, %Response{body: body, status_code: 404}}) do
{:error, APIError.new({404, %{"message" => body}})}
end
defp marshall_response({:ok, %Response{body: "", status_code: status}}) when status in 400..599 do
{:error, APIError.new({status, %{"message" => "API Error during HEAD request"}})}
end
defp marshall_response({:ok, %Response{body: body, status_code: status}}) when status in 400..599 do
case body |> Poison.decode do
{:ok, json} ->
{:error, APIError.new({status, json})}
{:error, _value} ->
{:error, HTTPClientError.new(reason: :body_decoding_error)}
end
end
defp marshall_response({:error, %Error{reason: reason}}) do
{:error, HTTPClientError.new(reason: reason)}
end
defp marshall_response({:error, reason}) do
{:error, HTTPClientError.new(reason: reason)}
end
@spec marshall_paginated_response(tuple) :: tuple
defp marshall_paginated_response({:ok, pages}), do: {:ok, pages |> List.flatten}
defp marshall_paginated_response({:error, responses}), do: {:error, responses |> PaginationError.new}
end
<|start_filename|>lib/code_corps/github/sync/github_pull_request/body_parser.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.GithubPullRequest.BodyParser do
@moduledoc ~S"""
  In charge of extracting ids from markdown content, matched against a
  predefined list of closing keywords.
"""
@doc ~S"""
Searches for GitHub closing keyword format inside a content string.
Returns all unique ids matched, as integers.
"""
@spec extract_closing_ids(String.t) :: list(integer)
def extract_closing_ids(content) when is_binary(content) do
~w(close closes closed fix fixes fixed resolve resolves resolved)
|> matching_regex()
|> Regex.scan(content) # [["closes #1", "closes", "1"], ["fixes #2", "fixes", "2"]]
|> Enum.map(&List.last/1) # ["1", "2"]
|> Enum.map(&String.to_integer/1) # [1, 2]
|> Enum.uniq
end
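  # Illustrative usage, assuming the keyword list above:
  #
  #     iex> extract_closing_ids("Closes #12, also fixes #34 and fixes #12")
  #     [12, 34]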
defp matching_regex(keywords) do
matches = keywords |> Enum.join("|")
~r/(?:(#{matches}))\s+#(\d+)/i
end
end
<|start_filename|>priv/repo/migrations/20170102130055_add_tos_acceptance_fields_to_stripe_connect_accounts.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddTosAcceptanceFieldsToStripeConnectAccounts do
use Ecto.Migration
def change do
alter table(:stripe_connect_accounts) do
add :tos_acceptance_date, :utc_datetime
add :tos_acceptance_ip, :string
add :tos_acceptance_user_agent, :string
end
end
end
<|start_filename|>test/lib/code_corps/github/event/installation/installation_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.InstallationTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GithubAppInstallation,
GithubRepo,
GitHub.Event.Installation,
Repo
}
defmodule BadRepoRequest do
def request(:get, "https://api.github.com/installation/repositories", _, _, _) do
{:ok, body} = load_endpoint_fixture("forbidden") |> Poison.encode
{:ok, %HTTPoison.Response{status_code: 404, body: body}}
end
def request(method, endpoint, body, headers, options) do
CodeCorps.GitHub.SuccessAPI.request(method, endpoint, body, headers, options)
end
end
defmodule InvalidRepoRequest do
def request(:get, "https://api.github.com/installation/repositories", _, _, _) do
good_payload = "installation_repositories" |> load_endpoint_fixture
%{"repositories" => [repo_1, repo_2]} = good_payload
bad_repo_1 = repo_1 |> Map.put("name", nil)
bad_payload =
good_payload |> Map.put("repositories", [bad_repo_1, repo_2])
{:ok, body} = bad_payload |> Poison.encode
{:ok, %HTTPoison.Response{status_code: 200, body: body}}
end
def request(method, endpoint, body, headers, options) do
CodeCorps.GitHub.SuccessAPI.request(method, endpoint, body, headers, options)
end
end
@installation_created load_event_fixture("installation_created")
@bad_sender_payload @installation_created |> Map.put("sender", "foo")
@bad_installation_payload @installation_created |> Map.put("installation", "foo")
describe "handle/1" do
test "returns error if payload is wrong" do
assert {:error, :unexpected_payload} == Installation.handle(%{})
end
test "returns error if user payload is wrong" do
assert {:error, :unexpected_payload} ==
Installation.handle(@bad_sender_payload)
end
test "returns error if installation payload is wrong" do
assert {:error, :unexpected_payload} ==
Installation.handle(@bad_installation_payload)
end
test "returns installation as errored if api error" do
with_mock_api(BadRepoRequest) do
assert {:error, :github_api_error_on_syncing_repos, _error} =
Installation.handle(@installation_created)
end
end
test "returns installation as errored if error creating repos" do
with_mock_api(InvalidRepoRequest) do
assert {:error, :validation_error_on_syncing_existing_repos, {_repos, _changesets}} =
Installation.handle(@installation_created)
end
end
end
describe "handle/1 for Installation::created" do
test "creates installation for unmatched user if no user, syncs repos" do
payload = %{"installation" => %{"id" => installation_github_id}} = @installation_created
{:ok, %GithubAppInstallation{} = installation}
= Installation.handle(payload)
assert installation.github_id == installation_github_id
assert installation.origin == "github"
assert installation.state == "processed"
refute installation.user_id
assert installation.installed == true
assert Repo.aggregate(GithubRepo, :count, :id) == 2
end
test "creates installation if user matched but installation unmatched, syncs repos" do
%{"sender" => %{"id" => user_github_id}} = payload = @installation_created
user = insert(:user, github_id: user_github_id)
{:ok, %GithubAppInstallation{} = installation}
= Installation.handle(payload)
assert installation.github_id == (payload |> get_in(["installation", "id"]))
assert installation.origin == "github"
assert installation.state == "processed"
assert installation.user_id == user.id
assert installation.installed == true
assert Repo.aggregate(GithubRepo, :count, :id) == 2
end
test "updates installation, if both user and installation matched, syncs repos" do
%{"sender" => %{"id" => user_github_id}, "installation" => %{"id" => installation_github_id}} = payload = @installation_created
user = insert(:user, github_id: user_github_id)
insert(
:github_app_installation,
user: user,
access_token_expires_at: Timex.now |> Timex.shift(days: 1),
github_id: nil
)
{:ok, %GithubAppInstallation{} = installation}
= Installation.handle(payload)
assert installation.origin == "codecorps"
assert installation.state == "processed"
assert installation.user_id == user.id
assert installation.github_id == installation_github_id
assert installation.installed == true
assert Repo.aggregate(GithubRepo, :count, :id) == 2
end
test "updates installation if there is an installation, but no user, syncs repos" do
%{"installation" => %{"id" => installation_github_id}, "sender" => %{"id" => sender_github_id}} = payload = @installation_created
insert(:github_app_installation, github_id: installation_github_id)
{:ok, %GithubAppInstallation{} = installation}
= Installation.handle(payload)
assert installation.origin == "codecorps"
assert installation.state == "processed"
assert installation.sender_github_id == sender_github_id
assert Repo.aggregate(GithubRepo, :count, :id) == 2
end
end
end
<|start_filename|>test/lib/code_corps/policy/project_test.exs<|end_filename|>
defmodule CodeCorps.Policy.ProjectTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.Project, only: [create?: 2, update?: 3]
import CodeCorps.Project, only: [create_changeset: 2]
alias CodeCorps.Project
describe "create?" do
test "returns true when user is owner of organization" do
user = insert(:user)
organization = insert(:organization, owner: user)
changeset = %Project{} |> create_changeset(%{organization_id: organization.id})
assert create?(user, changeset)
end
test "returns false otherwise" do
user = insert(:user)
organization = insert(:organization)
changeset = %Project{} |> create_changeset(%{organization_id: organization.id})
refute create?(user, changeset)
end
end
describe "update?" do
test "returns false when user is not a project member" do
user = insert(:user)
project = insert(:project)
refute update?(user, project, %{})
end
test "returns false when user is pending member of project" do
%{project: project, user: user} = insert(:project_user, role: "pending")
refute update?(user, project, %{})
end
test "returns false when user is contributor of project" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
refute update?(user, project, %{})
end
test "returns true when user is admin of project" do
%{project: project, user: user} = insert(:project_user, role: "admin")
assert update?(user, project, %{})
end
test "returns false when user is project owner and approving" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
refute update?(user, project, %{"approved" => true})
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
assert update?(user, project, %{})
end
test "returns true when user is site admin and approving" do
user = insert(:user, admin: true)
project = insert(:project)
assert update?(user, project, %{"approved" => true})
end
end
end
<|start_filename|>lib/code_corps/github/sync/comment/comment.ex<|end_filename|>
defmodule CodeCorps.GitHub.Sync.Comment do
@moduledoc ~S"""
In charge of syncing `CodeCorps.Comment` records with a GitHub comment
payload.
A single GitHub comment always matches a single `CodeCorps.GithubComment`, but
it can match multiple `CodeCorps.Comment` records. This module handles
creating or updating all those records.
"""
import Ecto.Query
alias CodeCorps.{
Comment,
GitHub.Sync,
GitHub.Utils.ResultAggregator,
GithubComment,
GithubIssue,
GithubRepo,
GithubUser,
Repo,
Task,
User
}
alias Ecto.Changeset
@type commit_result_aggregate ::
{:ok, list(Comment.t())} | {:error, {list(Comment.t()), list(Changeset.t())}}
@type commit_result :: {:ok, Comment.t()} | {:error, Changeset.t()}
@doc ~S"""
Creates or updates a `CodeCorps.Comment` for the specified `CodeCorps.Task`.
When provided a `CodeCorps.Task`, a `CodeCorps.GithubComment`, a
`CodeCorps.User`, and a GitHub API payload, it creates or updates a
`CodeCorps.Comment` record, using the provided GitHub API
payload, associated to the specified `CodeCorps.GithubComment`,
`CodeCorps.Task` and `CodeCorps.User`
"""
@spec sync(Task.t(), GithubComment.t(), User.t()) :: commit_result()
def sync(%Task{} = task, %GithubComment{} = github_comment, %User{} = user) do
case find_comment(task, github_comment) do
nil ->
github_comment
|> Sync.Comment.Changeset.create_changeset(task, user)
|> Repo.insert()
%Comment{} = comment ->
comment
|> Sync.Comment.Changeset.update_changeset(github_comment)
|> Repo.update()
end
end
@spec find_comment(Task.t(), GithubComment.t()) :: Comment.t() | nil
defp find_comment(%Task{id: task_id}, %GithubComment{id: github_comment_id}) do
query = from c in Comment,
where: c.task_id == ^task_id,
join: gc in GithubComment, on: c.github_comment_id == gc.id, where: gc.id == ^github_comment_id
query |> Repo.one()
end
@doc ~S"""
Creates or updates `CodeCorps.Comment` records for the specified
`CodeCorps.GithubRepo`.
  For each `CodeCorps.GithubComment` record that relates to the given
  `CodeCorps.GithubRepo`:
- Find the related `CodeCorps.Task` record
- Create or update the related `CodeCorps.Comment` record
- Associate the `CodeCorps.Comment` record with the `CodeCorps.User` that
relates to the `CodeCorps.GithubUser` for the `CodeCorps.GithubComment`
"""
@spec sync_github_repo(GithubRepo.t()) :: commit_result_aggregate()
def sync_github_repo(%GithubRepo{} = github_repo) do
preloads = [
github_comments: [:github_issue, github_user: [:user]]
]
%GithubRepo{github_comments: github_comments} =
github_repo |> Repo.preload(preloads)
github_comments
|> Enum.map(fn %GithubComment{github_user: %GithubUser{user: %User{} = user}} = github_comment ->
github_comment
|> find_task(github_repo)
|> sync(github_comment, user)
end)
|> ResultAggregator.aggregate()
end
# TODO: can this return a nil?
@spec find_task(GithubComment.t(), GithubRepo.t()) :: Task.t()
defp find_task(
%GithubComment{github_issue: %GithubIssue{id: github_issue_id}},
%GithubRepo{project_id: project_id}) do
query = from t in Task,
where: t.project_id == ^project_id,
join: gi in GithubIssue, on: t.github_issue_id == gi.id, where: gi.id == ^github_issue_id
query |> Repo.one()
end
@doc ~S"""
  Deletes `CodeCorps.Comment` records associated to the `CodeCorps.GithubComment`
  with the specified `github_id`.
  Since there can be 0 or 1 such records, returns `{:ok, results}` where
  `results` is a one-element or empty list of deleted records.
"""
@spec delete(String.t()) :: {:ok, list(Comment.t())}
def delete(github_id) do
query =
from c in Comment,
join: gc in GithubComment, on: gc.id == c.github_comment_id, where: gc.github_id == ^github_id
query
|> Repo.delete_all(returning: true)
|> (fn {_count, comments} -> {:ok, comments} end).()
end
end
<|start_filename|>test/lib/code_corps_web/views/token_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.TokenViewTest do
use CodeCorpsWeb.ViewCase
test "renders show" do
token = "1<PASSWORD>"
user_id = 1
rendered_json = render(CodeCorpsWeb.TokenView, "show.json", %{token: token, user_id: user_id})
expected_json = %{
token: token,
user_id: user_id
}
assert expected_json == rendered_json
end
test "renders 401" do
message = "Silly wabbit, Trix are for kids!"
rendered_json = render(CodeCorpsWeb.TokenView, "401.json", %{message: message})
expected_json = %{
errors: [
%{
id: "UNAUTHORIZED",
title: "401 Unauthorized",
detail: message,
status: 401
}
]
}
assert expected_json == rendered_json
end
test "renders 403" do
message = "Silly wabbit, Trix are for kids!"
rendered_json = render(CodeCorpsWeb.TokenView, "403.json", %{message: message})
expected_json = %{
errors: [
%{
id: "FORBIDDEN",
title: "403 Forbidden",
detail: message,
status: 403
}
]
}
assert expected_json == rendered_json
end
test "renders delete" do
rendered_json = render(CodeCorpsWeb.TokenView, "delete.json", %{})
expected_json = %{
ok: true
}
assert expected_json == rendered_json
end
end
<|start_filename|>lib/code_corps/policy/stripe_platform_card.ex<|end_filename|>
defmodule CodeCorps.Policy.StripePlatformCard do
alias CodeCorps.StripePlatformCard
alias CodeCorps.User
@spec create?(User.t, map) :: boolean
def create?(user, params), do: user |> owns?(params)
@spec show?(User.t, StripePlatformCard.t) :: boolean
def show?(user, card), do: user |> owns?(card)
@spec owns?(User.t, StripePlatformCard.t | map) :: boolean
defp owns?(%User{id: current_user_id}, %StripePlatformCard{user_id: user_id}) do
current_user_id == user_id
end
defp owns?(%User{id: current_user_id}, %{"user_id" => user_id}) do
current_user_id == user_id
end
defp owns?(_, _), do: false
end
<|start_filename|>test/lib/code_corps/model/stripe_connect_subscription_test.exs<|end_filename|>
defmodule CodeCorps.StripeConnectSubscriptionTest do
use CodeCorps.ModelCase
alias CodeCorps.StripeConnectSubscription
@valid_attrs %{
application_fee_percent: 5,
id_from_stripe: "abc123",
plan_id_from_stripe: "abc123",
quantity: 1000
}
@invalid_attrs %{}
describe "create_changeset/2" do
test "reports as valid when attributes are valid" do
stripe_connect_plan_id = insert(:stripe_connect_plan).id
user_id = insert(:user).id
changes = Map.merge(@valid_attrs, %{stripe_connect_plan_id: stripe_connect_plan_id, user_id: user_id})
changeset = StripeConnectSubscription.create_changeset(%StripeConnectSubscription{}, changes)
assert changeset.valid?
end
test "reports as invalid when attributes are invalid" do
changeset = StripeConnectSubscription.create_changeset(%StripeConnectSubscription{}, @invalid_attrs)
refute changeset.valid?
assert_error_message(changeset, :application_fee_percent, "can't be blank")
assert_error_message(changeset, :id_from_stripe, "can't be blank")
assert_error_message(changeset, :plan_id_from_stripe, "can't be blank")
assert_error_message(changeset, :quantity, "can't be blank")
assert_error_message(changeset, :stripe_connect_plan_id, "can't be blank")
end
test "ensures stripe_connect_plan_id links to existing_record" do
user_id = insert(:user).id
attrs = @valid_attrs |> Map.merge(%{stripe_connect_plan_id: -1, user_id: user_id})
{result, changeset} =
StripeConnectSubscription.create_changeset(%StripeConnectSubscription{}, attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :stripe_connect_plan, "does not exist")
end
test "ensures user_id links to existing_record" do
stripe_connect_plan_id = insert(:stripe_connect_plan).id
attrs = @valid_attrs |> Map.merge(%{stripe_connect_plan_id: stripe_connect_plan_id, user_id: -1})
{result, changeset} =
StripeConnectSubscription.create_changeset(%StripeConnectSubscription{}, attrs)
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :user, "does not exist")
end
end
end
<|start_filename|>test/lib/code_corps/services/markdown_renderer_test.exs<|end_filename|>
defmodule CodeCorps.Services.MarkdownRendererServiceTest do
use ExUnit.Case, async: true
alias CodeCorps.Task
import CodeCorps.Services.MarkdownRendererService
@valid_attrs %{
title: "Test task",
task_list_id: 1,
markdown: "A **strong** body",
status: "open"
}
test "renders empty strings to nil" do
attrs = @valid_attrs |> Map.merge(%{markdown: ""})
changeset =
%Task{}
|> Task.changeset(attrs)
|> render_markdown_to_html(:markdown, :body)
assert changeset |> Ecto.Changeset.get_change(:body) == nil
end
test "returns unchanged changeset when nil" do
attrs = @valid_attrs |> Map.merge(%{markdown: nil})
changeset =
%Task{}
|> Task.changeset(attrs)
|> render_markdown_to_html(:markdown, :body)
assert changeset == %Task{} |> Task.changeset(attrs)
end
test "renders markdown to html" do
changeset =
%Task{}
|> Task.changeset(@valid_attrs)
|> render_markdown_to_html(:markdown, :body)
assert changeset |> Ecto.Changeset.get_change(:body) == "<p>A <strong>strong</strong> body</p>\n"
end
test "adds the right css class prefixes" do
attrs = @valid_attrs |> Map.merge(%{markdown: "```css\nspan {}\n```"})
changeset =
%Task{}
|> Task.changeset(attrs)
|> render_markdown_to_html(:markdown, :body)
assert changeset |> Ecto.Changeset.get_change(:body) == "<pre><code class=\"css language-css\">span {}</code></pre>\n"
end
test "returns changeset when changeset is invalid" do
changeset =
%Task{}
|> Task.changeset
|> Ecto.Changeset.put_change(:markdown, "")
|> render_markdown_to_html(:markdown, :body)
refute changeset.valid?
assert changeset |> Ecto.Changeset.get_change(:body) == nil
end
end
<|start_filename|>lib/code_corps/stripe_testing/fixtures/account_with_multiple_external_accounts.json<|end_filename|>
{
"id": "account_with_multiple_external_accounts",
"object": "account",
"business_logo": null,
"business_name": "<NAME> Inc.",
"business_url": "somecompany.org",
"charges_enabled": false,
"country": "US",
"debit_negative_balances": true,
"decline_charge_on": {
"avs_failure": false,
"cvc_failure": true
},
"default_currency": "usd",
"details_submitted": false,
"display_name": "<NAME>",
"email": "<EMAIL>",
"external_accounts": {
"object": "list",
"data": [
{
"id": "ba_111111111111111111111111",
"object": "bank_account",
"account": "account_with_multiple_external_accounts",
"account_holder_name": "<NAME>",
"account_holder_type": "individual",
"bank_name": "STRIPE TEST BANK",
"country": "US",
"currency": "usd",
"default_for_currency": false,
"fingerprint": "3wib8M6m7DxSV0PP",
"last4": "6789",
"metadata": {
},
"routing_number": "110000000",
"status": "new"
},
{
"id": "ba_222222222222222222222222",
"object": "bank_account",
"account": "account_with_multiple_external_accounts",
"account_holder_name": "<NAME>",
"account_holder_type": "individual",
"bank_name": "STRIPE TEST BANK",
"country": "US",
"currency": "usd",
"default_for_currency": false,
"fingerprint": "3wib8M6m7DxSV0PP",
"last4": "6789",
"metadata": {
},
"routing_number": "110000000",
"status": "new"
}
],
"has_more": false,
"total_count": 0,
"url": "/v1/accounts/account_with_multiple_external_accounts/external_accounts"
},
"legal_entity": {
"additional_owners": [
],
"address": {
"city": null,
"country": "US",
"line1": null,
"line2": null,
"postal_code": null,
"state": null
},
"business_name": "Some Company Inc.",
"business_tax_id_provided": false,
"business_vat_id_provided": false,
"dob": {
"day": null,
"month": null,
"year": null
},
"first_name": "John",
"last_name": "Doe",
"personal_address": {
"city": null,
"country": "US",
"line1": null,
"line2": null,
"postal_code": null,
"state": null
},
"personal_id_number_provided": false,
"ssn_last_4_provided": false,
"type": "sole_prop",
"verification": {
"details": null,
"details_code": "failed_other",
"document": "fil_12345",
"status": "unverified"
}
},
"metadata": null,
"product_description": "Some product description.",
"statement_descriptor": null,
"support_email": null,
"support_phone": "1234567890",
"timezone": "Europe/Zagreb",
"tos_acceptance": {
"date": null,
"ip": null,
"user_agent": null
},
"transfer_schedule": {
"delay_days": 2,
"interval": "daily"
},
"transfer_statement_descriptor": null,
"payouts_enabled": false,
"type": "custom",
"verification": {
"disabled_reason": "fields_needed",
"due_by": null,
"fields_needed": [
"business_url",
"external_account",
"tos_acceptance.date",
"tos_acceptance.ip"
]
}
}
<|start_filename|>lib/code_corps/github/event/issues/issues.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.Issues do
@moduledoc ~S"""
In charge of handling a GitHub Webhook payload for the Issues event type
[https://developer.github.com/v3/activity/events/types/#issuesevent](https://developer.github.com/v3/activity/events/types/#issuesevent)
"""
@behaviour CodeCorps.GitHub.Event.Handler
alias CodeCorps.{
GitHub.Sync,
GitHub.Event.Issues.Validator
}
@doc ~S"""
Handles the "Issues" GitHub webhook
The process is as follows:
- validate the payload is structured as expected
- validate the action is properly supported
- sync the issue using `CodeCorps.GitHub.Sync.Issue`
"""
@impl CodeCorps.GitHub.Event.Handler
@spec handle(map) ::
Sync.issue_event_outcome() | {:error, :unexpected_payload}
def handle(payload) do
with {:ok, :valid} <- validate_payload(payload) do
Sync.issue_event(payload)
else
{:error, error} -> {:error, error}
end
end
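  # A minimal sketch of exercising this handler (uses the `load_event_fixture/1`
  # helper from `CodeCorps.GitHub.TestHelpers`, so it applies to the test suite):
  #
  #   "issues_opened"
  #   |> load_event_fixture()
  #   |> CodeCorps.GitHub.Event.Issues.handle()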
@spec validate_payload(map) :: {:ok, :valid} | {:error, :unexpected_payload}
defp validate_payload(%{} = payload) do
if Validator.valid?(payload) do
{:ok, :valid}
else
{:error, :unexpected_payload}
end
end
end
<|start_filename|>test/lib/code_corps/model/user_task_test.exs<|end_filename|>
defmodule CodeCorps.UserTaskTest do
@moduledoc false
use CodeCorps.ModelCase
alias CodeCorps.UserTask
describe "create_changeset/2" do
@required_attrs ~w(task_id user_id)
test "requires #{@required_attrs}" do
changeset = UserTask.create_changeset(%UserTask{}, %{})
assert_validation_triggered(changeset, :task_id, :required)
assert_validation_triggered(changeset, :user_id, :required)
end
test "ensures associated Task record exists" do
user = insert(:user)
changeset = UserTask.create_changeset(%UserTask{}, %{task_id: -1, user_id: user.id})
{:error, response_changeset} = Repo.insert(changeset)
assert_error_message(response_changeset, :task, "does not exist")
end
test "ensures associated User record exists" do
task = insert(:task)
changeset = UserTask.create_changeset(%UserTask{}, %{task_id: task.id, user_id: -1})
{:error, response_changeset} = Repo.insert(changeset)
assert_error_message(response_changeset, :user, "does not exist")
end
test "ensures uniqueness of User/Task combination" do
user_task = insert(:user_task)
changeset = UserTask.create_changeset(%UserTask{}, %{task_id: user_task.task_id, user_id: user_task.user_id})
{:error, response_changeset} = Repo.insert(changeset)
assert_error_message(response_changeset, :user, "has already been taken")
end
end
describe "update_changeset/2" do
@required_attrs ~w(user_id)
test "requires #{@required_attrs}" do
user_task = insert(:user_task)
changeset = UserTask.update_changeset(user_task, %{user_id: nil})
assert_validation_triggered(changeset, :user_id, :required)
end
test "ensures associated User record exists" do
user_task = insert(:user_task)
changeset = UserTask.update_changeset(user_task, %{user_id: -1})
{:error, response_changeset} = Repo.update(changeset)
assert_error_message(response_changeset, :user, "does not exist")
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/token_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.TokenControllerTest do
use CodeCorpsWeb.ConnCase
setup do
conn =
%{build_conn() | host: "api."}
|> put_req_header("accept", "application/vnd.api+json")
|> put_req_header("content-type", "application/vnd.api+json")
{:ok, conn: conn}
end
defp create_payload(email, password) do
%{
"username" => email,
"password" => password
}
end
describe "create" do
test "authenticates and returns JWT and user ID when data is valid", %{conn: conn} do
user = build(:user, %{password: "password"}) |> set_password("password") |> insert
conn = post conn, token_path(conn, :create), create_payload(user.email, user.password)
user_id = user.id
response = json_response(conn, 201)
assert response["token"]
assert response["user_id"] == user_id
assert_received {:track, ^user_id, "Signed In", %{}}
end
test "does not authenticate and renders errors when the email and password are missing", %{conn: conn} do
conn = post conn, token_path(conn, :create), %{"username" => ""}
response = json_response(conn, 401)
[error | _] = response["errors"]
assert error["detail"] == "Please enter your email and password."
assert renders_401_unauthorized?(error)
refute response["token"]
refute response["user_id"]
end
test "does not authenticate and renders errors when only the password is missing", %{conn: conn} do
conn = post conn, token_path(conn, :create), %{"username" => "<EMAIL>"}
response = json_response(conn, 401)
[error | _] = response["errors"]
assert error["detail"] == "Please enter your password."
assert renders_401_unauthorized?(error)
refute response["token"]
refute response["user_id"]
end
test "does not authenticate and renders errors when the password is wrong", %{conn: conn} do
user = build(:user, %{password: "password"}) |> set_password("password") |> insert
conn = post conn, token_path(conn, :create), create_payload(user.email, "wrong password")
response = json_response(conn, 401)
[error | _] = response["errors"]
assert error["detail"] == "Your password doesn't match the email #{user.email}."
assert renders_401_unauthorized?(error)
refute response["token"]
refute response["user_id"]
end
test "does not authenticate and renders errors when the user doesn't exist", %{conn: conn} do
conn = post conn, token_path(conn, :create), create_payload("<EMAIL>", "password")
response = json_response(conn, 401)
[error | _] = response["errors"]
assert error["detail"] == "We couldn't find a user with the email <EMAIL>."
assert renders_401_unauthorized?(error)
refute response["token"]
refute response["user_id"]
end
end
describe "refresh" do
test "refreshes JWT and returns JWT and user ID when data is valid", %{conn: conn} do
user = build(:user, %{password: "password"}) |> set_password("password") |> insert()
{:ok, token, _claims} = user |> CodeCorps.Guardian.encode_and_sign()
conn = post conn, token_path(conn, :refresh), %{token: token}
response = json_response(conn, 201)
assert response["token"]
assert response["user_id"] == user.id
end
test "does not authenticate and renders errors when the token is expired", %{conn: conn} do
user = build(:user, %{password: "password"}) |> set_password("password") |> insert()
      claims = %{"exp" => Guardian.timestamp() - 10}
{:ok, token, _claims} = user |> CodeCorps.Guardian.encode_and_sign(claims)
conn = post conn, token_path(conn, :refresh), %{token: token}
response = json_response(conn, 401)
refute response["token"]
refute response["user_id"]
[error | _] = response["errors"]
assert renders_401_unauthorized?(error)
assert error["detail"] == "token_expired"
end
end
defp renders_401_unauthorized?(%{"id" => "UNAUTHORIZED", "title" => "401 Unauthorized", "status" => 401}), do: true
defp renders_401_unauthorized?(_), do: false
end
<|start_filename|>test/lib/code_corps_web/controllers/project_user_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.ProjectUserControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :project_user
use Bamboo.Test
describe "index" do
test "lists all resources", %{conn: conn} do
[record_1, record_2] = insert_pair(:project_user)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
test "filters resources by record id", %{conn: conn} do
[record_1, record_2 | _] = insert_list(3, :project_user)
path = "project-users/?filter[id]=#{record_1.id},#{record_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([record_1.id, record_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
record = insert(:project_user)
conn
|> request_show(record)
|> json_response(200)
|> assert_id_from_response(record.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: user} do
project = insert(:project)
insert(:project_user, project: project, role: "owner")
attrs = %{role: "pending", project: project, user: user}
assert conn |> request_create(attrs) |> json_response(201)
user_id = user.id
project_id = "project_#{project.id}"
tracking_properties = %{
project: project.title,
project_id: project.id,
member: user.username,
member_id: user.id
}
assert_received {:track, ^user_id, "Requested Membership (User)", ^tracking_properties}
assert_received {:track, ^project_id, "Membership Requested (Project)", ^tracking_properties}
email =
CodeCorps.ProjectUser
|> CodeCorps.Repo.get_by(role: "pending")
|> CodeCorps.Repo.preload([:project, :user])
|> CodeCorps.Emails.ProjectUserRequestEmail.create()
assert_delivered_email(email)
end
@tag :authenticated
test "does not create resource and renders 422 when data is invalid", %{
conn: conn,
current_user: user
} do
      # The only way to trigger a validation error is to provide a non-existent project;
      # anything else will fail at the authorization level
project = build(:project)
attrs = %{role: "pending", project: project, user: user}
assert conn |> request_create(attrs) |> json_response(422)
end
test "does not create resource and renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "update" do
@tag :authenticated
test "updates and renders resource when data is valid", %{
conn: conn,
current_user: current_user
} do
project = insert(:project)
record = insert(:project_user, project: project, role: "pending")
insert(:project_user, project: project, user: current_user, role: "owner")
params = %{role: "contributor"}
json = conn |> request_update(record, params) |> json_response(200)
assert json["data"]["attributes"]["role"] == "contributor"
user_id = current_user.id
project_id = "project_#{project.id}"
tracking_properties = %{
project: project.title,
project_id: project.id,
member: record.user.username,
member_id: record.user.id,
acceptor: current_user.username,
acceptor_id: current_user.id
}
assert_received {:track, ^user_id, "Membership Approved (User)", ^tracking_properties}
assert_received {:track, ^project_id, "Approved Membership (Project)", ^tracking_properties}
email =
CodeCorps.ProjectUser
|> CodeCorps.Repo.get_by(role: "contributor")
|> CodeCorps.Repo.preload([:project, :user])
|> CodeCorps.Emails.ProjectUserAcceptanceEmail.create()
assert_delivered_email(email)
end
test "doesn't update and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "doesn't update and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_update |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent on update", %{conn: conn} do
assert conn |> request_update(:not_found) |> json_response(404)
end
end
describe "delete" do
@tag :authenticated
test "deletes resource", %{conn: conn, current_user: current_user} do
project = insert(:project)
record = insert(:project_user, project: project)
insert(:project_user, project: project, user: current_user, role: "owner")
assert conn |> request_delete(record) |> response(204)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag :authenticated
test "renders 404 when id is nonexistent on delete", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>lib/code_corps/policy/preview.ex<|end_filename|>
defmodule CodeCorps.Policy.Preview do
alias CodeCorps.User
@spec create?(User.t, map) :: boolean
def create?(%User{} = user, %{"user_id" => author_id}), do: user.id == author_id
def create?(%User{}, %{}), do: false
end
<|start_filename|>test/lib/code_corps/model/task_skill_test.exs<|end_filename|>
defmodule CodeCorps.TaskSkillTest do
@moduledoc false
use CodeCorps.ModelCase
alias CodeCorps.TaskSkill
describe "create_changeset/2" do
@required_attrs ~w(task_id skill_id)
test "requires #{@required_attrs}" do
changeset = TaskSkill.create_changeset(%TaskSkill{}, %{})
assert_validation_triggered(changeset, :task_id, :required)
assert_validation_triggered(changeset, :skill_id, :required)
end
test "ensures associated Task record exists" do
skill = insert(:skill)
changeset = TaskSkill.create_changeset(%TaskSkill{}, %{task_id: -1, skill_id: skill.id})
{:error, response_changeset} = Repo.insert(changeset)
assert_error_message(response_changeset, :task, "does not exist")
end
test "ensures associated Skill record exists" do
task = insert(:task)
changeset = TaskSkill.create_changeset(%TaskSkill{}, %{task_id: task.id, skill_id: -1})
{:error, response_changeset} = Repo.insert(changeset)
assert_error_message(response_changeset, :skill, "does not exist")
end
test "ensures uniqueness of Skill/Task combination" do
task_skill = insert(:task_skill)
changeset = TaskSkill.create_changeset(%TaskSkill{}, %{task_id: task_skill.task_id, skill_id: task_skill.skill_id})
{:error, response_changeset} = Repo.insert(changeset)
assert_error_message(response_changeset, :skill, "has already been taken")
end
end
end
<|start_filename|>lib/code_corps/model/stripe_connect_customer.ex<|end_filename|>
defmodule CodeCorps.StripeConnectCustomer do
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "stripe_connect_customers" do
field :id_from_stripe, :string, null: false
belongs_to :stripe_connect_account, CodeCorps.StripeConnectAccount
belongs_to :stripe_platform_customer, CodeCorps.StripePlatformCustomer
belongs_to :user, CodeCorps.User
timestamps()
end
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:id_from_stripe, :stripe_connect_account_id, :stripe_platform_customer_id, :user_id])
|> validate_required([:id_from_stripe, :stripe_connect_account_id, :stripe_platform_customer_id, :user_id])
|> assoc_constraint(:stripe_connect_account)
|> assoc_constraint(:stripe_platform_customer)
|> assoc_constraint(:user)
|> unique_constraint(:id_from_stripe)
|> unique_constraint(:stripe_connect_account_id, name: :index_projects_on_user_id_role_id)
end
end
<|start_filename|>test/lib/code_corps_web/controllers/donation_goal_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.DonationGoalControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :donation_goal
@valid_attrs %{amount: 200, description: "A description"}
@invalid_attrs %{description: nil}
describe "index" do
test "lists all entries on index", %{conn: conn} do
[donation_goal_1, donation_goal_2] = insert_pair(:donation_goal)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([donation_goal_1.id, donation_goal_2.id])
end
test "filters resources on index", %{conn: conn} do
[donation_goal_1, donation_goal_2 | _] = insert_list(3, :donation_goal)
path = "donation-goals/?filter[id]=#{donation_goal_1.id},#{donation_goal_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([donation_goal_1.id, donation_goal_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
donation_goal = insert(:donation_goal)
conn
|> request_show(donation_goal)
|> json_response(200)
|> assert_id_from_response(donation_goal.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
attrs = @valid_attrs |> Map.merge(%{project_id: project.id})
assert conn |> request_create(attrs) |> json_response(201)
user_id = current_user.id
assert_received {:track, ^user_id, "Created Donation Goal", %{}}
end
@tag :authenticated
test "does not create resource and renders errors when data is invalid", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
attrs = @invalid_attrs |> Map.merge(%{project_id: project.id})
assert conn |> request_create(attrs) |> json_response(422)
end
test "renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "update" do
@tag :authenticated
test "updates and renders chosen resource when data is valid", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
donation_goal = insert(:donation_goal, project: project)
attrs = @valid_attrs |> Map.merge(%{project_id: project.id})
assert conn |> request_update(donation_goal, attrs) |> json_response(200)
user_id = current_user.id
assert_received {:track, ^user_id, "Updated Donation Goal", %{}}
end
@tag authenticated: :admin
test "does not update chosen resource and renders errors when data is invalid", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
donation_goal = insert(:donation_goal, project: project)
assert conn |> request_update(donation_goal, @invalid_attrs) |> json_response(422)
end
@tag authenticated: :admin
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_update(:not_found) |> json_response(404)
end
test "renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_update |> json_response(403)
end
end
describe "delete" do
@tag :authenticated
test "deletes chosen resource", %{conn: conn, current_user: current_user} do
project = insert(:project)
insert(:project_user, project: project, user: current_user, role: "owner")
donation_goal = insert(:donation_goal, project: project)
assert conn |> request_delete(donation_goal) |> response(204)
end
test "renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag authenticated: :admin
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>lib/code_corps_web/views/github_issue_view.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubIssueView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [
:body, :closed_at, :comments_url, :events_url, :github_created_at,
:github_id, :github_updated_at, :html_url, :labels_url, :locked, :number,
:state, :title, :url
]
has_one :github_pull_request, type: "github-pull-request", field: :github_pull_request_id
has_one :github_repo, type: "github-repo", field: :github_repo_id
end
<|start_filename|>test/lib/code_corps/github/event/issues/issues_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.IssuesTest do
@moduledoc false
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
GithubIssue,
GitHub.Event.Issues,
Repo,
Task,
User
}
@implemented_actions ~w(opened closed edited reopened)
@implemented_actions |> Enum.each(fn action ->
describe "handle/1 for Issues::#{action}" do
@payload load_event_fixture("issues_#{action}")
test "creates or updates associated records" do
%{"repository" => %{"id" => repo_github_id}} = @payload
project = insert(:project)
insert(:github_repo, github_id: repo_github_id, project: project)
insert(:task_list, project: project, done: true)
insert(:task_list, project: project, inbox: true)
insert(:task_list, project: project, pull_requests: true)
{:ok, %Task{}} = Issues.handle(@payload)
assert Repo.aggregate(GithubIssue, :count, :id) == 1
assert Repo.aggregate(Task, :count, :id) == 1
end
test "returns error if unmatched repository" do
assert Issues.handle(@payload) == {:error, :repo_not_found}
refute Repo.one(User)
end
test "returns error if payload is wrong" do
assert {:error, :unexpected_payload} == Issues.handle(%{})
end
test "returns error if repo payload is wrong" do
assert {:error, :unexpected_payload} == Issues.handle(@payload |> Map.put("repository", "foo"))
end
test "returns error if issue payload is wrong" do
assert {:error, :unexpected_payload} == Issues.handle(@payload |> Map.put("issue", "foo"))
end
end
end)
end
<|start_filename|>lib/code_corps/policy/organization_github_app_installation.ex<|end_filename|>
defmodule CodeCorps.Policy.OrganizationGithubAppInstallation do
@moduledoc """
Handles `User` authorization of actions on `OrganizationGithubAppInstallation` records
"""
import CodeCorps.Policy.Helpers, only: [get_organization: 1, owned_by?: 2]
alias CodeCorps.{OrganizationGithubAppInstallation, User}
def create?(%User{} = user, params) do
params |> get_organization |> owned_by?(user)
end
def delete?(%User{} = user, %OrganizationGithubAppInstallation{} = github_app_installation),
do: github_app_installation |> get_organization |> owned_by?(user)
end
<|start_filename|>lib/code_corps_web/views/user_task_view.ex<|end_filename|>
defmodule CodeCorpsWeb.UserTaskView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
has_one :task, type: "task", field: :task_id
has_one :user, type: "user", field: :user_id
end
<|start_filename|>lib/code_corps/model/model.ex<|end_filename|>
defmodule CodeCorps.Model do
@moduledoc ~S"""
  A temporary module to be used by existing model modules before we switch to
  the intent-based structure that Phoenix 1.3 encourages.
"""
@doc ~S"""
  When used, imports the appropriate helper modules.
"""
defmacro __using__(_opts) do
quote do
use Ecto.Schema
import Ecto
import Ecto.Changeset
import Ecto.Query
use Timex.Ecto.Timestamps
end
end
end
<|start_filename|>test/lib/code_corps_web/views/task_list_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.TaskListViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
project = insert(:project)
task_list = insert(:task_list, order: 1000, project: project)
task = insert(:task, order: 1000, task_list: task_list)
task_list = CodeCorpsWeb.TaskListController.preload(task_list)
rendered_json = render(CodeCorpsWeb.TaskListView, "show.json-api", data: task_list)
expected_json = %{
"data" => %{
"attributes" => %{
"done" => task_list.done,
"inbox" => task_list.inbox,
"name" => task_list.name,
"order" => 1000,
"pull-requests" => task_list.pull_requests,
"inserted-at" => task_list.inserted_at,
"updated-at" => task_list.updated_at,
},
"id" => task_list.id |> Integer.to_string,
"relationships" => %{
"project" => %{
"data" => %{
"id" => task_list.project_id |> Integer.to_string,
"type" => "project"
}
},
"tasks" => %{
"data" => [%{
"id" => task.id |> Integer.to_string,
"type" => "task"
}]
}
},
"type" => "task-list",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps_web/controllers/password_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.PasswordController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Services.ForgotPasswordService}
@doc """
Generates a `CodeCorps.AuthToken` model to verify against and sends an email.
"""
def forgot_password(conn, %{"email" => email}) do
ForgotPasswordService.forgot_password(email)
conn
|> CodeCorps.Guardian.Plug.sign_out()
|> put_status(:ok)
|> render("show.json", email: email)
end
end
<|start_filename|>test/lib/code_corps/model/user_role_test.exs<|end_filename|>
defmodule CodeCorps.UserRoleTest do
use CodeCorps.ModelCase
alias CodeCorps.UserRole
test "valid_changeset_is_valid" do
user_id = insert(:user).id
role_id = insert(:role).id
changeset = UserRole.create_changeset(%UserRole{}, %{user_id: user_id, role_id: role_id})
assert changeset.valid?
end
test "changeset requires user_id" do
role_id = insert(:role).id
changeset = UserRole.create_changeset(%UserRole{}, %{role_id: role_id})
refute changeset.valid?
assert_error_message(changeset, :user_id, "can't be blank")
end
test "changeset requires role_id" do
user_id = insert(:user).id
changeset = UserRole.create_changeset(%UserRole{}, %{user_id: user_id})
refute changeset.valid?
assert_error_message(changeset, :role_id, "can't be blank")
end
test "changeset requires id of actual user" do
user_id = -1
role_id = insert(:role).id
{result, changeset} =
UserRole.create_changeset(%UserRole{}, %{user_id: user_id, role_id: role_id})
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :user, "does not exist")
end
test "changeset requires id of actual role" do
user_id = insert(:user).id
role_id = -1
{result, changeset} =
UserRole.create_changeset(%UserRole{}, %{user_id: user_id, role_id: role_id})
|> Repo.insert
assert result == :error
refute changeset.valid?
assert_error_message(changeset, :role, "does not exist")
end
end
<|start_filename|>test/lib/code_corps/github/event/issue_comment/validator_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.IssueComment.ValidatorTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Event.IssueComment.Validator
describe "valid?/1" do
test "returns true for any Issues event fixture" do
assert "issue_comment_created" |> load_event_fixture() |> Validator.valid?
assert "issue_comment_deleted" |> load_event_fixture() |> Validator.valid?
assert "issue_comment_edited" |> load_event_fixture() |> Validator.valid?
end
test "returns false for an unsupported structure" do
refute Validator.valid?("foo")
refute Validator.valid?(%{"foo" => "bar"})
refute Validator.valid?(%{"issue" => %{"bar" => "baz"}})
end
end
end
<|start_filename|>test/lib/code_corps/github/adapters/user_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Adapters.UserTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Adapters.User
defp expected_payload(type) do
%{
email: nil,
github_id: nil,
github_username: nil,
github_avatar_url: nil,
type: type
}
end
describe "to_user/1" do
test "maps API payload" do
%{"issue" => %{"user" => payload}} = load_event_fixture("issues_opened")
assert User.to_user(payload) == %{
email: nil,
github_id: payload["id"],
github_username: payload["login"],
github_avatar_url: payload["avatar_url"],
type: "user" # type gets transformed
}
end
test "maps Bot type" do
assert User.to_user(%{"type" => "Bot"}) == expected_payload("bot")
end
test "maps User type" do
assert User.to_user(%{"type" => "User"}) == expected_payload("user")
end
test "maps Organization type" do
assert User.to_user(%{"type" => "Organization"}) == expected_payload("organization")
end
end
end
<|start_filename|>test/support/test_environment_helper.ex<|end_filename|>
defmodule CodeCorps.TestEnvironmentHelper do
def modify_env(app, overrides) do
original_env = Application.get_all_env(app)
Enum.each(overrides, fn {key, value} -> Application.put_env(app, key, value) end)
ExUnit.Callbacks.on_exit(fn ->
Enum.each overrides, fn {key, _} ->
if Keyword.has_key?(original_env, key) do
Application.put_env(app, key, Keyword.fetch!(original_env, key))
else
Application.delete_env(app, key)
end
end
end)
end
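  # A minimal usage sketch inside a test (hypothetical app/key/value); the
  # original value, or its absence, is restored by the registered `on_exit/1`
  # callback when the test finishes:
  #
  #   modify_env(:code_corps, some_key: "overridden value")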
end
<|start_filename|>test/lib/code_corps/emails/forgot_password_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.ForgotPasswordEmailTest do
use CodeCorps.ModelCase
use Bamboo.Test
alias CodeCorps.{AuthToken, Emails.ForgotPasswordEmail, WebClient}
test "forgot password email works" do
user = insert(:user)
    {:ok, %AuthToken{value: token}} = AuthToken.changeset(%AuthToken{}, user) |> Repo.insert
email = ForgotPasswordEmail.create(user, token)
assert email.from == "Code Corps<<EMAIL>>"
assert email.to == user.email
    {:link, encoded_link} = email.private.template_model |> Enum.at(0)
assert "#{WebClient.url()}/password/reset?token=#{token}" == encoded_link
end
end
<|start_filename|>lib/code_corps/github/event/installation/installation.ex<|end_filename|>
defmodule CodeCorps.GitHub.Event.Installation do
@moduledoc """
In charge of handling a GitHub Webhook payload for the Installation event type
[https://developer.github.com/v3/activity/events/types/#installationevent](https://developer.github.com/v3/activity/events/types/#installationevent)
"""
@behaviour CodeCorps.GitHub.Event.Handler
alias CodeCorps.{
GitHub.Sync,
GitHub.Event.Installation
}
@doc """
Handles the "Installation" GitHub Webhook event.
This is done by first validating the payload is in the format the system
expects, followed by piping it into
`CodeCorps.GitHub.Sync.installation_event/1`
"""
@impl CodeCorps.GitHub.Event.Handler
@spec handle(map) ::
Sync.installation_event_outcome() | {:error, :unexpected_payload}
def handle(payload) do
with {:ok, :valid} <- payload |> validate_payload() do
Sync.installation_event(payload)
else
{:error, :invalid} -> {:error, :unexpected_payload}
end
end
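  # A minimal sketch of exercising this handler (uses the `load_event_fixture/1`
  # helper from `CodeCorps.GitHub.TestHelpers`, so it applies to the test suite;
  # the fixture name is an assumption):
  #
  #   "installation_created"
  #   |> load_event_fixture()
  #   |> CodeCorps.GitHub.Event.Installation.handle()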
@spec validate_payload(map) :: {:ok, :valid} | {:error, :invalid}
defp validate_payload(%{} = payload) do
case payload |> Installation.Validator.valid? do
true -> {:ok, :valid}
false -> {:error, :invalid}
end
end
end
<|start_filename|>lib/code_corps_web/controllers/task_list_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.TaskListController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{Helpers.Query, TaskList}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
task_lists =
TaskList
|> Query.id_filter(params)
|> Query.project_filter(params)
|> Query.sort_by_order()
|> Repo.all()
|> preload()
conn |> render("index.json-api", data: task_lists)
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %TaskList{} = task_list <- TaskList |> Repo.get(id) |> preload() do
conn |> render("show.json-api", data: task_list)
end
end
@preloads [:tasks]
def preload(data) do
Repo.preload(data, @preloads)
end
end
<|start_filename|>test/lib/code_corps_web/controllers/user_skill_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.UserSkillControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :user_skill
describe "index" do
test "lists all entries on index", %{conn: conn} do
[user_skill_1, user_skill_2] = insert_pair(:user_skill)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([user_skill_1.id, user_skill_2.id])
end
test "filters resources on index", %{conn: conn} do
[user_skill_1, user_skill_2 | _] = insert_list(3, :user_skill)
path = "user-skills/?filter[id]=#{user_skill_1.id},#{user_skill_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([user_skill_1.id, user_skill_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
user_skill = insert(:user_skill)
conn
|> request_show(user_skill)
|> json_response(200)
|> assert_id_from_response(user_skill.id)
end
test "renders 404 error when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag :authenticated
test "creates and renders resource when data is valid", %{conn: conn, current_user: current_user} do
skill = insert(:skill, title: "test-skill")
attrs = %{user: current_user, skill: skill}
assert conn |> request_create(attrs) |> json_response(201)
user_id = current_user.id
tracking_properties = %{
skill: skill.title,
skill_id: skill.id
}
assert_received {:track, ^user_id, "Added User Skill", ^tracking_properties}
end
@tag authenticated: :admin
test "renders 422 when data is invalid", %{conn: conn} do
invalid_attrs = %{}
assert conn |> request_create(invalid_attrs) |> json_response(422)
end
test "renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "delete" do
@tag authenticated: :admin
test "deletes resource", %{conn: conn, current_user: current_user} do
user_skill = insert(:user_skill)
assert conn |> request_delete(user_skill.id) |> response(204)
user_id = current_user.id
tracking_properties = %{
skill: user_skill.skill.title,
skill_id: user_skill.skill.id
}
assert_received {:track, ^user_id, "Removed User Skill", ^tracking_properties}
end
test "does not delete resource and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_delete |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_delete |> json_response(403)
end
@tag :authenticated
test "renders page not found when id is nonexistent on delete", %{conn: conn} do
assert conn |> request_delete(:not_found) |> json_response(404)
end
end
end
<|start_filename|>lib/code_corps/validators/slug_validator.ex<|end_filename|>
defmodule CodeCorps.Validators.SlugValidator do
@moduledoc """
Used for validating slug fields in a given changeset.
"""
alias Ecto.Changeset
@doc """
Validates a slug.
Matches slugs with:
- only letters
- prefixed/suffixed underscores
- prefixed/suffixed numbers
  - single dashes inside the slug
  - single or multiple underscores inside the slug
  - a single character
Prevents slugs with:
- prefixed symbols
- prefixed/suffixed dashes
- multiple consecutive dashes
  - slashes, whether single, multiple, or consecutive
Also prevents slugs that conflict with reserved routes for either the API or the web.
"""
def validate_slug(changeset, field_name) do
valid_slug_pattern = ~r/\A((?:(?:(?:[^-\W]-?))*)(?:(?:(?:[^-\W]-?))*)\w+)\z/
# Routes for the API – api. subdomain
api_routes = ~w(
api
categories comments contributors connect conversations conversation-parts
donation-goals
email_available
forgot
github-app-installations github-events github-issues github-pull-requests
github-repos
images issues
mentions
messages
notifications
oauth oauth_clients organizations organization-github-app-installations
organization-invites
password ping platform previews projects project-categories project-skills
project-users
refresh repositories reset roles role-skills
skills slugged-route stars stripe stripe-connect-accounts
stripe-connect-plans stripe-connect-subscriptions stripe-platform-cards
stripe-platform-customers
tags tasks task-images task-likes task-lists task-skills
teams token tokens
user-categories user-roles user-skills user-tasks user username_available
users
webhooks
)
# Routes for the web – www. subdomain
web_routes = ~w(
about account admin android app apps
blog
charter contact cookies
developer developers discover donate
engineering enterprise explore
facebook favorites feed followers following
github
help home
integrations invite invitations ios
jobs
learn likes lists log-in log-out login logout
news notifications
popular press pricing privacy projects
search security session sessions settings shop showcases
sign-in sign-out signin signout signup sitemap spotlight start
team terms training trends trust tour twitter
watching
year
)
reserved_routes = api_routes ++ web_routes
changeset
|> Changeset.validate_format(field_name, valid_slug_pattern)
|> Changeset.validate_exclusion(field_name, reserved_routes)
end
end
<|start_filename|>test/lib/code_corps/conn_utils_test.exs<|end_filename|>
defmodule CodeCorps.ConnUtilsTest do
use CodeCorpsWeb.ConnCase
alias CodeCorps.ConnUtils
defp conn_with_ip(ip) do
%Plug.Conn{remote_ip: ip}
end
describe "extract_ip/1" do
test "extracts IP address from the request properly" do
      assert conn_with_ip({151, 236, 219, 228}) |> ConnUtils.extract_ip == "151.236.219.228"
end
end
describe "extract_user_agent/1" do
test "extracts User Agent from the request properly", %{conn: conn} do
assert conn |> put_req_header("user-agent", "Some agent") |> ConnUtils.extract_user_agent == "Some agent"
end
end
end
<|start_filename|>test/lib/code_corps/comment/service_test.exs<|end_filename|>
defmodule CodeCorps.Comment.ServiceTest do
use CodeCorps.DbAccessCase
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{
Comment,
GithubComment
}
@base_attrs %{"markdown" => "A test task"}
defp valid_attrs() do
user = insert(:user)
task = insert(:task)
@base_attrs
|> Map.put("user_id", user.id)
|> Map.put("task_id", task.id)
end
describe "create/2" do
test "creates comment" do
{:ok, comment} = valid_attrs() |> Comment.Service.create
assert comment.markdown == @base_attrs["markdown"]
assert comment.body
refute_received({:post, "https://api.github.com/" <> _rest, _body, _headers, _options})
end
test "sets modified_from to 'code_corps'" do
{:ok, comment} = valid_attrs() |> Comment.Service.create
assert comment.modified_from == "code_corps"
end
test "returns errored changeset if attributes are invalid" do
{:error, changeset} = Comment.Service.create(@base_attrs)
refute changeset.valid?
refute Repo.one(Comment)
refute_received({:post, "https://api.github.com/" <> _rest, _body, _headers, _options})
end
test "if comment is assigned a github repo, creates github comment on assigned issue" do
user = insert(:user)
github_repo = insert(:github_repo, github_account_login: "foo", name: "bar")
github_issue = insert(:github_issue, number: 5)
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
{:ok, comment} =
@base_attrs
|> Map.put("task_id", task.id)
|> Map.put("user_id", user.id)
|> Comment.Service.create
assert comment.markdown == @base_attrs["markdown"]
assert comment.body
assert comment.github_comment_id
assert Repo.one(GithubComment)
assert_received({:post, "https://api.github.com/repos/foo/bar/issues/5/comments", _body, _headers, _options})
end
test "if github process fails, returns {:error, :github}" do
user = insert(:user)
github_repo = insert(:github_repo, github_account_login: "foo", name: "bar")
github_issue = insert(:github_issue, number: 5)
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
with_mock_api(CodeCorps.GitHub.FailureAPI) do
assert {:error, :github} ==
@base_attrs
|> Map.put("task_id", task.id)
|> Map.put("user_id", user.id)
|> Comment.Service.create
end
refute Repo.one(Comment)
refute Repo.one(GithubComment)
assert_received({:post, "https://api.github.com/repos/foo/bar/issues/5/comments", _body, _headers, _options})
end
end
describe "update/2" do
@update_attrs %{"markdown" => "bar"}
test "updates comment" do
comment = insert(:comment)
{:ok, updated_comment} = comment |> Comment.Service.update(@update_attrs)
assert updated_comment.id == comment.id
assert updated_comment.markdown == @update_attrs["markdown"]
assert updated_comment.body != comment.body
refute updated_comment.github_comment_id
refute_received({:patch, "https://api.github.com/" <> _rest, _body, _headers, _options})
end
test "sets modified_from to 'code_corps'" do
comment = insert(:comment, modified_from: "github")
{:ok, updated_comment} = comment |> Comment.Service.update(@update_attrs)
assert updated_comment.modified_from == "code_corps"
end
@preloads [task: [github_repo: :github_app_installation]]
test "propagates changes to github if comment is synced to github comment" do
github_repo =
:github_repo
|> insert(github_account_login: "foo", name: "bar")
github_issue = insert(:github_issue, number: 5, github_repo: github_repo)
github_comment = insert(:github_comment, github_id: 6, github_issue: github_issue)
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
comment = insert(:comment, task: task, github_comment: github_comment) |> Repo.preload(@preloads)
{:ok, updated_comment} = comment |> Comment.Service.update(@update_attrs)
assert updated_comment.id == comment.id
assert updated_comment.markdown == @update_attrs["markdown"]
assert updated_comment.body != comment.body
assert updated_comment.github_comment_id == github_comment.id
assert_received({:patch, "https://api.github.com/repos/foo/bar/issues/comments/6", _body, _headers, _options})
end
test "reports {:error, :github}, makes no changes at all if there is a github api error" do
github_repo =
:github_repo
|> insert(github_account_login: "foo", name: "bar")
github_issue = insert(:github_issue, number: 5)
github_comment = insert(:github_comment, github_id: 6)
task = insert(:task, github_issue: github_issue, github_repo: github_repo)
comment = insert(:comment, github_comment: github_comment, task: task)
with_mock_api(CodeCorps.GitHub.FailureAPI) do
assert {:error, :github} == comment |> Comment.Service.update(@update_attrs)
end
updated_comment = Repo.one(Comment)
assert updated_comment.id == comment.id
assert updated_comment.markdown == comment.markdown
assert updated_comment.body == comment.body
assert updated_comment.github_comment_id == github_comment.id
assert_received({:patch, "https://api.github.com/repos/foo/bar/issues/comments/6", _body, _headers, _options})
end
end
end
<|start_filename|>lib/code_corps_web/views/stripe_connect_plan_view.ex<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectPlanView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
attributes [:amount, :created, :id_from_stripe, :inserted_at, :name, :updated_at]
has_one :project, type: "project", field: :project_id
end
<|start_filename|>test/lib/code_corps/policy/conversation_part_test.exs<|end_filename|>
defmodule CodeCorps.Policy.ConversationPartTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.ConversationPart, only: [create?: 2, scope: 2, show?: 2]
alias CodeCorps.{ConversationPart, Repo}
defp params(user, conversation) do
%{
"author_id" => user.id,
"body" => "<PASSWORD>",
"conversation_id" => conversation.id
}
end
describe "scope" do
test "returns all records for admin user" do
insert_list(3, :conversation_part)
user = insert(:user, admin: true)
assert ConversationPart |> scope(user) |> Repo.all |> Enum.count == 3
end
test "returns records where user is the author or they administer the project" do
user = insert(:user, admin: false)
%{project: project_user_applied_to} =
insert(:project_user, user: user, role: "pending")
%{project: project_user_contributes_to} =
insert(:project_user, user: user, role: "contributor")
%{project: project_user_administers} =
insert(:project_user, user: user, role: "admin")
%{project: project_user_owns} =
insert(:project_user, user: user, role: "owner")
message_in_project_applied_to =
insert(:message, project: project_user_applied_to)
message_in_contributing_project =
insert(:message, project: project_user_contributes_to)
message_in_administered_project =
insert(:message, project: project_user_administers)
message_in_owned_project =
insert(:message, project: project_user_owns)
conversation_when_target = insert(:conversation, user: user)
conversation_when_pending =
insert(:conversation, message: message_in_project_applied_to)
conversation_when_contributor =
insert(:conversation, message: message_in_contributing_project)
conversation_when_admin =
insert(:conversation, message: message_in_administered_project)
conversation_when_owner =
insert(:conversation, message: message_in_owned_project)
some_other_conversation = insert(:conversation)
part_in_conversation_when_target =
insert(:conversation_part, conversation: conversation_when_target)
part_in_project_applied_to =
insert(:conversation_part, conversation: conversation_when_pending)
part_in_contributing_project =
insert(:conversation_part, conversation: conversation_when_contributor)
part_in_administered_project =
insert(:conversation_part, conversation: conversation_when_admin)
part_in_owned_project =
insert(:conversation_part, conversation: conversation_when_owner)
part_in_some_other_conversation =
insert(:conversation_part, conversation: some_other_conversation)
part_closed =
insert(:conversation_part, conversation: conversation_when_target, part_type: "closed")
result_ids =
ConversationPart
|> scope(user)
|> Repo.all
|> Enum.map(&Map.get(&1, :id))
assert part_in_conversation_when_target.id in result_ids
refute part_in_project_applied_to.id in result_ids
refute part_in_contributing_project.id in result_ids
assert part_in_administered_project.id in result_ids
assert part_in_owned_project.id in result_ids
refute part_in_some_other_conversation.id in result_ids
refute part_closed.id in result_ids
end
end
describe "create?" do
test "returns true when user is the target" do
user = insert(:user)
message = insert(:message)
conversation = insert(:conversation, message: message, user: user)
params = params(user, conversation)
assert create?(user, params)
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
params = params(user, conversation)
refute create?(user, params)
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
params = params(user, conversation)
refute create?(user, params)
end
test "returns true when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
params = params(user, conversation)
assert create?(user, params)
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
params = params(user, conversation)
assert create?(user, params)
end
end
describe "show?" do
test "returns true when user is the target" do
user = insert(:user)
message = insert(:message)
conversation = insert(:conversation, message: message, user: user)
conversation_part = insert(:conversation_part, conversation: conversation)
assert show?(user, conversation_part)
end
test "returns false when user is a pending project member" do
%{project: project, user: user} = insert(:project_user, role: "pending")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
conversation_part = insert(:conversation_part, conversation: conversation)
refute show?(user, conversation_part)
end
test "returns false when user is a project contributor" do
%{project: project, user: user} = insert(:project_user, role: "contributor")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
conversation_part = insert(:conversation_part, conversation: conversation)
refute show?(user, conversation_part)
end
test "returns true when user is a project admin" do
%{project: project, user: user} = insert(:project_user, role: "admin")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
conversation_part = insert(:conversation_part, conversation: conversation)
assert show?(user, conversation_part)
end
test "returns true when user is project owner" do
%{project: project, user: user} = insert(:project_user, role: "owner")
message = insert(:message, project: project)
conversation = insert(:conversation, message: message)
conversation_part = insert(:conversation_part, conversation: conversation)
assert show?(user, conversation_part)
end
end
end
<|start_filename|>lib/code_corps_web/controllers/role_skill_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.RoleSkillController do
@moduledoc false
use CodeCorpsWeb, :controller
alias CodeCorps.{RoleSkill, User, Helpers.Query}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
with role_skills <- RoleSkill |> Query.id_filter(params) |> Repo.all do
conn |> render("index.json-api", data: role_skills)
end
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %RoleSkill{} = role_skill <- RoleSkill |> Repo.get(id) do
conn |> render("show.json-api", data: role_skill)
end
end
@spec create(Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %RoleSkill{}, params),
{:ok, %RoleSkill{} = role_skill} <- %RoleSkill{} |> RoleSkill.create_changeset(params) |> Repo.insert
do
conn |> put_status(:created) |> render("show.json-api", data: role_skill)
end
end
@spec delete(Conn.t, map) :: Conn.t
def delete(%Conn{} = conn, %{"id" => id} = _params) do
with %RoleSkill{} = role_skill <- RoleSkill |> Repo.get(id),
%User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:delete, role_skill),
{:ok, %RoleSkill{} = _role_skill} <- role_skill |> Repo.delete
do
conn |> Conn.assign(:role_skill, role_skill) |> send_resp(:no_content, "")
end
end
end
<|start_filename|>lib/code_corps/model/organization.ex<|end_filename|>
defmodule CodeCorps.Organization do
@moduledoc """
Represents an organization on Code Corps, e.g. "Code Corps" itself.
"""
use CodeCorps.Model
import CodeCorps.Helpers.RandomIconColor
import CodeCorps.Helpers.Slug
import CodeCorps.Validators.SlugValidator
alias CodeCorps.SluggedRoute
alias Ecto.Changeset
@type t :: %__MODULE__{}
schema "organizations" do
field :approved, :boolean
field :cloudinary_public_id
field :default_color
field :description, :string
field :invite_code, :string, virtual: true
field :name, :string
field :slug, :string
belongs_to :owner, CodeCorps.User
has_one :organization_invite, CodeCorps.OrganizationInvite
has_one :slugged_route, CodeCorps.SluggedRoute
has_one :stripe_connect_account, CodeCorps.StripeConnectAccount
has_many :organization_github_app_installations, CodeCorps.OrganizationGithubAppInstallation
has_many :projects, CodeCorps.Project
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:cloudinary_public_id, :description, :default_color, :name, :slug])
|> validate_required([:description, :name])
end
@doc """
Builds a changeset for creating an organization.
"""
def create_changeset(struct, params) do
struct
|> changeset(params)
|> cast(params, [:invite_code, :owner_id])
|> maybe_generate_slug()
|> validate_required([:cloudinary_public_id, :description, :owner_id, :slug])
|> assoc_constraint(:owner)
|> validate_slug(:slug)
|> unique_constraint(:slug, name: :organizations_lower_slug_index)
|> put_slugged_route()
|> generate_icon_color(:default_color)
|> put_change(:approved, false)
end
@spec update_changeset(struct, map) :: Changeset.t
def update_changeset(struct, params \\ %{}) do
struct
|> changeset(params)
|> cast(params, [:approved])
end
defp maybe_generate_slug(%Changeset{changes: %{slug: _}} = changeset) do
changeset
end
defp maybe_generate_slug(%Changeset{} = changeset) do
changeset |> generate_slug(:name, :slug)
end
defp put_slugged_route(%Changeset{} = changeset) do
case changeset do
%Ecto.Changeset{valid?: true, changes: %{slug: slug}} ->
slugged_route_changeset = SluggedRoute.create_changeset(%SluggedRoute{}, %{slug: slug})
put_assoc(changeset, :slugged_route, slugged_route_changeset)
_ ->
changeset
end
end
end
<|start_filename|>test/lib/code_corps/github/event/issues/validator_test.exs<|end_filename|>
defmodule CodeCorps.GitHub.Event.Issues.ValidatorTest do
@moduledoc false
use ExUnit.Case, async: true
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.GitHub.Event.Issues.Validator
describe "valid?/1" do
test "returns true for any Issues event fixture" do
assert "issues_opened" |> load_event_fixture() |> Validator.valid?
assert "issues_closed" |> load_event_fixture() |> Validator.valid?
assert "issues_edited" |> load_event_fixture() |> Validator.valid?
assert "issues_reopened" |> load_event_fixture() |> Validator.valid?
end
test "returns false for an unsupported structure" do
refute Validator.valid?("foo")
refute Validator.valid?(%{"foo" => "bar"})
refute Validator.valid?(%{"issue" => %{"bar" => "baz"}})
end
end
end
<|start_filename|>lib/code_corps/github/api/jwt.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.JWT do
@moduledoc """
  In charge of loading a GitHub App .pem and generating a JSON Web Token from
  it.
"""
@doc """
Generates a JWT from the GitHub App's generated RSA private key using the
RS256 algo, where the issuer is the GitHub App's ID.
Used to exchange the JWT for an access token for a given integration, or
for the GitHub App itself.
Expires in 5 minutes.
"""
def generate do
signer = rsa_key() |> Joken.rs256()
%{}
|> Joken.token
|> Joken.with_exp(Timex.now |> Timex.shift(minutes: 5) |> Timex.to_unix)
|> Joken.with_iss(app_id())
|> Joken.with_iat(Timex.now |> Timex.to_unix)
|> Joken.with_signer(signer)
|> Joken.sign
|> Joken.get_compact
end
defp rsa_key do
:code_corps
|> Application.get_env(:github_app_pem)
|> JOSE.JWK.from_pem()
end
defp app_id(), do: Application.get_env(:code_corps, :github_app_id)
end
<|start_filename|>test/lib/code_corps_web/controllers/category_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.CategoryControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :category
@valid_attrs %{name: "Technology"}
@invalid_attrs %{name: nil}
describe "index" do
test "lists all entries on index", %{conn: conn} do
[category_1, category_2] = insert_pair(:category)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([category_1.id, category_2.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
category = insert(:category)
conn
|> request_show(category)
|> json_response(200)
|> assert_id_from_response(category.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag authenticated: :admin
test "creates and renders resource when data is valid", %{conn: conn} do
assert conn |> request_create(@valid_attrs) |> json_response(201)
end
@tag authenticated: :admin
test "renders 422 when data is invalid", %{conn: conn} do
assert conn |> request_create(@invalid_attrs) |> json_response(422)
end
test "renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
describe "update" do
@tag authenticated: :admin
test "updates and renders chosen resource when data is valid", %{conn: conn} do
assert conn |> request_update(@valid_attrs) |> json_response(200)
end
@tag authenticated: :admin
test "renders 422 when data is invalid", %{conn: conn} do
assert conn |> request_update(@invalid_attrs) |> json_response(422)
end
test "renders 401 when not authenticated", %{conn: conn} do
assert conn |> request_update |> json_response(401)
end
@tag :authenticated
test "renders 403 when not authorized", %{conn: conn} do
assert conn |> request_update |> json_response(403)
end
@tag authenticated: :admin
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_update(:not_found) |> json_response(404)
end
end
end
<|start_filename|>test/lib/code_corps_web/controllers/skill_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.SkillControllerTest do
use CodeCorpsWeb.ApiCase, resource_name: :skill
@valid_attrs %{
description: "Elixir is a functional, concurrent, general-purpose programming language that runs on the Erlang virtual machine (BEAM).",
original_row: 1,
title: "Elixir"
}
@invalid_attrs %{title: nil}
describe "index" do
test "lists all entries on index", %{conn: conn} do
[skill_1, skill_2] = insert_pair(:skill)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([skill_1.id, skill_2.id])
end
test "filters resources on index", %{conn: conn} do
[skill_1, skill_2 | _] = insert_list(3, :skill)
path = "skills/?filter[id]=#{skill_1.id},#{skill_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([skill_1.id, skill_2.id])
end
test "returns search results on index", %{conn: conn} do
ruby = insert(:skill, title: "Ruby")
rails = insert(:skill, title: "Rails")
insert(:skill, title: "Phoenix")
params = %{"query" => "r"}
path = conn |> skill_path(:index, params)
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([ruby.id, rails.id])
end
test "limit filter limits results on index", %{conn: conn} do
insert_list(6, :skill)
params = %{"limit" => 5}
path = conn |> skill_path(:index, params)
json = conn |> get(path) |> json_response(200)
returned_skills_length = json["data"] |> length
assert returned_skills_length == 5
end
test "lists popular skills", %{conn: conn} do
[skill_1, skill_2] = insert_pair(:skill)
insert(:user_skill, skill: skill_1)
insert_list(2, :user_skill, skill: skill_2)
params = %{"popular" => "true"}
path = conn |> skill_path(:index, params)
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([skill_2.id, skill_1.id])
end
end
describe "show" do
test "shows chosen resource", %{conn: conn} do
skill = insert(:skill)
conn
|> request_show(skill)
|> json_response(200)
|> assert_id_from_response(skill.id)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
@tag authenticated: :admin
test "creates and renders resource when data is valid", %{conn: conn} do
assert conn |> request_create(@valid_attrs) |> json_response(201)
end
@tag authenticated: :admin
test "does not create resource and renders 422 when data is invalid", %{conn: conn} do
assert conn |> request_create(@invalid_attrs) |> json_response(422)
end
test "does not create resource and renders 401 when unauthenticated", %{conn: conn} do
assert conn |> request_create |> json_response(401)
end
@tag :authenticated
test "does not create resource and renders 403 when not authorized", %{conn: conn} do
assert conn |> request_create |> json_response(403)
end
end
end
<|start_filename|>priv/repo/migrations/20170925230419_add_payload_to_git_hub_events.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.AddPayloadToGitHubEvents do
use Ecto.Migration
def change do
alter table(:github_events) do
add :payload, :map
end
end
end
<|start_filename|>test/lib/code_corps/policy/organization_test.exs<|end_filename|>
defmodule CodeCorps.Policy.OrganizationTest do
use CodeCorps.PolicyCase
import CodeCorps.Policy.Organization, only: [create?: 2, update?: 3]
describe "create" do
test "returns true when user is an admin" do
user = build(:user, admin: true)
assert create?(user, %{})
end
test "returns true when the code is correct" do
user = build(:user, admin: false)
organization_invite = insert(:organization_invite)
params = %{"invite_code" => organization_invite.code}
assert create?(user, params)
end
test "returns false when code is incorrect" do
user = build(:user, admin: false)
insert(:organization_invite)
params = %{"invite_code" => "incorrect"}
refute create?(user, params)
end
test "returns false when code is correct but is associated with an organization" do
user = build(:user, admin: false)
      organization = insert(:organization)
      organization_invite = insert(:organization_invite, organization: organization)
params = %{"invite_code" => organization_invite.code}
refute create?(user, params)
end
end
describe "update" do
test "returns true when user is an admin" do
user = insert(:user, admin: true)
organization = insert(:organization)
assert update?(user, organization, %{})
end
test "returns false when user is approving as the admin" do
user = insert(:user, admin: true)
organization = build(:organization, owner_id: user.id)
assert update?(user, organization, %{"approved" => "true"})
end
test "returns false when user is approving as the organization owner" do
user = insert(:user)
organization = build(:organization, owner_id: user.id)
assert update?(user, organization, %{"approved" => "true"})
end
test "returns true when user is the organization owner" do
user = insert(:user)
organization = build(:organization, owner_id: user.id)
assert update?(user, organization, %{})
end
test "returns false when user is not the organization owner" do
user = insert(:user)
organization = build(:organization)
refute update?(user, organization, %{})
end
end
end
<|start_filename|>lib/code_corps/github/api/installation.ex<|end_filename|>
defmodule CodeCorps.GitHub.API.Installation do
@moduledoc """
Functions for performing installation actions on the GitHub API.
"""
alias CodeCorps.{
GitHub,
GithubAppInstallation,
Repo
}
alias Ecto.Changeset
@doc """
List repositories that are accessible to the authenticated installation.
All pages of records are retrieved.
https://developer.github.com/v3/apps/installations/#list-repositories
"""
@spec repositories(GithubAppInstallation.t) :: {:ok, list(map)} | {:error, GitHub.paginated_endpoint_error}
def repositories(%GithubAppInstallation{} = installation) do
with {:ok, access_token} <- installation |> get_access_token(),
{:ok, responses} <- access_token |> fetch_repositories() do
{:ok, responses |> extract_repositories}
else
{:error, error} -> {:error, error}
end
end
@spec fetch_repositories(String.t) :: {:ok, list(map)} | {:error, GitHub.paginated_endpoint_error}
defp fetch_repositories(access_token) do
"installation/repositories"
|> GitHub.get_all(%{}, [access_token: access_token, params: [per_page: 100]])
end
@spec extract_repositories(list(map)) :: list(map)
defp extract_repositories(responses) do
responses
|> Enum.map(&Map.get(&1, "repositories"))
|> List.flatten
end
@doc """
Get the access token for the installation.
Returns either the current access token stored in the database because
it has not yet expired, or makes a request to the GitHub API for a new
access token using the GitHub App's JWT.
https://developer.github.com/apps/building-integrations/setting-up-and-registering-github-apps/about-authentication-options-for-github-apps/#authenticating-as-an-installation
"""
@spec get_access_token(GithubAppInstallation.t) :: {:ok, String.t} | {:error, GitHub.api_error_struct} | {:error, Changeset.t}
def get_access_token(%GithubAppInstallation{access_token: token, access_token_expires_at: expires_at} = installation) do
case token_expired?(expires_at) do
true -> installation |> refresh_token()
false -> {:ok, token} # return the existing token
end
end
@doc """
Refreshes the access token for the installation.
Makes a request to the GitHub API for a new access token using the GitHub
App's JWT.
https://developer.github.com/apps/building-integrations/setting-up-and-registering-github-apps/about-authentication-options-for-github-apps/#authenticating-as-an-installation
"""
@spec refresh_token(GithubAppInstallation.t) :: {:ok, String.t} | {:error, GitHub.api_error_struct} | {:error, Changeset.t}
def refresh_token(%GithubAppInstallation{github_id: installation_id} = installation) do
endpoint = "installations/#{installation_id}/access_tokens"
with {:ok, %{"token" => token, "expires_at" => expires_at}} <-
GitHub.integration_request(:post, endpoint, %{}, %{}, []),
{:ok, %GithubAppInstallation{}} <-
update_token(installation, token, expires_at)
do
{:ok, token}
else
{:error, error} -> {:error, error}
end
end
@spec update_token(GithubAppInstallation.t, String.t, String.t) :: {:ok, GithubAppInstallation.t} | {:error, Changeset.t}
defp update_token(%GithubAppInstallation{} = installation, token, expires_at) do
installation
|> GithubAppInstallation.access_token_changeset(%{access_token: token, access_token_expires_at: expires_at})
|> Repo.update
end
@doc false
@spec token_expired?(String.t | DateTime.t | nil) :: true | false
def token_expired?(expires_at) when is_binary(expires_at) do
expires_at
|> Timex.parse!("{ISO:Extended:Z}")
|> token_expired?()
end
def token_expired?(%DateTime{} = expires_at) do
Timex.before?(expires_at, Timex.now)
end
def token_expired?(nil), do: true
end
<|start_filename|>lib/code_corps/policy/comment.ex<|end_filename|>
defmodule CodeCorps.Policy.Comment do
@moduledoc ~S"""
Authorization policies for performing actions on `Comment` records
"""
alias CodeCorps.{Comment, User}
def create?(%User{id: user_id}, %{"user_id" => author_id})
when user_id == author_id and not is_nil(user_id), do: true
def create?(%User{}, %{}), do: false
def update?(%User{id: user_id}, %Comment{user_id: author_id})
when user_id == author_id and not is_nil(user_id), do: true
def update?(%User{}, %Comment{}), do: false
end
<|start_filename|>test/lib/code_corps/cloudex/cloudinary_url_test.exs<|end_filename|>
defmodule CodeCorps.Cloudex.CloudinaryUrlTest do
alias CodeCorps.Cloudex.CloudinaryUrl
use ExUnit.Case, async: true
test "calls Cloudex.Url.for with correct arguments" do
expected_url = "https://placehold.it/100x100"
url = CloudinaryUrl.for(:test_public_id, %{height: 100, width: 100}, nil, nil, nil)
assert expected_url == url
end
test "call Cloudex.Url.for insert https://" do
expected_url = "https://placehold.it/100x100"
url = CloudinaryUrl.for("//placehold.it/100x100", %{height: 100, width: 100}, nil, nil, nil)
assert expected_url == url
end
test "returns correct url if called without public_id" do
expected_url = "#{Application.get_env(:code_corps, :asset_host)}/icons/type1_default_version1_color1.png"
url = CloudinaryUrl.for(nil, %{}, "version1", "color1", "type1")
assert expected_url == url
end
end
<|start_filename|>test/lib/code_corps_web/views/stripe_connect_subscription_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripeConnectSubscriptionViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly" do
project = insert(:project)
plan = insert(:stripe_connect_plan, project: project)
user = insert(:user)
subscription = insert(:stripe_connect_subscription, stripe_connect_plan: plan, user: user)
subscription = CodeCorpsWeb.StripeConnectSubscriptionController.preload(subscription)
rendered_json = render(CodeCorpsWeb.StripeConnectSubscriptionView, "show.json-api", data: subscription)
expected_json = %{
"data" => %{
"attributes" => %{
"inserted-at" => subscription.inserted_at,
"quantity" => subscription.quantity,
"updated-at" => subscription.updated_at
},
"id" => subscription.id |> Integer.to_string,
"relationships" => %{
"project" => %{
"data" => %{"id" => project.id |> Integer.to_string, "type" => "project"}
},
"user" => %{
"data" => %{"id" => user.id |> Integer.to_string, "type" => "user"}
}
},
"type" => "stripe-connect-subscription",
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>test/lib/code_corps/emails/project_user_acceptance_email_test.exs<|end_filename|>
defmodule CodeCorps.Emails.ProjectUserAcceptanceEmailTest do
use CodeCorps.ModelCase
use Bamboo.Test
alias CodeCorps.Emails.ProjectUserAcceptanceEmail
test "acceptance email works" do
%{project: project, user: user} = project_user = insert(:project_user)
email = ProjectUserAcceptanceEmail.create(project_user)
assert email.from == "Code Corps<<EMAIL>>"
assert email.to == user.email
template_model = email.private.template_model
assert template_model == %{
project_title: project.title,
project_url: "http://localhost:4200/#{project.organization.slug}/#{project.slug}",
project_logo_url: "#{Application.get_env(:code_corps, :asset_host)}/icons/project_default_large_.png",
user_image_url: "#{Application.get_env(:code_corps, :asset_host)}/icons/user_default_large_.png",
user_first_name: user.first_name,
subject: "#{project.title} just added you as a contributor"
}
end
end
<|start_filename|>lib/code_corps_web/views/role_skill_view.ex<|end_filename|>
defmodule CodeCorpsWeb.RoleSkillView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
has_one :role, type: "role", field: :role_id
has_one :skill, type: "skill", field: :skill_id
end
<|start_filename|>test/lib/code_corps_web/views/slugged_route_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.SluggedRouteViewTest do
use CodeCorpsWeb.ViewCase
test "renders all attributes and relationships properly for organization" do
organization = insert(:organization)
slugged_route = insert(:slugged_route, organization: organization)
rendered_json = render(CodeCorpsWeb.SluggedRouteView, "show.json-api", data: slugged_route)
expected_json = %{
"data" => %{
"id" => slugged_route.id |> Integer.to_string,
"type" => "slugged-route",
"attributes" => %{
"inserted-at" => slugged_route.inserted_at,
"slug" => slugged_route.slug,
"updated-at" => slugged_route.updated_at,
},
"relationships" => %{
"organization" => %{
"data" => %{"id" => slugged_route.organization_id |> Integer.to_string, "type" => "organization"}
},
"user" => %{
"data" => nil
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
test "renders all attributes and relationships properly for user" do
user = insert(:user)
slugged_route = insert(:slugged_route, user: user)
rendered_json = render(CodeCorpsWeb.SluggedRouteView, "show.json-api", data: slugged_route)
expected_json = %{
"data" => %{
"id" => slugged_route.id |> Integer.to_string,
"type" => "slugged-route",
"attributes" => %{
"inserted-at" => slugged_route.inserted_at,
"slug" => slugged_route.slug,
"updated-at" => slugged_route.updated_at,
},
"relationships" => %{
"organization" => %{
"data" => nil
},
"user" => %{
"data" => %{"id" => slugged_route.user_id |> Integer.to_string, "type" => "user"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps_web/views/changeset_view.ex<|end_filename|>
defmodule CodeCorpsWeb.ChangesetView do
@moduledoc false
use CodeCorpsWeb, :view
use JaSerializer.PhoenixView
import CodeCorpsWeb.Gettext
alias Ecto.Changeset
alias JaSerializer.Formatter.Utils
@doc """
Traverses and translates changeset errors.
See `Ecto.Changeset.traverse_errors/2` and
`CodeCorpsWeb.ErrorHelpers.translate_error/1` for more details.
"""
def translate_errors(%Ecto.Changeset{} = changeset) do
errors =
changeset
|> Changeset.traverse_errors(&translate_error/1)
|> format_errors()
errors
end
defp format_errors(errors) do
errors
|> Map.keys
|> Enum.map(fn(attribute) -> format_attribute_errors(errors, attribute) end)
|> Enum.flat_map(fn(error) -> error end)
end
defp format_attribute_errors(errors, attribute) do
errors
|> Map.get(attribute)
|> Enum.map(&create_error(attribute, &1))
end
def create_error(attribute, message) do
%{
detail: format_detail(attribute, message),
title: message,
source: %{
pointer: "data/attributes/#{Utils.format_key(attribute)}"
},
status: "422"
}
end
def render("422.json", %{changeset: changeset}) do
# When encoded, the changeset returns its errors
# as a JSON object. So we just pass it forward.
%{
errors: translate_errors(changeset),
jsonapi: %{
version: "1.0"
}
}
end
defp format_detail(attribute, message) do
"#{attribute |> Utils.humanize |> translate_attribute} #{message}"
end
defp translate_attribute("Cloudinary public"), do: dgettext("errors", "Cloudinary public")
defp translate_attribute("Github"), do: dgettext("errors", "Github")
defp translate_attribute("Slug"), do: dgettext("errors", "Slug")
defp translate_attribute(attribute), do: attribute
end
<|start_filename|>priv/repo/migrations/20171016223356_create_github_pull_requests.exs<|end_filename|>
defmodule CodeCorps.Repo.Migrations.CreateGithubPullRequests do
use Ecto.Migration
def change do
create table(:github_pull_requests) do
add :additions, :integer
add :body, :text
add :changed_files, :integer
add :closed_at, :utc_datetime
add :comments, :integer
add :comments_url, :text
add :commits, :integer
add :commits_url, :text
add :deletions, :integer
add :diff_url, :text
add :github_created_at, :utc_datetime
add :github_id, :integer
add :github_updated_at, :utc_datetime
add :html_url, :text
add :issue_url, :text
add :locked, :boolean, default: false, null: false
add :merge_commit_sha, :text
add :mergeable_state, :text
add :merged, :boolean, default: false, null: false
add :merged_at, :utc_datetime
add :number, :integer
add :patch_url, :text
add :review_comment_url, :text
add :review_comments, :integer
add :review_comments_url, :text
add :state, :string
add :statuses_url, :text
add :title, :text
add :url, :text
timestamps()
add :github_repo_id, references(:github_repos)
end
create unique_index(:github_pull_requests, [:github_id])
end
end
<|start_filename|>lib/code_corps/model/stripe_connect_charge.ex<|end_filename|>
defmodule CodeCorps.StripeConnectCharge do
use CodeCorps.Model
@type t :: %__MODULE__{}
schema "stripe_connect_charges" do
field :amount, :integer
field :amount_refunded, :integer
field :application_id_from_stripe, :string
field :application_fee_id_from_stripe, :string
field :balance_transaction_id_from_stripe, :string
field :captured, :boolean
field :created, :integer
field :currency, :string
field :customer_id_from_stripe, :string
field :description, :string
field :failure_code, :string
field :failure_message, :string
field :id_from_stripe, :string, null: false
field :invoice_id_from_stripe, :string
field :paid, :boolean
field :refunded, :boolean
field :review_id_from_stripe, :string
field :source_transfer_id_from_stripe, :string
field :statement_descriptor, :string
field :status, :string
belongs_to :stripe_connect_account, CodeCorps.StripeConnectAccount
belongs_to :stripe_connect_customer, CodeCorps.StripeConnectCustomer
belongs_to :user, CodeCorps.User
timestamps()
end
@create_attributes [
# attributes
:amount, :amount_refunded, :application_id_from_stripe,
:application_fee_id_from_stripe, :balance_transaction_id_from_stripe,
:captured, :created, :currency, :customer_id_from_stripe, :description,
:failure_code, :failure_message, :id_from_stripe, :invoice_id_from_stripe,
:paid, :refunded, :review_id_from_stripe, :source_transfer_id_from_stripe,
:statement_descriptor, :status,
# association ids
:stripe_connect_account_id, :stripe_connect_customer_id, :user_id
]
@required_attributes [
:id_from_stripe, :stripe_connect_account_id, :stripe_connect_customer_id, :user_id
]
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, @create_attributes)
|> validate_required(@required_attributes)
|> assoc_constraint(:stripe_connect_account)
|> assoc_constraint(:stripe_connect_customer)
|> assoc_constraint(:user)
|> unique_constraint(:id_from_stripe)
end
end
<|start_filename|>lib/code_corps_web/controllers/github_app_installation_controller.ex<|end_filename|>
defmodule CodeCorpsWeb.GithubAppInstallationController do
@moduledoc false
use CodeCorpsWeb, :controller
import CodeCorps.Helpers.Query, only: [id_filter: 2]
alias CodeCorps.{Analytics.SegmentTracker, GithubAppInstallation, User}
action_fallback CodeCorpsWeb.FallbackController
plug CodeCorpsWeb.Plug.DataToAttributes
plug CodeCorpsWeb.Plug.IdsToIntegers
@spec index(Conn.t, map) :: Conn.t
def index(%Conn{} = conn, %{} = params) do
installations =
GithubAppInstallation
|> id_filter(params)
|> Repo.all()
|> preload()
conn |> render("index.json-api", data: installations)
end
@spec show(Conn.t, map) :: Conn.t
def show(%Conn{} = conn, %{"id" => id}) do
with %GithubAppInstallation{} = installation <- GithubAppInstallation |> Repo.get(id) |> preload() do
conn |> render("show.json-api", data: installation)
end
end
@spec create(Plug.Conn.t, map) :: Conn.t
def create(%Conn{} = conn, %{} = params) do
with %User{} = current_user <- conn |> CodeCorps.Guardian.Plug.current_resource,
{:ok, :authorized} <- current_user |> Policy.authorize(:create, %GithubAppInstallation{}, params),
{:ok, %GithubAppInstallation{} = installation} <- %GithubAppInstallation{} |> GithubAppInstallation.create_changeset(params) |> Repo.insert,
installation <- preload(installation)
do
current_user |> track_created(installation)
conn |> put_status(:created) |> render("show.json-api", data: installation)
end
end
@preloads [:github_repos, :organization_github_app_installations]
def preload(data) do
Repo.preload(data, @preloads)
end
@spec track_created(User.t, GithubAppInstallation.t) :: any
defp track_created(%User{id: user_id}, %GithubAppInstallation{} = installation) do
user_id |> SegmentTracker.track("Created GitHub App Installation", installation)
end
end
<|start_filename|>test/lib/code_corps_web/controllers/stripe_platform_events_controller_test.exs<|end_filename|>
defmodule CodeCorpsWeb.StripePlatformEventsControllerTest do
use CodeCorpsWeb.ConnCase
alias CodeCorps.StripeEvent
setup do
conn =
%{build_conn() | host: "api."}
|> put_req_header("accept", "application/json")
|> put_req_header("content-type", "application/json")
{:ok, conn: conn}
end
test "responds with 200 when the event will be processed", %{conn: conn} do
event = %{"id" => "evt_123", "livemode" => false, "type" => "any.event"}
path = conn |> stripe_platform_events_path(:create)
assert conn |> post(path, event) |> response(200)
assert StripeEvent |> Repo.aggregate(:count, :id) == 1
end
# TODO: The following two can be merged into one and actual environment matching behavior
# can be added to the EnvironmentFilter test module
#
# TODO: Can also probably move the supervisor stuff to the webhook processor module test
# (the group of tests which will eventually test async behavior)
test "returns 400, does nothing if event is livemode and env is not :prod", %{conn: conn} do
Application.put_env(:code_corps, :stripe_env, :other)
event = %{"id" => "evt_123", "livemode" => true, "type" => "any.event"}
path = conn |> stripe_platform_events_path(:create)
assert conn |> post(path, event) |> response(400)
assert StripeEvent |> Repo.aggregate(:count, :id) == 0
# put env back to original state
Application.put_env(:code_corps, :stripe_env, :test)
end
test "returns 400, does nothing if event is not livemode and env is :prod", %{conn: conn} do
Application.put_env(:code_corps, :stripe_env, :prod)
event = %{"id" => "evt_123", "livemode" => false, "type" => "any.event"}
path = conn |> stripe_platform_events_path(:create)
assert conn |> post(path, event) |> response(400)
assert StripeEvent |> Repo.aggregate(:count, :id) == 0
# put env back to original state
Application.put_env(:code_corps, :stripe_env, :test)
end
end
<|start_filename|>lib/code_corps/policy/organization.ex<|end_filename|>
defmodule CodeCorps.Policy.Organization do
@moduledoc ~S"""
Authorization policies for performing actions on `Organization` records
"""
import CodeCorps.Policy.Helpers, only: [owned_by?: 2]
import Ecto.Query
alias CodeCorps.{Organization, OrganizationInvite, Repo, User}
@doc ~S"""
Returns a boolean indicating if the specified user is allowed to create the
organization specified by a map of attributes.
"""
@spec create?(User.t, map) :: boolean
def create?(%User{admin: true}, %{}), do: true
def create?(%User{}, %{"invite_code" => invite_code}) do
case invite_code |> get_invite() do
nil -> false
_invite -> true
end
end
def create?(%User{}, %{}), do: false
@doc ~S"""
Returns a boolean indicating if the specified user is allowed to update the
specified organization.
"""
@spec update?(User.t, Organization.t, map) :: boolean
def update?(%User{admin: true}, %Organization{}, %{}), do: true
def update?(%User{}, %Organization{}, %{"approved" => true}), do: false
def update?(%User{} = user, %Organization{} = organization, %{}), do: organization |> owned_by?(user)
@spec get_invite(String.t) :: OrganizationInvite.t | nil
defp get_invite(code) do
OrganizationInvite
|> where([oi], is_nil(oi.organization_id))
|> Repo.get_by(code: code)
end
end
<|start_filename|>test/lib/code_corps_web/views/message_view_test.exs<|end_filename|>
defmodule CodeCorpsWeb.MessageViewTest do
use CodeCorpsWeb.ViewCase
alias CodeCorps.Repo
test "renders all attributes and relationships properly" do
project = insert(:project)
user = insert(:user)
message = insert(:message, author: user, project: project)
conversation = insert(:conversation, message: message)
rendered_json = render(CodeCorpsWeb.MessageView, "show.json-api", data: message |> Repo.preload(:conversations))
expected_json = %{
"data" => %{
"id" => message.id |> Integer.to_string,
"type" => "message",
"attributes" => %{
"body" => message.body,
"initiated-by" => message.initiated_by,
"inserted-at" => message.inserted_at,
"subject" => message.subject,
"updated-at" => message.updated_at
},
"relationships" => %{
"author" => %{
"data" => %{"id" => message.author_id |> Integer.to_string, "type" => "user"}
},
"conversations" => %{
"data" => [%{"id" => conversation.id |> Integer.to_string, "type" => "conversation"}]
},
"project" => %{
"data" => %{"id" => message.project_id |> Integer.to_string, "type" => "project"}
}
}
},
"jsonapi" => %{
"version" => "1.0"
}
}
assert rendered_json == expected_json
end
end
<|start_filename|>lib/code_corps/policy/conversation_part.ex<|end_filename|>
defmodule CodeCorps.Policy.ConversationPart do
@moduledoc ~S"""
  Handles `CodeCorps.User` authorization of actions on
  `CodeCorps.ConversationPart` records.
"""
import CodeCorps.Policy.Helpers,
only: [
administered_by?: 2, get_conversation: 1, get_message: 1, get_project: 1
]
import Ecto.Query
alias CodeCorps.{Conversation, ConversationPart, Policy, Repo, User}
@spec scope(Ecto.Queryable.t, User.t) :: Ecto.Queryable.t
def scope(queryable, %User{admin: true}), do: queryable
def scope(queryable, %User{id: id} = current_user) do
scoped_conversation_ids =
Conversation
|> Policy.Conversation.scope(current_user)
|> select([c], c.id)
|> Repo.all()
queryable
|> where(author_id: ^id)
|> or_where([cp], cp.conversation_id in ^scoped_conversation_ids)
|> where(part_type: "comment")
end
def create?(%User{} = user, %{"conversation_id" => _} = params) do
authorize(user, params)
end
def create?(_, _), do: false
def show?(%User{} = user, %ConversationPart{conversation_id: _} = part) do
authorize(user, part)
end
def show?(_, _), do: false
@spec authorize(User.t, ConversationPart.t | map) :: boolean
defp authorize(%User{} = user, attrs) do
%Conversation{} = conversation = attrs |> get_conversation()
is_target? = conversation |> conversation_target?(user)
is_admin? =
conversation
|> get_message()
|> get_project()
|> administered_by?(user)
is_target? or is_admin?
end
defp conversation_target?(%Conversation{user_id: target_id}, %User{id: user_id}) do
target_id == user_id
end
end
| fikape/code-corps-api |
<|start_filename|>app/components/AR.js<|end_filename|>
import React, { Component, StyleSheet, Dimensions, View } from 'react-native';
import Camera from 'react-native-camera';
import WebViewBridge from 'react-native-webview-bridge';
import THREE_RENDER_MARKER from '../lib/threejs/marker.js';
import THREE_RENDER_TEXT from '../lib/threejs/text.js';
import HANDLE_ORIENTATION from '../lib/orientation/orientationHandler.js';
import Location from '../lib/orientation/locationMath.js';
import * as geoAction from '../lib/orientation/utils';
import _ from 'underscore';
const REF_WEBVIEW_BRIDGE = 'webviewbridge';
const WEBVIEW_STYLE = `
* {
color: white;
margin: 0;
padding: 0;
font: 62.5% arial, sans-serif;
background: transparent;
}
html, body {
width: 100%;
height: 100%;
overflow: hidden;
}
.direction-marker {
position: fixed;
width: 30px;
height: 100vh;
}
.left {
z-index: 1;
float: left;
left: 0;
background: linear-gradient(to right, rgba(29,147,145,1) 0%,rgba(125,185,232,0) 100%);
}
.right {
z-index: 1;
float: right;
right: 0;
background: linear-gradient(to left, rgba(29,147,145,1) 0%,rgba(125,185,232,0) 100%);
}
.hidden {
display: none;
}
`;
const WEBVIEW_SCRIPTS = `
<script src="http://code.jquery.com/jquery-1.10.2.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r74/three.min.js"></script>
${ THREE_RENDER_MARKER }
${ THREE_RENDER_TEXT }
${ HANDLE_ORIENTATION }
`;
const HTML = `
<!DOCTYPE html>\n
<html>
<head>
<title>findAR WebView</title>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="viewport" content="width=320, user-scalable=no">
<style type="text/css">
${ WEBVIEW_STYLE }
</style>
</head>
<body>
<div class="direction-marker left hidden"></div>
<div class="direction-marker right hidden"></div>
<p id="alpha"></p>
<p id="target"></p>
${ WEBVIEW_SCRIPTS }
</body>
</html>
`;
const BRIDGE_INJECT_SCRIPT = `
var targetLocIdx = 0;
var targetPinId;
function webViewBridgeReady(cb) {
//checks whether WebViewBridge exists in global scope.
if (window.WebViewBridge) {
cb(window.WebViewBridge);
return;
}
function handler() {
//remove the handler from listener since we don't need it anymore
document.removeEventListener('WebViewBridge', handler, false);
//pass the WebViewBridge object to the callback
cb(window.WebViewBridge);
}
//if WebViewBridge doesn't exist in global scope attach itself to document
//event system. Once the code is being injected by extension, the handler will
//be called.
document.addEventListener('WebViewBridge', handler, false);
}
webViewBridgeReady( function (webViewBridge) {
webViewBridge.send( "BRIDGE_READY" );
webViewBridge.onMessage = function (message) {
    // The message is a JSON object holding the id of the targeted pin and an
    // array of the pins we want to display, where x and z on each pin give
    // the location relative to the device in feet.
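    // e.g. (hypothetical values):
    // { "targetPinId": "-KabcPin", "locs": [ { "id": "-KabcPin", "title": "Home", "x": 12.3, "z": -45.6 } ] }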
var message = JSON.parse( message );
mesh.visible = false;
if( message.targetPinId !== targetPinId ) {
targetPinId = message.targetPinId;
// TODO: Color targeted pin differently
}
message.locs.forEach( function( loc, i ) {
if( !( meshes[i] instanceof THREE.Mesh ) ) {
meshes[i] = mesh.clone();
meshes[i].visible = true;
scene.add( meshes[i] );
}
// TODO: instantiate a new text model
if( !( textmodels[i] instanceof THREE.Mesh ) ) {
textmodels[i] = createTextModel( loc.title );
textmodels[i].visible = true;
scene.add( textmodels[i] );
}
textmodels[i].position.y = -8;
textmodels[i].lookAt( new THREE.Vector3( 0, 0, 0 ) );
textmodels[i].position.x = loc.x;
textmodels[i].position.z = loc.z;
meshes[i].title = loc.title;
meshes[i].position.x = loc.x;
meshes[i].position.z = loc.z;
if( loc.id === targetPinId ) {
targetLocIdx = i;
}
});
// TODO: Delete any meshes in indices greater than or equal to locs.length;
};
});
`;
export default class AR extends Component {
constructor(props) {
super(props);
}
componentDidMount() {
var self = this;
this.watchID = geoAction.setWatch((loc) =>{
self.sendLocsToBridge.call(self, loc);
})
}
componentWillUnmount(){
geoAction.clearWatch(this.watchID)
}
calculateLocs( currentLocation, objectOfPins ) {
var locs = [];
// For each pin in the array of pins,
// Calculate the relative x and z ( where -x is west, x is east, -z is north, and z is south )
// Each unit being a foot.
_.each( objectOfPins, function( pin ) {
locs.push( Location.relativeLocsInFeet( currentLocation, pin ) );
});
return locs;
}
sendLocsToBridge( coordinates ) {
let message = {}
const { pins, targetPin } = this.props;
message.targetPinId = targetPin.id;
message.locs = this.calculateLocs( coordinates, pins );
this.refs.webviewbridge.sendToBridge( JSON.stringify( message ) );
}
onBridgeMessage( message ) {
if( message === "BRIDGE_READY" ) {
var self = this;
geoAction.getCurrent((loc)=>{
self.sendLocsToBridge.call(self, loc);
});
}
}
render() {
return (
<View
style={styles.container}
>
<Camera
ref={(cam) => {
this.camera = cam;
}}
captureQuality={ 'low' }
style={styles.preview}
aspect={Camera.constants.Aspect.fill}>
</Camera>
<View style={styles.webviewcont}>
<WebViewBridge
ref={ REF_WEBVIEW_BRIDGE }
automaticallyAdjustContentInsets={true}
source={{ html: HTML }}
style={styles.webView}
onBridgeMessage={this.onBridgeMessage.bind(this)}
injectedJavaScript={ BRIDGE_INJECT_SCRIPT }
javaScriptEnabled={true}
scalesPageToFit={true}
/>
</View>
</View>
)
}
}
let styles = StyleSheet.create({
container: {
flex: 1,
},
  preview: {
    flex: 1,
    flexDirection: 'column',
    justifyContent: 'flex-end',
    alignItems: 'center',
  },
webviewcont: {
position: 'absolute',
top: 0,
left: 0,
right: 0,
bottom: 0,
backgroundColor: 'transparent'
},
webView: {
backgroundColor: 'transparent'
},
});
<|start_filename|>app/reducers/rootReducer.js<|end_filename|>
import { combineReducers } from 'redux';
import pins from './reducer_pins';
import recent from './reducer_recent';
import user from './reducer_user';
import friends from './reducer_friends';
import targetPin from './reducer_target';
const rootReducer = combineReducers({
pins,
recent,
user,
friends,
targetPin,
});
export default rootReducer;
<|start_filename|>app/components/PinListItem.js<|end_filename|>
import React, {
Component,
Text,
TouchableHighlight,
View,
StyleSheet,
AlertIOS,
Image
} from 'react-native';
import { Actions } from 'react-native-router-flux';
import Location from '../lib/orientation/locationMath';
export default class PinListItem extends Component {
constructor(props) {
super(props);
}
touchOptions() {
const { pin, friends, deletePin, setTarget, redraw, shareWithFriend } = this.props;
AlertIOS.prompt(
pin.title,
'('+pin.longitude + ', ' + pin.latitude + ')',
[{
text: 'Cancel',
style: 'cancel'
},
{
text: 'Edit Title',
onPress: this.editTitle.bind(this)
},
{
text: 'Share',
onPress: () => { Actions.friends({ onPress: shareWithFriend.bind( null, pin ), friends: friends }) },
},
{
text: 'Set Target',
onPress: () => {
setTarget(pin);
redraw();
},
},
{
text: 'Delete',
onPress: () => {
deletePin(pin);
}
}],
'plain-text'
);
}
editTitle(value) {
const { pin, updatePins, updateRecent } = this.props;
updatePins(pin, value);
updateRecent();
}
render() {
const { pin, targetPin, currLoc } = this.props;
let name = 'Your Pin';
let isTarget = pin.id === targetPin.id;
let relative = Location.relativeLocsInFeet( currLoc, pin );
let distance = Math.sqrt( Math.pow( relative.x, 2 ) + Math.pow( relative.z, 2 ) ).toFixed(0);
if ( distance > 5280 ) {
distance /= 5280;
distance = Math.floor( distance );
distance += ' mi.'
} else {
distance += ' ft.';
}
if( pin.friend ) {
name = pin.friend.name;
}
return (
<TouchableHighlight
onPress={() => {
this.touchOptions()
}}
>
<View style={[style.container, pin.friend && style.friend, isTarget && style.target]}>
<Image
source={require('../assets/listviewPin.png')}
style={style.pin}
/>
<View style={style.left}>
<Text style={[style.text, pin.friend && style.friendText, isTarget && style.targetText]}>
{pin.title}
</Text>
<Text style={[style.friendName, isTarget && style.targetText]}>
{name}
</Text>
</View>
<View style={style.right}>
<Text style={[style.distance,isTarget && style.targetText]}>
{distance}
</Text>
</View>
</View>
</TouchableHighlight>
);
}
}
const style = StyleSheet.create({
container: {
flex: 1,
flexDirection: 'row',
backgroundColor: 'white',
borderRadius: 4,
margin: 4,
padding: 6,
},
left: {
flex: 5,
marginLeft: 10,
alignSelf: 'flex-start',
},
right: {
flex: 2,
alignSelf: 'center',
justifyContent: 'flex-end',
},
text: {
alignSelf: 'flex-start',
fontSize: 22,
},
friend: {
backgroundColor: 'lightblue',
},
friendName: {
justifyContent: 'flex-start',
},
target: {
backgroundColor: 'pink',
borderWidth: 2,
borderColor: 'black',
},
targetText: {
color: 'black',
},
distance: {
fontSize: 19,
fontStyle: 'italic',
},
pin: {
flex: 1,
alignSelf: 'center',
height: 50,
}
});
<|start_filename|>app/containers/container_viewContainer.js<|end_filename|>
import React, { Component, PropTypes, StyleSheet, Text, View } from 'react-native';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import getLocationToSave from '../actions/action_dropNewPin';
import updatePins from '../actions/action_updatePins';
import deletePin from '../actions/action_deletePin';
import updateRecent from '../actions/action_updateRecent';
import clearTarget from '../actions/action_setTarget';
import setTarget from '../actions/action_setTarget';
import ViewContainer from '../components/ViewContainer';
function mapStateToProps(state) {
return {
pins: state.pins,
recent: state.recent,
friends: state.friends,
user: state.user,
targetPin: state.targetPin,
};
}
export default connect(mapStateToProps, { getLocationToSave, updatePins, deletePin, updateRecent, setTarget, clearTarget })(ViewContainer);
<|start_filename|>app/actions/action_updatePins.js<|end_filename|>
import { UPDATE_PINS, SET_TARGET } from '../constants/constants.js';
import { userData } from '../lib/db/db.js';
import { Alert } from 'react-native';
function updatePins (payload) {
return {
type: UPDATE_PINS,
payload
};
}
function setTarget(payload) {
return {
type: SET_TARGET,
payload
};
}
export default function(pin, newTitle) {
if(arguments.length === 2) {
pin.title = newTitle;
userData.child(pin.id).set(pin);
}
return (dispatch) => {
//this listens to new things added
userData.on("child_added", function(snap) {
var sharedPin = snap.val();
if (sharedPin.alertedYet !== null && sharedPin.alertedYet === false) {
var message = sharedPin.friend.name + " shared a pin with you!";
//if user choose to show shared pin
var targetRecentlyShared = {};
targetRecentlyShared.id = sharedPin.id;
targetRecentlyShared.longitude = sharedPin.longitude;
targetRecentlyShared.latitude = sharedPin.latitude;
Alert.alert(message, null,
[
{text: 'Show me shared pin!', onPress: () => dispatch(setTarget(targetRecentlyShared))},
{text: 'OK', onPress: () => console.log('OK Pressed')},
]);
//once alerted, yet alertedYet to true so it doesn't alert again
userData.child(sharedPin.id).update({alertedYet: true});
}
});
userData.on("value", function(snap) {
dispatch(updatePins(snap.val()));
});
};
}
<|start_filename|>app/lib/threejs/text.js<|end_filename|>
const THREE_RENDER_TEXT = `
<script src="http://mrdoob.github.com/three.js/examples/fonts/helvetiker_regular.typeface.js"></script>
<script>
var textmodels = [];
var createTextModel = function( text ) {
var canvas = document.createElement('canvas');
canvas.width = 2000;
canvas.height = 400;
var curve = 75;
var context = canvas.getContext('2d');
context.font = "Bold 300px Helvetica";
context.textAlign = "center";
context.fillStyle = "rgba( 0, 0, 0, 0.5 )";
context.beginPath();
context.moveTo( 0, curve );
context.lineTo( 0, canvas.height-curve );
context.quadraticCurveTo( 0, canvas.height, curve, canvas.height);
context.lineTo( canvas.width-curve, canvas.height );
context.quadraticCurveTo( canvas.width, canvas.height, canvas.width, canvas.height-curve );
context.lineTo( canvas.width, curve );
context.quadraticCurveTo( canvas.width, 0, canvas.width-curve, 0);
context.lineTo( curve, 0 );
context.quadraticCurveTo( 0, 0, 0, curve);
context.fill();
context.fillStyle = "rgba( 0, 0, 0, 1.0 )";
context.fillText( text, canvas.width/2+10, canvas.height*2/3+10 );
context.fillStyle = "rgba( 255, 255, 255, 1.0 )";
context.fillText( text, canvas.width/2, canvas.height*2/3 );
var texture = new THREE.Texture( canvas );
texture.needsUpdate = true;
var material = new THREE.MeshBasicMaterial( { map: texture, side: THREE.DoubleSide } );
return new THREE.Mesh(
new THREE.PlaneGeometry( 50, 10 ),
material
);
}
</script>
`;
export default THREE_RENDER_TEXT;
<|start_filename|>app/constants/constants.js<|end_filename|>
export const DROP_NEW_PIN = 'DROP_NEW_PIN';
export const UPDATE_PINS = 'UPDATE_PINS';
export const DELETE_PIN = 'DELETE_PIN';
export const UPDATE_RECENT = 'UPDATE_RECENT';
export const SET_RECENT = 'SET_RECENT';
export const LOG_IN = 'LOG_IN';
export const LOG_OUT = 'LOG_OUT';
export const UPDATE_FRIENDS = 'UPDATE_FRIENDS';
export const SET_TARGET = 'SET_TARGET';
export const CLEAR_TARGET = 'CLEAR_TARGET';
<|start_filename|>app/reducers/reducer_pins.js<|end_filename|>
import { DROP_NEW_PIN, UPDATE_PINS, DELETE_PIN } from '../constants/constants.js';
const initialState = {};
export default function(state = initialState, action) {
switch(action.type) {
case UPDATE_PINS:
return Object.assign({},
action.payload
);
case DROP_NEW_PIN:
let newPin = {};
newPin[action.id] = action.payload;
return Object.assign({}, state,
newPin
);
case DELETE_PIN:
let id = action.payload.id;
// create copy of state
let deletedPinState = Object.assign({}, state);
// create copy of reference to pin we want to delete
deletedPinState[id] = Object.assign({}, state[id]);
// delete pin
delete deletedPinState[id];
return deletedPinState;
default:
return state;
}
}
<|start_filename|>index.ios.js<|end_filename|>
'use strict';
import React, {
AppRegistry,
Component,
StyleSheet,
Text,
View
} from 'react-native';
import { Provider } from 'react-redux';
import { createStore, applyMiddleware } from 'redux';
import rootReducer from './app/reducers/rootReducer.js';
import ViewContainer from './app/containers/container_viewContainer';
import thunk from 'redux-thunk';
import createLogger from 'redux-logger';
import promise from 'redux-promise';
import Signin from './app/containers/container_FBLogin';
import FriendList from './app/components/FriendList';
import { Router, Scene, Actions} from 'react-native-router-flux';
//creates logger
const logger = createLogger();
// creates store
const store = createStore(
rootReducer,
applyMiddleware(thunk, promise, logger)
);
const findAR = () => (
<Provider store={store}>
<Router scenes={scenes} />
</Provider>
);
const scenes = Actions.create(
<Scene key="root" hideNavBar>
<Scene initial key="login" component={Signin} />
<Scene key="view" component={ViewContainer} type="replace" />
<Scene key="friends" component={FriendList} />
</Scene>
);
AppRegistry.registerComponent('findAR', () => findAR);
<|start_filename|>app/components/Map.js<|end_filename|>
import React, {
Component,
StyleSheet,
View,
Dimensions,
AlertIOS,
Text,
Image
} from 'react-native';
import Button from 'react-native-button';
import MapView from 'react-native-maps';
import _ from 'underscore';
import baseImg from '../assets/redPin.png';
import targetImg from '../assets/blackPin.png';
import { PinCallout } from './PinCallout';
import PinEditButton from './PinEditButton';
import * as geoAction from '../lib/orientation/utils';
import { myCurrLoc, currLoc } from '../lib/db/db';
export default class Map extends Component {
constructor(props) {
super(props);
this.state = {
selectedPin: undefined,
dropPinLocation: undefined,
loaded: false,
friendLocs: {},
};
}
componentWillMount() {
const { friends } = this.props;
let self = this;
let counter = 0;
for(var friendId in friends) {
self.setListener(friends[friendId]);
counter++;
if(counter === Object.keys(friends).length) {
this.setState({loaded: true});
}
}
}
componentDidMount() {
geoAction.getCurrent((loc)=>{
this.refs.map.animateToRegion(loc, 100);
});
}
componentWillUpdate(nextProps) {
const {targetPin} = nextProps;
if(this.props.targetPin.id !== targetPin.id) {
if(targetPin.longitude) {
this.goToTarget.call(this, targetPin);
}
}
}
setListener(friend) {
let self = this;
// sets a firebase listener on each friend
currLoc.child(friend.id).on("value", function(snap) {
// updates friend's location in state as they move
let friendObj ={};
friendObj[friend.id] = snap.val();
friendObj = Object.assign({}, self.state.friendLocs, friendObj);
self.setState({
friendLocs: friendObj
});
});
}
setPinTitle(title) {
const { getLocationToSave, recent } = this.props;
getLocationToSave(this.state.dropPinLocation, recent, title);
this.setState({dropPinLocation: undefined});
}
dropPin(coordinate) {
this.setState({dropPinLocation: coordinate});
AlertIOS.prompt(
'Drop a Pin?',
'Enter title:',
[{
text: 'Cancel',
style: 'cancel'
},
{
text: 'OK',
onPress: this.setPinTitle.bind(this)
}],
'plain-text'
);
}
moveMapToUser() {
var self = this;
geoAction.getCurrent((loc) =>{
self.refs.map.animateToRegion(loc, 100);
});
}
goToTarget(targetPin){
let goTo= Object.assign({}, targetPin, {
latitudeDelta: 0.005,
longitudeDelta: 0.005
});
this.refs.map.animateToRegion(goTo, 100);
}
renderMarkers() {
const { pins, targetPin } = this.props;
return _.map(pins, (pinObject, key) => {
let image = baseImg;
if ( key === targetPin.id ) {
image = targetImg;
}
return (
<MapView.Marker
key={key}
coordinate={{latitude: pinObject.latitude, longitude: pinObject.longitude}}
onSelect={() => this.setState({ selectedPin: pinObject })}
onDeselect={() => this.setState({ selectedPin: undefined })}
>
<Image source={image} />
<MapView.Callout tooltip>
<PinCallout>
<Text style={{ color: 'black', alignSelf:'center', fontSize:16 }}>{pinObject.title}</Text>
</PinCallout>
</MapView.Callout>
</MapView.Marker>
);
});
}
renderFriends() {
const { friends } = this.props;
let copy = this.state.friendLocs;
// renders friends current locations
return _.map(copy, (coords, id) => {
return (
<MapView.Marker
coordinate={coords}
key={id}
title={friends[id].name}
>
<Image
source={{uri: friends[id].picture}}
style={styles.icon}
/>
</MapView.Marker>
);
}) ;
}
renderEditButton() {
const { friends, updatePins, updateRecent, deletePin, setTarget, targetPin, shareWithFriend } = this.props;
return (
<View style={styles.editButton}>
<PinEditButton
pin={this.state.selectedPin}
friends={friends}
shareWithFriend={shareWithFriend}
updatePins={updatePins}
updateRecent={updateRecent}
deletePin={deletePin}
setTarget={setTarget}
targetPin={targetPin}
hideButton={() => this.setState({selectedPin: undefined})}
/>
</View>
);
}
render() {
const { pins, getLocationToSave, recent, targetPin, friends } = this.props;
const { stateLocation } = this.state;
return (
<View style={styles.container}>
<MapView
ref="map"
showsUserLocation={true}
initialRegion={stateLocation}
style={styles.map}
showsCompass={true}
onLongPress={ (e) => {
let coords = e.nativeEvent.coordinate;
this.dropPin(coords);
}
}
>
{ Object.keys(pins).length !== 0 ? this.renderMarkers.call(this) : void 0 }
{ this.state.loaded === true ? this.renderFriends.call(this) : void 0 }
</MapView>
{ this.state.selectedPin ? this.renderEditButton.call(this) : void 0 }
<View style={styles.centerButton}>
<Button
style={[styles.bubble, styles.button]}
onPress={this.moveMapToUser.bind(this)}>
Center on me!
</Button>
</View>
</View>
)
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
},
map: {
flex: 1,
},
editButton: {
position: 'absolute',
backgroundColor: 'transparent',
left: Dimensions.get('window').width/2 - 75,
bottom: 100,
},
centerButton: {
position: 'absolute',
backgroundColor: 'transparent',
left: Dimensions.get('window').width/2 - 100,
bottom: 50,
borderRadius: 10,
},
bubble: {
backgroundColor: 'rgba(255,255,255,0.7)',
paddingHorizontal: 15,
paddingVertical: 10,
borderRadius: 10,
},
button: {
width: 200,
alignItems: 'center',
},
icon: {
width: 36,
height: 36,
borderRadius: 18,
backgroundColor: 'transparent',
},
});
<|start_filename|>app/actions/action_setTarget.js<|end_filename|>
import { SET_TARGET, CLEAR_TARGET } from '../constants/constants.js';
function setTarget(payload) {
return {
type: SET_TARGET,
payload
};
}
function clearTarget() {
return {
type: CLEAR_TARGET,
};
}
export default function(pin) {
return (dispatch) => {
dispatch(setTarget(pin));
};
}
<|start_filename|>app/components/ViewContainer.js<|end_filename|>
import React, { Component, View, StyleSheet } from 'react-native';
import AR from './AR';
import Map from './Map';
import DropNewPinButton from '../containers/container_dropNewPin';
import PinList from './PinList';
import Button from 'react-native-button';
import { ref, myCurrLoc } from '../lib/db/db';
const styles = StyleSheet.create({
ViewMenu: {
position: 'absolute',
top: 25,
right: 25,
flexDirection: 'row',
},
ViewButton: {
paddingVertical: 10,
paddingHorizontal: 12,
marginRight: 1,
color: '#fff',
fontWeight: 'bold',
backgroundColor: '#2e8b7d',
},
});
export default class ViewContainer extends Component {
constructor(props) {
super(props);
this.state = {
view: 'map'
};
}
componentWillMount() {
const { updatePins, updateRecent } = this.props;
updatePins();
updateRecent();
}
toggleView(view) {
this.setState({ view });
}
shareWithFriend( pin, friend ) {
const { user } = this.props;
if( typeof user.id !== 'string' ) {
console.log( 'shareWithFriend: user id must be a string' );
return null;
}
if( typeof friend.id !== 'string' ) {
console.log( 'shareWithFriend: friend id must be a string' );
return null;
}
if( typeof pin !== 'object' ) {
console.log( 'shareWithFriend: pin must be an object' );
return null;
}
if( typeof pin.id !== 'string' ) {
console.log( 'shareWithFriend: pin id must be a string' );
return null;
}
// Make a copy of the pin
var pinCopy = Object.assign({}, {alertedYet: false} ,pin);
// Set pin.friend to the userID of the person sending the pin
pinCopy.friend = user;
// Post the pin to the friend's firebase.
var friendPin = ref.child( friend.id ).child( 'pins' ).child( pin.id );
friendPin.set( pinCopy );
return true;
}
render() {
const { pins, recent, friends, user, targetPin, getLocationToSave, updatePins, updateRecent, deletePin, setTarget, clearTarget } = this.props;
return (
<View style={{flex: 1}}>
{ this.state.view === 'ar' ? <AR pins={pins} targetPin={targetPin} /> : void 0 }
{ this.state.view === 'map' ? <Map
shareWithFriend={this.shareWithFriend.bind(this)}
getLocationToSave={getLocationToSave}
// initialLoc={this.state.initialLoc}
pins = {pins}
recent = {recent}
updatePins={updatePins}
updateRecent={updateRecent}
deletePin={deletePin}
friends={friends}
targetPin={targetPin}
setTarget={setTarget}
clearTarget={clearTarget}
/> : void 0 }
{ this.state.view === 'list' ? <PinList
shareWithFriend={this.shareWithFriend.bind(this)}
deletePin={deletePin}
updatePins={updatePins}
updateRecent={updateRecent}
pins={pins}
friends={friends}
user={user}
targetPin={targetPin}
setTarget={setTarget}
/> : void 0 }
<View style={styles.ViewMenu}>
{ this.state.view != 'ar' ? <Button
style={styles.ViewButton}
onPress={this.toggleView.bind(this, 'ar')}
>
AR
</Button> : void 0 }
{ this.state.view != 'map' ? <Button
style={styles.ViewButton}
onPress={this.toggleView.bind(this, 'map')}
>
Map
</Button> : void 0 }
{ this.state.view != 'list' ? <Button
style={styles.ViewButton}
onPress={this.toggleView.bind(this, 'list')}
>
List
</Button> : void 0 }
</View>
<DropNewPinButton/>
</View>
);
}
}
<|start_filename|>app/actions/action_deletePin.js<|end_filename|>
import { UPDATE_RECENT, DELETE_PIN } from '../constants/constants.js';
import { userData, userRecent } from '../lib/db/db.js';
function deletePin(selectedPin) {
return {
type: DELETE_PIN,
payload: selectedPin
};
};
function updateRecent(payload) {
return {
type: UPDATE_RECENT,
payload
}
}
function deleteRecentPin(selectedPin, dispatch) {
let newRecent;
userRecent.once("value", (snapshot) => {
let recents = snapshot.val() || [], index;
for(var i = 0; i < recents.length; i++) {
if(recents[i] === selectedPin.id) {
index = i;
}
}
// delete only if the deleted pin is also in the recent pins (index can be 0, so check against undefined)
if (index !== undefined) {
recents.splice(index, 1);
userRecent.set(recents);
}
dispatch(updateRecent(recents));
});
}
export default function (pin) {
userData.child(pin.id).remove();
return (dispatch) => {
dispatch(deletePin(pin));
deleteRecentPin(pin, dispatch);
};
}
<|start_filename|>app/lib/orientation/locationMath.js<|end_filename|>
let Location = {};
Location.EARTH_RADIUS_IN_FEET = 5280 * 3961; // 5280 feet per mile * ~3961 miles (mean Earth radius)
// Location.SF_MAGNETIC (unassigned placeholder)
Location.locDegreesToKilometers = function ( degrees ) {
const kmPerNinety = 10000/90;
// 10,000 km per 90 degrees
return degrees * kmPerNinety;
}
Location.locDegreesToFeet = function ( degrees ) {
const ftPerKm = 3280.4;
// 3280.4 feet per kilometer
var km = this.locDegreesToKilometers( degrees );
return ftPerKm * km;
}
// Note: the trig in the standard haversine formula expects latitude/longitude in radians;
// callers passing degrees should convert first.
Location.haversine = function ( start, end, R ) {
var dlon = end.longitude - start.longitude;
var dlat = end.latitude - start.latitude;
// a = sin(dlat/2)^2 + cos(sLat) * cos( eLat ) * (sin(dlon/2))^2
var a = Math.pow( Math.sin( dlat/2 ), 2 ) + Math.cos( start.latitude ) * Math.cos( end.latitude ) * Math.pow( Math.sin( dlon/2 ), 2 );
// c = 2 * atan2( sqrt(a), sqrt(1-a) )
var c = 2 * Math.atan2( Math.sqrt( a ), Math.sqrt( 1-a ) );
// d = R * c where R is the radius of the earth.
var d = R * c;
return d;
}
Location.haversineFeet = function ( start, end ) {
return this.haversine( start, end, this.EARTH_RADIUS_IN_FEET );
}
Location.pythag = function( start, end ) {
var a = end.longitude - start.longitude;
var b = end.latitude - start.latitude;
var c = Math.sqrt( Math.pow( a, 2 ) + Math.pow( b, 2 ) );
return this.locDegreesToFeet( c );
}
Location.bearing = function( x, z ) {
return Math.atan2( z, x );
}
Location.relativeLocsInFeet = function ( start, end ) {
const title = end.title || undefined;
const id = end.id || undefined;
// toFixed returns strings, so coerce both offsets back to numbers
const z = -1 * Number( this.locDegreesToFeet( end.latitude - start.latitude ).toFixed(1) );
const x = Number( this.locDegreesToFeet( end.longitude - start.longitude ).toFixed(1) );
return { id, title, x, z };
}
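// Hedged usage sketch (not from the original source; coordinates are made-up placeholders):
//
//   const start = { latitude: 37.7835, longitude: -122.4089 };
//   const end = { latitude: 37.7840, longitude: -122.4095, title: 'Cafe', id: 'abc123' };
//   Location.pythag( start, end );              // approximate straight-line distance in feet
//   Location.relativeLocsInFeet( start, end );  // { id, title, x, z } offsets in feet for the AR scene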
export default Location;
<|start_filename|>app/actions/action_user.js<|end_filename|>
import { LOG_IN, LOG_OUT, UPDATE_FRIENDS} from '../constants/constants.js';
import { ref } from '../lib/db/db.js';
import { Actions } from 'react-native-router-flux';
import _ from 'underscore';
export const logIn = (payload) => {
return {
type: LOG_IN,
payload
};
};
export const updateFriends = (payload) => {
return{
type: UPDATE_FRIENDS,
payload
};
};
// We have an array of all friends, but Facebook does not return photos on the initial call,
// so this helper fetches each friend's photo separately.
const generateFriends = (friends, token, callback) =>{
let friendsObj = {};
let counter = 0;
_.each(friends, function(friend, index) {
let photoquery = "https://graph.facebook.com/v2.3/"+friend.id+"?fields=picture,name&access_token="+token;
fetch(photoquery)
.then((response) => response.json())
.then(function(responseData) {
const {id, name} = responseData;
let friendInfo = { id, name };
friendInfo.picture = responseData.picture.data.url;
friendsObj[id] = friendInfo;
counter++;
//when done iterating through object
if(counter === Object.keys(friends).length){
callback(friendsObj);
}
});
});
};
export const firebase_check = (userCredentials) => {
let {userId, token} = userCredentials;
let api = "https://graph.facebook.com/v2.3/"+userId+"?fields=name,email,friends,picture&access_token="+token;
let friendcall = "https://graph.facebook.com/v2.3/"+userId+"?fields=name,friends&access_token="+token;
let friendsObj = {};
const checkIfUserExists = (userId, callback) => {
ref.once('value', function(snapshot) {
let userExistsBool = snapshot.hasChild(userId);
callback(userExistsBool);
});
};
return(dispatch) => {
checkIfUserExists(userId, (userExist) => {
if(!userExist) {
let userInfo={};
userInfo.id = userId;
//fetch the other info
return fetch(api)
.then((response) => response.json())
.then((responseData)=> {
userInfo.name = responseData.name;
userInfo.email = responseData.email;
userInfo.picture = responseData.picture.data.url;
//pushes all gathered info to the database
let newUser = ref.child(userId).set(userInfo);
//generates friends for new user
generateFriends(responseData.friends.data, token, (allFriends)=> {
dispatch(updateFriends(allFriends));
});
//logs new user in
dispatch(logIn(userInfo));
});
} else {
//this logs in user in redux state based on their existing db info
ref.child(userId).on("value", function(snapshot) {
let found = snapshot.val();
const { id, name, email, picture } = found;
let obj = {name, email, id, picture};
dispatch(logIn(obj));
});
//this api call to fb updates friends list
return fetch(friendcall)
.then((response) => response.json())
.then((responseData) => {
let friends = responseData.friends.data;
generateFriends(friends, token, (allFriends) => {
dispatch(updateFriends(allFriends));
});
});
}
});
};
};
export const logOut = () => {
return {
type: LOG_OUT
};
};
<|start_filename|>app/actions/action_updateRecent.js<|end_filename|>
import { UPDATE_RECENT } from '../constants/constants.js';
import { userRecent } from '../lib/db/db.js';
function updateRecent(payload) {
return {
type: UPDATE_RECENT,
payload
};
}
export default function() {
return (dispatch) => {
userRecent.once("value", function(snap) {
dispatch(updateRecent(snap.val()));
});
};
}
<|start_filename|>app/components/FriendList.js<|end_filename|>
import React, {
Component,
View,
ListView,
StyleSheet,
Text
} from 'react-native';
import Button from 'react-native-button';
import { Actions } from 'react-native-router-flux';
import FriendListItem from './FriendListItem';
export default class FriendList extends Component {
constructor(props) {
super(props);
this.state = {
// create the data source
dataSource: new ListView.DataSource({
rowHasChanged: (r1, r2) => r1 !== r2
})
};
}
componentWillReceiveProps(nextProps) {
this.setState({
dataSource: this.state.dataSource.cloneWithRows(nextProps.friends)
});
}
componentWillMount() {
this.setState({
dataSource: this.state.dataSource.cloneWithRows(this.props.friends)
});
}
renderItem(friend) {
const { onPress } = this.props;
return (
// pass down pin info to FriendListItem
<FriendListItem
onPress={onPress}
friend={friend}
/>
);
}
render() {
return (
<View style={style.container}>
<View style={style.status}>
<Button
style={style.button}
onPress={ () => { Actions.pop() }}
>Back
</Button>
<View style={style.title}>
<Text style={style.text}>Friends</Text>
</View>
<View style={{flex:1}}></View>
</View>
<ListView
style={style.list}
dataSource={this.state.dataSource}
renderRow={this.renderItem.bind(this)}
/>
</View>
);
}
}
const style = StyleSheet.create({
container: {
flex: 1,
},
list: {
flex: 9,
},
status: {
flex: 1,
alignSelf: 'stretch',
alignItems: 'center',
flexDirection: 'row',
backgroundColor: 'white',
borderRadius: 5,
},
title: {
flex: 4,
alignItems: 'center',
marginTop: 20,
},
text: {
fontSize: 26,
},
button: {
flex: 1,
marginTop: 28,
marginLeft: 15,
alignItems: 'center',
}
})
<|start_filename|>app/lib/orientation/orientationHandler.js<|end_filename|>
const HANDLE_ORIENTATION = `
<script>
var degreeToRad = function( degree ) {
return Math.PI / 180 * degree;
}
var leftOrRight = function( A, B ) {
return ( -1 * A.x * B.z ) + ( A.z * B.x ) > 0;
}
var angleFromCamera = function( vector ) {
return camera.getWorldDirection().angleTo( vector );
}
var inFrustum = function( vector ) {
return frustum.containsPoint( vector );
}
var getDistanceFromCamera = function( vector ) {
return Math.sqrt( Math.pow( vector.x, 2 ) + Math.pow( vector.z, 2 ) );
}
var renderDirection = function( vector ) {
$(".left").addClass( "hidden" );
$(".right").addClass( "hidden" );
if( !inFrustum( vector ) ) {
if( leftOrRight( camera.getWorldDirection(), vector ) ) {
$(".left").removeClass( "hidden" );
} else {
$(".right").removeClass( "hidden" );
}
}
}
var updateHUDForTarget = function( targetLoc ) {
renderDirection( targetLoc.position );
$("#target").text( targetLoc.title + ": " + getDistanceFromCamera( targetLoc.position ).toFixed(0) + " feet" );
}
if (window.DeviceOrientationEvent) {
// Listen for the deviceorientation event and handle the raw data
window.addEventListener('deviceorientation', function( e ) {
var compassdir;
if( e.webkitCompassHeading) {
// Apple works only with this, alpha doesn't work
compassdir = e.webkitCompassHeading + window.orientation;
}
else compassdir = e.alpha;
// Set camera's heading
camera.rotation.y = -degreeToRad( compassdir);
// Configure frustum
camera.updateMatrix(); // make sure camera's local matrix is updated
camera.updateMatrixWorld(); // make sure camera's world matrix is updated
camera.matrixWorldInverse.getInverse( camera.matrixWorld );
frustum.setFromMatrix( new THREE.Matrix4().multiply( camera.projectionMatrix, camera.matrixWorldInverse ) );
$("#alpha").text( "Heading: " + compassdir );
updateHUDForTarget( meshes[targetLocIdx] );
}, false );
}
</script>
`;
export default HANDLE_ORIENTATION;
<|start_filename|>app/components/PinEditButton.js<|end_filename|>
import React, { Component, AlertIOS , View, StyleSheet} from 'react-native';
import Button from 'react-native-button';
import { Actions } from 'react-native-router-flux';
export default class PinEditButton extends Component{
constructor(props) {
super(props);
}
editTitle(value) {
const { pin, updatePins, updateRecent } = this.props;
updatePins(pin, value);
updateRecent();
}
render() {
const { pin, deletePin, friends, hideButton, setTarget, shareWithFriend } = this.props;
return(
<Button
style={[styles.bubble, styles.button]}
onPress={ () => {
hideButton();
AlertIOS.prompt(
pin.title,
'Editing Pin',
[{
text: 'Cancel',
style: 'cancel',
},
{
text: 'Edit Title',
onPress: this.editTitle.bind(this)
},
{
text: 'Share',
onPress: () => { Actions.friends({ onPress: shareWithFriend.bind( null, pin ), friends: friends }) },
},
{
text: 'Set Target',
onPress: () => {
setTarget(pin)
},
},
{
text: 'Delete',
onPress: () => {
deletePin(pin);
}
}],
'plain-text'
)}}>
EDIT PIN</Button>
)
}
}
const styles = StyleSheet.create({
bubble: {
backgroundColor: 'rgba(255,255,255,0.7)',
paddingHorizontal: 15,
paddingVertical: 10,
borderRadius: 10,
},
button: {
width: 150,
alignItems: 'center',
},
})
<|start_filename|>app/components/PinList.js<|end_filename|>
import React, {
Component,
View,
Text,
ListView,
StyleSheet
} from 'react-native';
import PinListItem from './PinListItem.js';
import * as geoAction from '../lib/orientation/utils';
export default class PinList extends Component {
constructor(props) {
super(props);
this.state = {
// create the data source
currLoc: {
latitude: 37.7835551,
longitude: -122.4089013,
},
dataSource: new ListView.DataSource({
rowHasChanged: (r1, r2) => r1 !== r2
})
};
}
componentWillReceiveProps(nextProps) {
this.setState({
dataSource: this.state.dataSource.cloneWithRows(nextProps.pins)
});
}
componentWillMount() {
var self = this;
geoAction.getCurrent((loc) => {
self.setState({
currLoc: loc
});
});
this.watchID = geoAction.setWatch((loc)=> {
self.setState({
currLoc: loc
});
self.redraw();
});
this.setState({
dataSource: this.state.dataSource.cloneWithRows(this.props.pins)
});
}
componentWillUnmount() {
geoAction.clearWatch(this.watchID);
}
redraw() {
let newPins = {};
for( var key in this.props.pins ) {
newPins[key] = Object.assign({}, this.props.pins[key]);
}
this.setState({
dataSource: this.state.dataSource.cloneWithRows(newPins)
});
}
renderItem(pin) {
const { updatePins, updateRecent, deletePin, setTarget, targetPin, shareWithFriend, friends, user } = this.props;
return (
// pass down pin info to PinListItem
<PinListItem
shareWithFriend={shareWithFriend}
updatePins={updatePins}
updateRecent={updateRecent}
deletePin={deletePin}
targetPin={targetPin}
setTarget={setTarget}
currLoc={this.state.currLoc}
redraw={this.redraw.bind(this)}
pin={pin}
friends={friends}
user={user}
/>
);
}
render() {
return (
<View style={style.container}>
<View style={style.status}>
<View style={style.title}>
<Text style={style.text}>My Pins</Text>
</View>
</View>
<ListView
style={style.list}
dataSource={this.state.dataSource}
renderRow={this.renderItem.bind(this)}
/>
</View>
);
}
}
const style = StyleSheet.create({
container: {
flex: 1,
flexDirection: 'column',
},
status: {
flex: 1,
alignSelf: 'stretch',
alignItems: 'center',
backgroundColor: 'white',
flexDirection: 'column',
borderRadius: 5,
},
title: {
flex: 1,
flexDirection: 'row',
alignSelf: 'center',
marginTop: 20,
},
text: {
fontSize: 26,
alignSelf: 'center',
color: '#2e8b7d',
fontWeight: 'bold',
},
list: {
flex: 9,
}
})
<|start_filename|>app/lib/db/db.js<|end_filename|>
const Firebase = require('firebase');
export const ref = new Firebase("https://interruptedlobster.firebaseio.com/");
export const currLoc = ref.child('currLoc');
export let myCurrLoc = currLoc.child('anonymous');
export let user = ref.child('anonymous');
export let userData = user.child('pins');
export let userRecent = user.child('recent');
export const changeUser = function( uid ) {
if( typeof uid !== 'string' ) {
console.error( 'uid must have typeof string in changeUser( uid )' );
return;
}
user = ref.child( uid );
myCurrLoc = currLoc.child( uid );
userData = user.child( 'pins' );
userRecent = user.child( 'recent' );
}
<|start_filename|>app/lib/orientation/utils.js<|end_filename|>
import React from 'react-native';
export const getCurrent = (callback) => {
navigator.geolocation.getCurrentPosition(
(position) => {
var coords = {};
coords.longitude = position.coords.longitude;
coords.latitude = position.coords.latitude;
coords.longitudeDelta = 0.005;
coords.latitudeDelta = 0.005;
callback(coords);
},
(error) => {
alert(error.message);
},
{enableHighAccuracy: true, timeout: 20000, maximumAge: 1000}
);
};
export const setWatch = (callback) => {
return navigator.geolocation.watchPosition(
(position) => {
var coords = {};
coords.longitude = position.coords.longitude;
coords.latitude = position.coords.latitude;
coords.longitudeDelta = 0.005;
coords.latitudeDelta = 0.005;
callback(coords);
}
);
};
export const clearWatch = (watchID) => {
navigator.geolocation.clearWatch(
watchID
);
};
<|start_filename|>app/actions/action_dropNewPin.js<|end_filename|>
import { DROP_NEW_PIN, UPDATE_RECENT, SET_RECENT} from '../constants/constants';
import { userData, userRecent } from '../lib/db/db';
import setTarget from './action_setTarget';
import * as geoAction from '../lib/orientation/utils';
function dropNewPin(payload, id) {
return {
type: DROP_NEW_PIN,
id,
payload
};
}
function setRecent(payload) {
return {
type: SET_RECENT,
payload
};
}
function checkRecent(current = [], id) {
var updated;
if(!Array.isArray(current)) {
return [id];
}
if(current.length >= 10) {
updated = current.slice(1);
updated.push(id);
return updated;
} else {
current = current.concat(id);
return current;
}
}
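// Worked example (illustrative only): checkRecent keeps at most the 10 most recent pin ids.
//   checkRecent(['a', 'b', 'c'], 'd')  -> ['a', 'b', 'c', 'd']
//   checkRecent(undefined, 'd')        -> ['d']
//   With 10 ids already stored, the oldest id is dropped before the new one is appended.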
export default function getLocationToSave(location, current, pinTitle) {
function getLocationHelper(loc, title, dispatch){
let recent;
loc.title = title;
// this assigns pushedObj to added pin object while adding to db
let pushedObj = userData.push(loc);
loc.id = pushedObj.key();
// this adds the 'key' as a key-value pair in the pin object
pushedObj.update({"id": pushedObj.key()});
dispatch(dropNewPin(loc, pushedObj.key()));
// set the target to the most recently dropped pin
dispatch(setTarget(loc));
// this updates the recent pins state
recent = checkRecent(current, loc.id);
dispatch(setRecent(recent));
userRecent.set(recent);
}
return (dispatch) => {
if(!location) {
geoAction.getCurrent((loc)=>{
getLocationHelper(loc, "My Current Location", dispatch);
});
} else {
if(pinTitle === '') {
pinTitle = 'New Totem';
}
getLocationHelper(location, pinTitle, dispatch );
}
};
}
<|start_filename|>app/components/PinCallout.js<|end_filename|>
import React, {
StyleSheet,
View,
Text
} from 'react-native';
export const PinCallout = React.createClass({
render() {
return (
<View style={styles.container}>
<View style={styles.bubble}>
<View style={styles.amount}>
{this.props.children}
</View>
</View>
</View>
);
},
});
var styles = StyleSheet.create({
container: {
flexDirection: 'column',
alignSelf: 'flex-start',
},
bubble: {
width: 140,
flexDirection: 'row',
alignSelf: 'flex-start',
backgroundColor: 'white',
opacity: 0.8,
paddingHorizontal: 8,
paddingVertical: 1,
borderRadius: 4,
borderColor: 'black',
borderWidth: 0.3,
},
amount: {
flex: 0.5,
},
});
<|start_filename|>app/lib/threejs/marker.js<|end_filename|>
const THREE_RENDER_MARKER = `
<script>
var camera, scene, renderer;
var frustum;
var meshes = [];
var mesh;
var hemiLight;
var fovPortrait = 53;
var fovLandscape = 37.5;
init();
animate();
function init() {
/* CAMERA */
camera = new THREE.PerspectiveCamera( fovPortrait, window.innerWidth / window.innerHeight, 1, 5280 );
frustum = new THREE.Frustum();
scene = new THREE.Scene();
/* LIGHTS */
hemiLight = new THREE.HemisphereLight( 0x2194ce, 0x2194ce, 1.25 );
hemiLight.groundColor.setHSL( 0.6, 1, 0.6 );
hemiLight.color.setHSL( 0.095, 1, 1.0 );
hemiLight.position.set( 0, 500, 0 );
scene.add( hemiLight );
/* MARKER */
var geometry = new THREE.Geometry();
var height = 25;
var heightsplit = 0.75;
var width = 10;
geometry.vertices.push(
new THREE.Vector3( 0, 0, 0 ),
new THREE.Vector3( width/2, height * heightsplit, 0 ),
new THREE.Vector3( 0, height * heightsplit, -1 * width/2 ),
new THREE.Vector3( -1 * width/2, height * heightsplit, 0 ),
new THREE.Vector3( 0, height * heightsplit, width/2 ),
new THREE.Vector3( 0, height, 0 )
);
geometry.faces.push(
new THREE.Face3( 0, 1, 2 ),
new THREE.Face3( 0, 2, 3 ),
new THREE.Face3( 0, 3, 4 ),
new THREE.Face3( 4, 1, 0 ),
new THREE.Face3( 5, 2, 1 ),
new THREE.Face3( 5, 3, 2 ),
new THREE.Face3( 4, 3, 5 ),
new THREE.Face3( 1, 4, 5 )
);
geometry.computeFaceNormals();
geometry.computeVertexNormals();
geometry.computeBoundingBox();
var material = new THREE.MeshPhongMaterial( { specular: 0x111111, color: 0xffffff, shininess: 100, reflectivity: 100, shading: THREE.FlatShading, side: THREE.FrontSide } );
mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
mesh.position.z = -50;
/* RENDER SCENE */
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
// Adjust the aspect ratio for the new size of the window.
camera.aspect = window.innerWidth / window.innerHeight;
// TODO: Calculate FOV programatically
camera.fov = camera.aspect > 1 ? fovLandscape : fovPortrait;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
// Add some spin for each mesh
meshes.forEach( function( mesh ) {
mesh.rotation.y += 0.01;
})
renderer.render( scene, camera );
}
</script>
`;
export default THREE_RENDER_MARKER;
<|start_filename|>app/components/FriendListItem.js<|end_filename|>
import React, {
Component,
Image,
Text,
TouchableHighlight,
View,
StyleSheet
} from 'react-native';
import { Actions } from 'react-native-router-flux';
export default class FriendListItem extends Component {
constructor(props) {
super(props);
}
render() {
const { friend } = this.props;
return (
<TouchableHighlight
onPress={() => {
this.props.onPress( friend );
// TODO: And then make the friend list go away.
Actions.pop();
}}
>
<View style={style.container}>
<Image
source={{ uri: friend.picture }}
style={style.picture}
/>
<Text style={style.text}>
{friend.name}
</Text>
</View>
</TouchableHighlight>
);
}
}
const style = StyleSheet.create({
container: {
flex: 1,
backgroundColor: 'white',
borderRadius: 4,
margin: 4,
padding: 6,
flexDirection: 'row',
},
text: {
alignSelf: 'center',
fontSize: 24,
left: 15,
},
picture: {
height: 50,
width: 50,
left: 5,
borderRadius: 5,
alignSelf: 'flex-start',
}
});
<|start_filename|>app/containers/container_FBLogin.js<|end_filename|>
import React, {
Component,
StyleSheet,
Image,
Text,
View,
} from 'react-native';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import * as userActions from '../actions/action_user';
import { Actions } from 'react-native-router-flux';
import { changeUser } from '../lib/db/db';
import FBLogin from 'react-native-facebook-login';
class LogIn extends Component {
constructor(props){
super(props);
}
render() {
const { user, action } = this.props;
return (
<View style={styles.loginContainer}>
<FBLogin style={{ marginBottom: 10, }}
permissions={["email","user_friends"]}
onLogin={function(data){
//upon successful log in to fb
changeUser(data.credentials.userId)
Actions.view();
action.firebase_check(data.credentials);
}}
onLogout={function(){
action.logOut();
}}
onLoginFound={function(data){
//if user was already signed in this will exec
changeUser(data.credentials.userId)
Actions.view();
action.firebase_check(data.credentials);
}}
onLoginNotFound={function(){
//if user has not signed in since last session
action.logOut();
}}
onError={function(data){
console.log(data);
}}
onCancel={function(){
//when user hits cancel on the Facebook sign-in
console.log("User cancelled.");
}}
onPermissionsMissing={function(data){
console.log(data);
}}
/>
</View>
);
}
}
var styles = StyleSheet.create({
loginContainer: {
marginTop: 150,
flex: 1,
alignItems: 'center',
justifyContent: 'center',
},
bottomBump: {
marginBottom: 15,
},
});
function mapStateToProps(state) {
return {
user: state.user
}
}
function mapDispatchToProps(dispatch) {
return {
action: bindActionCreators(userActions, dispatch)
}
}
export default connect(mapStateToProps, mapDispatchToProps)(LogIn);
<|start_filename|>app/containers/container_dropNewPin.js<|end_filename|>
//this should add ownprops from parent map
import React, { Component, StyleSheet, Dimensions } from 'react-native';
import getLocationToSave from '../actions/action_dropNewPin.js';
import Button from 'react-native-button';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import { userData } from '../lib/db/db.js';
const styles = StyleSheet.create({
DropNewPinButton: {
position: 'absolute',
left: Dimensions.get('window').width / 2 - 90,
bottom: 0,
width: 180,
color: '#fff',
textAlign: 'center',
padding: 10,
backgroundColor: '#2e8b7d',
}
});
class DropNewPinButton extends Component {
constructor(props) {
super(props);
this.state= {
long: null,
lat: null,
title: ''
};
}
handleClick() {
const { getLocationToSave, recent } = this.props;
getLocationToSave(null, recent);
}
render() {
return (
<Button
style={styles.DropNewPinButton}
onPress={this.handleClick.bind(this)}>
Drop New Pin
</Button>
);
}
}
function mapStateToProps(state) {
return {
pins: state.pins,
recent: state.recent
};
}
export default connect(mapStateToProps, { getLocationToSave })(DropNewPinButton);
<|start_filename|>app/lib/orientation/normaliser.js<|end_filename|>
const DEVICE_ORIENTATION_NORMALISER = `
<script>
/**
deviceapi-normaliser.js
Author: <NAME>
Date: 27/03/2013
Version: 0.5
This file is licensed under a BSD licence as per the Licence.
**/
var mo = {
_browser: null,
_os: null,
_ua: navigator.userAgent,
normalise: false,
orientation: false,
motion: false,
init: function() {
// Initialises the library to do things like device checking etc.
var orientation = false;
var motion = false
// first pass.
if (window.DeviceOrientationEvent) {
orientation = true;
}
if (window.DeviceMotionEvent) {
motion = true;
}
if (orientation && motion) {
// Could be iOS, Android Stock or FF or blackberry
if (this._ua.match(/Firefox/i) && this._ua.match(/Android/i)) {
// this is definitive
this._os = "Android";
this._browser = "Firefox";
} else if (this._ua.match(/Android/i)){
// Stock Android
this._os = "Android";
this._browser = "Stock";
} else if (this._ua.match(/Blackberry|RIM/i)){
//blackberry
this._os = "Blackberry";
this._browser = "Stock";
} else {
this._os = "iOS";
this._browser = "webkit";
}
} else if (orientation && !motion) {
// It's chrome but is it desktop or mobile?
this._browser = "Chrome";
if (this._ua.match(/Android/i)){
this._os = "Android";
} else {
this._os = "Desktop";
}
} else if (!orientation) {
// this is something very odd
this._browser = "Unknown";
this._os = "Unknown";
}
// TODO - actually set these properly.
this.orientation = orientation;
this.motion = motion;
},
}
// set up some constants
var accel_multi = 1; // used to normalise the accel values if firefox
$(function() {
;
});
function deviceMotion(e) {
// we need to normalise the values, safari will just return
// as they are but ff will multiply by gravity.
this.accelerationIncludingGravity = new Object();
this.accelerationIncludingGravity.x = e.accelerationIncludingGravity.x;
this.accelerationIncludingGravity.y = e.accelerationIncludingGravity.y;
this.accelerationIncludingGravity.z = e.accelerationIncludingGravity.z;
this.acceleration = new Object();
if (e.acceleration !== null) {
this.acceleration.x = e.acceleration.x;
this.acceleration.y = e.acceleration.y;
this.acceleration.z = e.acceleration.z;
} else {
this.acceleration.x = null;
this.acceleration.y = null;
this.acceleration.z = null;
}
this.rotationRate = new Object();
if (e.rotationRate !== null) {
this.rotationRate.alpha = e.rotationRate.alpha;
this.rotationRate.beta = e.rotationRate.beta;
this.rotationRate.gamma = e.rotationRate.gamma;
} else {
this.rotationRate.alpha = null;
this.rotationRate.beta = null;
this.rotationRate.gamma = null;
}
this.interval = null;
if (e.interval !== null) { this.interval = e.interval; }
return (this);
}
function deviceOrientation(e) {
// normalise the values for the orientation event.
// TODO:
// these values need further normalisation because I know safari
// breaks them but haven't got a device to test with.
this.gamma = e.gamma;
this.beta = e.beta;
this.alpha = e.alpha; // compass
this.absolute = false;
if (e.absolute !== null) { this.absolute = e.absolute; }
return(this);
}
</script>
`;
export default DEVICE_ORIENTATION_NORMALISER;
<|start_filename|>app/reducers/reducer_recent.js<|end_filename|>
import {UPDATE_RECENT, SET_RECENT} from '../constants/constants.js';
const initialState = [0,1,2];
export default function(state = initialState, action) {
switch(action.type) {
case SET_RECENT:
return action.payload;
case UPDATE_RECENT:
return action.payload;
default:
return state;
}
}
<|start_filename|>app/reducers/reducer_target.js<|end_filename|>
import { SET_TARGET, CLEAR_TARGET } from '../constants/constants.js';
var initialState = {id:0};
export default function(state = initialState, action) {
switch(action.type){
case SET_TARGET:
return action.payload;
case CLEAR_TARGET:
return initialState;
default:
return state;
}
}
| jake-shasteen/findAR |
<|start_filename|>main.go<|end_filename|>
package main
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"os"
echo "github.com/labstack/echo/v4"
middleware "github.com/labstack/echo/v4/middleware"
"github.com/pkg/errors"
"github.com/rs/zerolog"
astronomer "github.com/ullaakut/astronomer/pkg/signature"
"github.com/ullaakut/astronomer/pkg/trust"
)
var log *zerolog.Logger
type astroBadge struct {
SchemaVersion int `json:"schemaVersion"`
Label string `json:"label"`
Message string `json:"message"`
Color string `json:"color"`
}
func storeReport(report *astronomer.SignedReport) error {
data, err := json.Marshal(report)
if err != nil {
return fmt.Errorf("unable to marshal report: %v", err)
}
err = ioutil.WriteFile(fmt.Sprintf("reports/%s-%s", report.RepositoryOwner, report.RepositoryName), data, 0655)
if err != nil {
return err
}
return nil
}
func fetchReport(repoOwner, repoName string) (*astronomer.SignedReport, error) {
report := &astronomer.SignedReport{}
data, err := ioutil.ReadFile(fmt.Sprintf("reports/%s-%s", repoOwner, repoName))
if err != nil {
return nil, fmt.Errorf("unable to read report: %v", err)
}
err = json.Unmarshal(data, report)
if err != nil {
return nil, fmt.Errorf("unable to marshal report: %v", err)
}
return report, nil
}
func percentToLetterGrade(percent float64) string {
switch {
case percent > 0.8:
return "A"
case percent > 0.6:
return "B"
case percent > 0.4:
return "C"
case percent > 0.2:
return "D"
default:
return "E"
}
}
func handleReport(ctx echo.Context) error {
var signedReport astronomer.SignedReport
err := ctx.Bind(&signedReport)
if err != nil {
err = errors.Wrap(err, "could not parse blog post from request body")
log.Error().Err(err).Msg("ALERT")
return echo.NewHTTPError(http.StatusBadRequest, err.Error())
}
logger := log.With().
Str("owner", signedReport.RepositoryOwner).
Str("name", signedReport.RepositoryName).
Float64("trust", signedReport.Factors[trust.Overall].TrustPercent).
Logger()
err = astronomer.Check(&signedReport)
if err != nil {
err = errors.Wrap(err, "invalid signature for report")
logger.Error().Err(err).Msg("ALERT")
return echo.NewHTTPError(http.StatusUnauthorized, err.Error())
}
err = storeReport(&signedReport)
if err != nil {
err = errors.Wrap(err, "unable to write report to filesystem")
logger.Error().Err(err).Msg("ALERT")
return echo.NewHTTPError(http.StatusInternalServerError, err.Error())
}
logger.Info().Msg("Report stored successfully")
return ctx.JSON(http.StatusCreated, signedReport)
}
func handleBadge(ctx echo.Context) error {
repoOwner := ctx.QueryParam("owner")
if len(repoOwner) == 0 {
return echo.NewHTTPError(http.StatusBadRequest, "owner not set in request context")
}
repoName := ctx.QueryParam("name")
if len(repoName) == 0 {
return echo.NewHTTPError(http.StatusBadRequest, "repository name not set in request context")
}
badgeData := &astroBadge{
SchemaVersion: 1,
Label: "astro rating",
}
report, err := fetchReport(repoOwner, repoName)
if err != nil {
badgeData.Color = "inactive"
badgeData.Message = "unavailable"
} else {
if report.Factors[trust.Overall].TrustPercent > 0.6 {
badgeData.Color = "success"
} else if report.Factors[trust.Overall].TrustPercent > 0.4 {
badgeData.Color = "yellow"
} else {
badgeData.Color = "red"
}
badgeData.Message = percentToLetterGrade(report.Factors[trust.Overall].TrustPercent)
}
return ctx.JSON(http.StatusOK, badgeData)
}
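// Example exchange (illustrative values only) for the shields.io endpoint badge produced above:
//
//	GET /shields?owner=someOwner&name=someRepo
//	{"schemaVersion":1,"label":"astro rating","message":"B","color":"success"}
//
// When no stored report exists for the repository, the handler falls back to
// {"schemaVersion":1,"label":"astro rating","message":"unavailable","color":"inactive"}.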
func main() {
e := echo.New()
e.Use(middleware.Recover())
e.Use(middleware.Gzip())
// Use zerolog for debugging HTTP requests
log = NewZeroLog(os.Stderr)
e.Logger.SetLevel(5) // Disable default logging
e.Use(HTTPLogger(log))
e.POST("/", handleReport)
e.GET("/shields", handleBadge)
e.Logger.Fatal(e.Start(":80"))
}
| Ullaakut/astrolab |
<|start_filename|>GPS_IMU_Kalman_Filter/src/geo_ned.hpp<|end_filename|>
//
// geo_ned.hpp
// ExtendedKalmanFilter
//
// Created by Karan on 4/6/18.
// Copyright © 2018 Karan. All rights reserved.
//
#ifndef geo_ned_hpp
#define geo_ned_hpp
#include <stdio.h>
#include "Eigen/Dense"
#include <fstream>
#include <sstream>
#include <stdlib.h>
#include <vector>
#include <cmath>
using Eigen::MatrixXd;
using Eigen::VectorXd;
namespace geodectic_converter
{
static double kSemimajorAxis = 6378137;
static double kSemiminorAxis = 6356752.3142;
static double kFirstEccentricitySquared = 6.69437999014 * 0.001;
static double kSecondEccentricitySquared = 6.73949674228 * 0.001;
static double kFlattening = 1 / 298.257223563;
class GeodecticConverter
{
public:
GeodecticConverter();
bool isInitialised();
void getReference(double &latitude, double &longitude, double &altitude);
void intializeReference(const double latitude, const double longitude, const double altitude);
void geodetic2Ecef(const double latitude, const double longitude, const double altitude, double* x,
double* y, double* z);
void ecef2Geodetic(const double x, const double y, const double z, double* latitude,
double* longitude, double* altitude);
void ecef2Ned(const double x, const double y, const double z, double* north, double* east, double* down);
void ned2Ecef(const double north, const double east, const double down, double* x, double* y, double* z);
void geodetic2Ned(const double latitude, const double longitude, const double altitude, double* north, double* east, double* down);
void ned2Geodetic(const double north, const double east, const double down, double* latitude, double* longitude, double* altitude);
void geodetic2Enu(const double latitude, const double longitude, const double altitude, double* east, double* north, double* up);
void enu2Geodetic(const double east, const double north, const double up, double* latitude, double* longitude, double* altitude);
private:
bool _have_reference;
double _initial_latitude;
double _initial_longitude;
double _initial_altitude;
double _initial_ecef_x;
double _initial_ecef_y;
double _initial_ecef_z;
Eigen::Matrix3d ned_to_ecef_matrix_;
Eigen::Matrix3d ecef_to_ned_matrix_;
inline
double rad2Deg(const double radians)
{
return (radians / M_PI) * 180.0;
}
inline
double deg2Rad(const double degrees)
{
return (degrees / 180.0) * M_PI;
}
inline Eigen::Matrix3d nRe(const double lat_radians, const double lon_radians)
{
const double sLat = sin(lat_radians);
const double sLon = sin(lon_radians);
const double cLat = cos(lat_radians);
const double cLon = cos(lon_radians);
Eigen::Matrix3d ret;
ret(0, 0) = -sLat * cLon;
ret(0, 1) = -sLat * sLon;
ret(0, 2) = cLat;
ret(1, 0) = -sLon;
ret(1, 1) = cLon;
ret(1, 2) = 0.0;
ret(2, 0) = cLat * cLon;
ret(2, 1) = cLat * sLon;
ret(2, 2) = sLat;
return ret;
}
};
};
#endif /* geo_ned_hpp */
<|start_filename|>GPS_IMU_Kalman_Filter/src/utils.cpp<|end_filename|>
//
// utils.cpp
// EKF
//
// Created by Karan on 4/9/18.
// Copyright © 2018 Karan. All rights reserved.
//
#include "utils.hpp"
Eigen::MatrixXd calculate_jacobian(const Eigen::VectorXd& v, const double dt)
{
// Assumes Jacobian is 6 x 6
Eigen::MatrixXd JA = Eigen::MatrixXd::Zero(6,6);
// Assumes the size of input vector is 6
const double psi = v(2);
const double velocity = v(3);
const double psi_dot = v(4);
const double THRESHOLD = 0.001;
if (fabs(psi_dot) > THRESHOLD) // Avoid dividing by zero; handle negative yaw rates as well
{
const double turn_radius = (velocity/psi_dot);
const double psi_dot_inverse = 1/psi_dot;
const double pdotp = dt * psi_dot + psi;
const double r13 = turn_radius * (cos(pdotp) - cos(psi)); // d(x')/d(psi) for the CTRV model
const double r14 = psi_dot_inverse * (-sin(psi) + sin(pdotp));
const double r15 = dt * turn_radius * cos(pdotp) - (turn_radius/psi_dot) * (-sin(psi) + sin(pdotp));
const double r23 = turn_radius * (-sin(psi) + sin(pdotp));
const double r24 = psi_dot_inverse * (cos(psi) - cos(pdotp));
const double r25 = dt * turn_radius * sin(pdotp) - (turn_radius/psi_dot) * (cos(psi) - cos(pdotp));
JA << 1.0, 0.0, r13, r14, r15, 0.0,
0.0, 1.0, r23, r24, r25, 0.0,
0.0, 0.0, 1.0, 0.0, dt, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, dt,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0 ;
}
else
{
// Near-zero yaw rate: use the straight-line (constant velocity) limit instead of leaving JA all zeros
JA << 1.0, 0.0, -velocity * dt * sin(psi), dt * cos(psi), 0.0, 0.0,
0.0, 1.0, velocity * dt * cos(psi), dt * sin(psi), 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, dt, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, dt,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0;
}
return JA;
}
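/*
For reference, the 6x6 Jacobian assembled above corresponds to the CTRV state
[x, y, psi, v, psi_dot, a] whose prediction step (see EKF::updateJA) is
    x' = x + (v / psi_dot) * ( sin(psi + psi_dot * dt) - sin(psi))
    y' = y + (v / psi_dot) * (-cos(psi + psi_dot * dt) + cos(psi))
so r13..r25 are the partial derivatives of x' and y' with respect to psi, v and psi_dot.
*/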
<|start_filename|>GPS_IMU_Kalman_Filter/src/utils.hpp<|end_filename|>
//
// utils.hpp
// ExtendedKalmanFilter
//
// Created by Karan on 4/6/18.
// Copyright © 2018 Karan. All rights reserved.
//
#ifndef utils_hpp
#define utils_hpp
#include "Eigen/Dense"
Eigen::MatrixXd calculate_jacobian(const Eigen::VectorXd& v, const double dt);
#endif /* utils_hpp */
<|start_filename|>GPS_IMU_Kalman_Filter/src/fusion.cpp<|end_filename|>
//
// fusion.cpp
// Fusion
//
// Created by Karan on 4/9/18.
// Copyright © 2018 Karan. All rights reserved.
//
#include <iostream>
#include "fusion.hpp"
Fusion::Fusion(double max_acceleration, double max_turn_rate, double max_yaw_accel, double varGPS,
double varSpeed, double varYaw, double varAcc, double xOffset, double yOffset, bool verbose)
: _initialized(false), _max_turn_rate(max_turn_rate), _max_acceleration(max_acceleration), _max_yaw_accel(max_yaw_accel), _xOffset(xOffset),
_yOffset(yOffset), _KF(verbose)
{
// Initialize initial uncertainity P0
_P = Eigen::MatrixXd(_n, _n);
_P << 1000.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1000.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1000.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1000.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1000.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1000.0;
_R = Eigen::MatrixXd(5, 5); //Assuming 5 sources of measurement
_R << pow(varGPS, 2), 0.0, 0.0, 0.0, 0.0,
0.0, pow(varGPS, 2), 0.0, 0.0, 0.0,
0.0, 0.0, pow(varSpeed, 2), 0.0, 0.0,
0.0, 0.0, 0.0, pow(varYaw, 2), 0.0,
0.0, 0.0, 0.0, 0.0, pow(varAcc, 2);
this->verbose = verbose;
if(verbose) std::cout << " =========================== FUSION: Initializing --- " << "\r\n";
}
void const Fusion::updateQ(double dt)
{
if(this->verbose) std::cout << " =========================== FUSION: Updating Q --- " << "\r\n";
// Process Noise Covariance Matrix Q
_Q = Eigen::MatrixXd(_n, _n);
_sGPS = 0.5 * _max_acceleration * pow(dt, 2);
_sVelocity = _max_acceleration * dt;
_sCourse = _max_turn_rate * dt;
_sYaw = _max_yaw_accel * dt;
_sAccel = _max_acceleration;
_Q << pow(_sGPS, 2), 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, pow(_sGPS, 2), 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, pow(_sCourse, 2), 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, pow(_sVelocity, 2), 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, pow(_sYaw, 2), 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, pow(_sAccel, 2);
_KF.setQ(_Q);
}
void Fusion::start(const DataPoint &data)
{
if(this->verbose) std::cout << " Fusion: ------ In start.....\r\n";
_timestamp = data.get_timestamp();
Eigen::VectorXd state = data.get_state();
_KF.start(_n, state, _P, _F, _Q);
_initialized = true;
}
void Fusion::compute(const DataPoint &data)
{
/*******************************************
* Prediction Step
- Assumes current velocity is the same for this dt
*******************************************/
if(this->verbose) std::cout << " Fusion: ------ In compute.....\r\n";
// Assuming 1.e6 for timestamp - confirm after running on the system
const double dt = (data.get_timestamp())/ 1.e6;
// const double dt = 0.1;
if(this->verbose) std::cout << dt << "timestep in compute";
_timestamp = data.get_timestamp();
// Update Q
this->updateQ(dt);
// Update state and calculate jacobian
_KF.updateJA(dt);
// Prediction
_KF.predict();
/*******************************************
* Update Step
- Updates appropriate matrices given a measurement
- Assumes measurement is received either from GPS or IMU
*******************************************/
Eigen::VectorXd zz = data.get_state();
Eigen::VectorXd z;
z.resize(5);
z << zz(0), //east
zz(1), //north
zz(3), //vel
zz(4), //yaw_rate
zz(5); //accel
const Eigen::VectorXd state = _KF.get_resulting_state();
Eigen::VectorXd Hx;
Eigen::MatrixXd JH;
Hx.resize(5);
JH.resize(5,6);
// measurement function (heading is state(2) per the EKF state ordering: x, y, psi, v, psi_dot, a)
Hx << state(0) + _xOffset * cos(state(2)) - _yOffset * sin(state(2)),
state(1) + _xOffset * sin(state(2)) + _yOffset * cos(state(2)),
state(3),
state(4),
state(5);
double j13 = - _xOffset * sin(state(2)) - _yOffset * cos(state(2));
double j23 = _xOffset * cos(state(2)) - _yOffset * sin(state(2));
if(data.get_data_point_type() == DataPointType::GPS)
{
JH << 1.0, 0.0, j13, 0.0, 0.0, 0.0,
0.0, 1.0, j23, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0;
_KF.update(z, Hx, JH, _R);
}
else if(data.get_data_point_type() == DataPointType::IMU)
{
JH << 0.0, 0.0, j13, 0.0, 0.0, 0.0,
0.0, 0.0, j23, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0;
_KF.update(z, Hx, JH, _R);
}
}
void Fusion::process(const DataPoint &data)
{
if(this->verbose) std::cout << " Fusion: ------ In process.....\r\n";
if(data.get_timestamp() > 0.0)
{
_initialized ? this->compute(data) : this->start(data);
}
}
Eigen::VectorXd Fusion::get_resulting_state() const
{
return _KF.get_resulting_state();
}
<|start_filename|>GPS_IMU_Kalman_Filter/src/ekf.cpp<|end_filename|>
//
// ekf.cpp
// EKF
//
// Created by Karan on 4/7/18.
// Copyright © 2018 Karan. All rights reserved.
//
#include "ekf.hpp"
#include <iostream>
void EKF::start(const int nin, const Eigen::VectorXd &xin, const Eigen::MatrixXd &Pin, const Eigen::MatrixXd &Fin, const Eigen::MatrixXd &Qin)
{
_num_states = nin;
_I = Eigen::MatrixXd::Identity(_num_states, _num_states);
if(this->verbose) std::cout << " EKF: Number of states ->" << nin << "\n";
this->_state.resize(nin);
this->_state = xin;
if(this->verbose) std::cout << " EKF: Size of Input states ->" << xin.size() << "\n";
_P = Pin;
_JA = Fin;
_Q = Qin;
return;
}
void EKF::setQ(const Eigen::MatrixXd &Q_in)
{
_Q = Q_in;
}
void EKF::updateJA(const double dt)
{
/*******************************************
* State Equation Update Rule
x + v/ψ˙(−sin(ψ) + sin(dtψ˙+ψ))
y + v/ψ˙(cos(ψ) − cos(dtψ˙+ψ))
dtψ˙+ ψ
dta + vψ˙
ψ˙
a
*******************************************/
if(this->verbose) std::cout << "Updating JA: About to update state equations" << "\n";
if(this->verbose) std::cout << "Updating JA: size of states" << this->_state.rows() << "x" <<this->_state.cols() << "\n";
// Updating state equations
if(fabs(_state(4)) < 0.01){
_state(0) = _state(0) + (_state(3) * dt) * cos(_state(2));
if(this->verbose) std::cout << "Updating JA: state 0" << "\n";
_state(1) = _state(1) + (_state(3) * dt) * sin(_state(2));
if(this->verbose) std::cout << "Updating JA: state 1" << "\n";
_state(2) = _state(2);
if(this->verbose) std::cout << "Updating JA: state 2" << "\n";
_state(3) = _state(3) + _state(5) * dt;
if(this->verbose) std::cout << "Updating JA: state 3" << "\n";
_state(4) = 0.0000001;
if(this->verbose) std::cout << "Updating JA: state 4" << "\n";
_state(5) = _state(5);
if(this->verbose) std::cout << "Updating JA: state 5" << "\n";
}else{
_state(0) = _state(0) + (_state(3)/_state(4)) * (sin(_state(4) * dt + _state(2)) - sin(_state(2)));
if(this->verbose) std::cout << "Updating JA: state 0" << "\n";
_state(1) = _state(1) + (_state(3)/_state(4)) * (-cos(_state(4) * dt + _state(2)) + cos(_state(2)));
if(this->verbose) std::cout << "Updating JA: state 1" << "\n";
_state(2) = std::fmod((_state(2) + _state(4) * dt + M_PI), (2.0 * M_PI)) - M_PI;
if(this->verbose) std::cout << "Updating JA: state 2" << "\n";
_state(3) = _state(3) + _state(5) * dt;
if(this->verbose) std::cout << "Updating JA: state 3" << "\n";
_state(4) = _state(4);
if(this->verbose) std::cout << "Updating JA: state 4" << "\n";
_state(5) = _state(5);
if(this->verbose) std::cout << "Updating JA: state 5" << "\n";
}
if(this->verbose) std::cout << "Updating JA: About to calculate jacobian" << "\n";
// Calculate jacobian
_JA = calculate_jacobian(_state, dt);
}
void EKF::predict()
{
// Prediction step
_P = _JA * _P * _JA.transpose() + _Q;
}
void EKF::update(const Eigen::VectorXd& Z, const Eigen::VectorXd& Hx, const Eigen::MatrixXd &JH, const Eigen::MatrixXd &R)
{
Eigen::MatrixXd JHT = _P * JH.transpose();
// Temporary variable for storing this intermediate value
Eigen::MatrixXd _S = JH * JHT + R;
// Compute the Kalman gain
_K = JHT * _S.inverse();
// Update the estimate
Eigen::VectorXd y = Z - Hx;
_state = _state + _K * y;
// Update the error covariance
_P = (_I - _K * JH) * _P;
}
Eigen::VectorXd EKF::get_resulting_state() const
{
return _state;
}
<|start_filename|>GPS_IMU_Kalman_Filter/src/parameterReader.hpp<|end_filename|>
//
// paramReader.hpp
// Fusion
//
// Created by Karan on 6/6/18.
// Copyright © 2018 Karan. All rights reserved.
#ifndef paramReader_hpp
#define paramReader_hpp
#include <fstream>
#include <map>
#include <vector>
using namespace std;
class ParameterReader
{
public:
ParameterReader( string filename="parameters.txt" )
{
ifstream fin( filename.c_str() );
if (!fin)
{
cerr<<"parameter file does not exist."<<endl;
return;
}
while(!fin.eof())
{
cout << "------ Reading in Parameter File...\r\n";
string str;
getline( fin, str );
cout << " Line Read: " << str << endl;
if (str[0] == '#')
{
continue;
}
int pos = str.find("=");
if (pos == -1){
cout << "pos found = -1 ---- Continuing loop...\r\n";
continue;
}
string key = str.substr( 0, pos );
string value = str.substr( pos+1, str.length() );
this->data[key] = value;
cout << " Key Found with Value: " << key << " -> " << value << endl;
cout << " Stored data mapping: key (" << key << ") ------- value(" << this->data[key] << ")\r\n";
if ( !fin.good() ){
cout<<"\r\n";
break;
}
}
}
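/*
Example parameters.txt accepted by this reader (illustrative sketch; the exact key names and
values are assumptions based on the fields used in run_fusion.cpp, not taken from the source):

# lines starting with '#' are skipped
maxAccel=3.0
maxTurnRate=1.0
maxYawAccel=0.5
varGPS=5.0
varSpeed=0.5
varYaw=0.05
varAcc=1.0
xOff=0.0
yOff=0.0
*/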
string getData( string key )
{
map<string, string>::iterator iter;
iter = this->data.find(key.c_str());
std::cout << "Searching for key (" << key.c_str() << ") => " << this->data[key] << '\n';
if (iter == this->data.end())
{
cerr<<" Parameter name "<< key <<" not found!"<<endl;
return string("NOT_FOUND");
}
return iter->second;
}
public:
map<string, string> data;
};

#endif /* paramReader_hpp */
<|start_filename|>GPS_IMU_Kalman_Filter/src/run_fusion.cpp<|end_filename|>
//
// run_fusion.cpp
// Main Class running GPS INS Fusion
//
// Created by Karan on 5/2/18.
// Copyright © 2018 Karan. All rights reserved.
#include "fusion.hpp"
#include "csv_reader.hpp"
#include "run_fusion.hpp"
#include <eigen3/Eigen/Dense>
/**
* @brief Constructor for the main class
*
* @param verbose enables debug logging
*
* The maximum acceleration, maximum turn rate, maximum yaw acceleration and the sensor
* variances/offsets are read from the parameter file and used to set up the system
* noise matrices in the EKF.
*/
GpsIns::GpsIns(bool verbose)
{
try
{
params = getDefaultParams();
}
catch (const ifstream::failure& e)
{
cout << "Exception opening/reading parameter file";
}
_raw_data = Eigen::VectorXd(6);
filter = new Fusion(params.maxAccel, params.maxTurnRate, params.maxYawAccel, params.varGPS,
params.varSpeed, params.varYaw, params.varAcc, params.xOff, params.yOff, verbose);
_sensor_data = new DataPoint(verbose);
_prev_enc_counter = 0;
_prev_gps_counter = 0;
_prev_imu_counter = 0;
_imucounter = 0;
_gpscounter = 0;
_enccounter = 0;
_dt = 0;
_prev_time = clock();
_cur_time = 0;
this->verbose = verbose;
if(this->verbose) std::cout << " GPS-INS: finished Initializing Constructor" << "\n";
if(this->verbose){
std::printf("EKF Params Initialized:\r\n maxAccel = %.3f\r\n maxTurnRate = %.3f\r\n maxYawAccel = %.3f\r\n varGPS = %.3f\r\n varSpeed = %.3f\r\n varYaw = %.3f\r\n varAcc = %.3f\r\n", params.maxAccel, params.maxTurnRate, params.maxYawAccel, params.varGPS, params.varSpeed, params.varYaw, params.varAcc);
}
}
/**
* @brief Default destructor
*/
GpsIns::~GpsIns()
{}
/**
* @brief Reads in the GPS data
*/
void GpsIns::read_gps_data(double lat, double lon, double alt)
{
if(this->verbose) std::cout << " GPS-INS: In Read GPS" << "\n";
//m.lock();
std::lock_guard<std::mutex> lock(m);
_raw_data(0) = lat;
_raw_data(1) = lon;
_raw_data(5) = alt;
_gpscounter += 1;
//m.unlock();
if(this->verbose) std::cout << " GPS-INS: Raw GPS data --- " << _raw_data << "\n";
if(this->verbose) std::cout << " GPS-INS: Exiting read GPS" << "\n";
}
/**
* @brief Reads IMU values
*
* @param yaw_rate psi_dot
* @param long_accel longitudinal acceleration of the robot
*
*/
void GpsIns::read_imu_data(double yaw_rate, double long_accel)
{
if(this->verbose) std::cout << " GPS-INS: In Read IMU" << "\n";
//m.lock();
std::lock_guard<std::mutex> lock(m);
this->_cur_time = clock();
this->_dt = _cur_time - _prev_time;
_raw_data(3) = yaw_rate;
_raw_data(4) = long_accel;
_imucounter += 1;
// update previous time only if dt > 0
if(_dt > 0.0) this->_prev_time = _cur_time;
//m.unlock();
if(this->verbose) std::cout << " GPS-INS: IMU Data -- " << _raw_data << "\n";
if(this->verbose) std::cout << " GPS-INS: Exiting read IMU" << "\n";
}
/**
* @brief Reads in the encoders - called in tasks
*
* @param vel velocity computed from encoders
*/
void GpsIns::read_encoders(double vel)
{
if(this->verbose) std::cout << " GPS-INS: Read encoder" << "\n";
//m.lock();
std::lock_guard<std::mutex> lock(m);
_raw_data(2) = vel;
_enccounter += 1;
//m.unlock();
if(this->verbose) std::cout << " GPS-INS: Exiting Read encoder" << "\n";
}
/**
* @brief Sets up the data in DataPoint class to be used up by the filter
* How do you ensure data in this object is the current and has been completely filled at this timestep?
*/
void GpsIns::set_data()
{
if(this->verbose) std::cout << " GPS-INS: Setting data" << "\n";
bool flag = (_gpscounter > _prev_gps_counter);
// If GPS got updated then we update the filter, otherwise we only predict
if(flag)
_data_type = DataPointType::GPS;
else
_data_type = DataPointType::IMU;
if(this->verbose) std::cout << " GPS-INS: delta Time used --- " << (float)_dt << "\n";
_sensor_data->set(_dt, _data_type, _raw_data);
if(this->verbose) std::cout << " GPS-INS: Data set" << "\n";
}
/**
* @brief Main loop for the filter
*/
void GpsIns::loop()
{
//m.lock();
std::lock_guard<std::mutex> lock(m);
set_data();
filter->process(*_sensor_data);
_prev_gps_counter = _gpscounter;
//m.unlock();
}
/**
* @brief Returns the estimated state of the system
*
* @return Estimated state for the robot
*/
Eigen::VectorXd GpsIns::get_estimated_state() const
{
return filter->get_resulting_state();
}
DataPoint GpsIns::get_sensor_data()
{
return *_sensor_data;
}
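/*
Minimal usage sketch (not part of the original source; sensor values are placeholders):

    GpsIns ins(true);                                // verbose filter
    ins.read_gps_data(37.7835, -122.4089, 10.0);     // latitude, longitude, altitude
    ins.read_imu_data(0.01, 0.2);                    // yaw rate, longitudinal acceleration
    ins.read_encoders(1.5);                          // velocity
    ins.loop();                                      // set_data + predict/update
    Eigen::VectorXd state = ins.get_estimated_state();
*/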
<|start_filename|>GPS_IMU_Kalman_Filter/src/geo_ned.cpp<|end_filename|>
//
// geo_ned.cpp
// EKF
//
// Created by Karan on 4/9/18.
// Copyright © 2018 Karan. All rights reserved.
//
#include "geo_ned.hpp"
using namespace geodectic_converter;
GeodecticConverter::GeodecticConverter()
:_have_reference(false)
{}
bool GeodecticConverter::isInitialised()
{
return _have_reference;
}
void GeodecticConverter::getReference(double &latitude, double &longitude, double &altitude)
{
latitude = _initial_latitude;
longitude = _initial_longitude;
altitude = _initial_altitude;
return;
}
void GeodecticConverter::intializeReference(const double latitude, const double longitude, const double altitude)
{
// Save origin
_initial_latitude = deg2Rad(latitude);
_initial_longitude = deg2Rad(longitude);
_initial_altitude = altitude;
// Compute ECEF of NED origin
geodetic2Ecef(latitude, longitude, altitude, &_initial_ecef_x, &_initial_ecef_y, &_initial_ecef_z);
// Compute ECEF to NED and NED to ECEF matrices
double phiP = atan2(_initial_ecef_z, sqrt(pow(_initial_ecef_x, 2) + pow(_initial_ecef_y, 2)));
ecef_to_ned_matrix_ = nRe(phiP, _initial_longitude);
ned_to_ecef_matrix_ = nRe(_initial_latitude, _initial_longitude).transpose();
_have_reference = true;
}
void GeodecticConverter::geodetic2Ecef(const double latitude, const double longitude, const double altitude, double* x,
double* y, double* z)
{
// Convert geodetic coordinates to ECEF.
// http://code.google.com/p/pysatel/source/browse/trunk/coord.py?r=22
double lat_rad = deg2Rad(latitude);
double lon_rad = deg2Rad(longitude);
double xi = sqrt(1 - kFirstEccentricitySquared * sin(lat_rad) * sin(lat_rad));
*x = (kSemimajorAxis / xi + altitude) * cos(lat_rad) * cos(lon_rad);
*y = (kSemimajorAxis / xi + altitude) * cos(lat_rad) * sin(lon_rad);
*z = (kSemimajorAxis / xi * (1 - kFirstEccentricitySquared) + altitude) * sin(lat_rad);
}
void GeodecticConverter::ecef2Geodetic(const double x, const double y, const double z, double* latitude,
double* longitude, double* altitude)
{
// Convert ECEF coordinates to geodetic coordinates.
// <NAME>, "Conversion of Earth-centered Earth-fixed coordinates
// to geodetic coordinates," IEEE Transactions on Aerospace and
// Electronic Systems, vol. 30, pp. 957-961, 1994.
double r = sqrt(x * x + y * y);
double Esq = kSemimajorAxis * kSemimajorAxis - kSemiminorAxis * kSemiminorAxis;
double F = 54 * kSemiminorAxis * kSemiminorAxis * z * z;
double G = r * r + (1 - kFirstEccentricitySquared) * z * z - kFirstEccentricitySquared * Esq;
double C = (kFirstEccentricitySquared * kFirstEccentricitySquared * F * r * r) / pow(G, 3);
double S = cbrt(1 + C + sqrt(C * C + 2 * C));
double P = F / (3 * pow((S + 1 / S + 1), 2) * G * G);
double Q = sqrt(1 + 2 * kFirstEccentricitySquared * kFirstEccentricitySquared * P);
double r_0 = -(P * kFirstEccentricitySquared * r) / (1 + Q)
+ sqrt(
0.5 * kSemimajorAxis * kSemimajorAxis * (1 + 1.0 / Q)
- P * (1 - kFirstEccentricitySquared) * z * z / (Q * (1 + Q)) - 0.5 * P * r * r);
double U = sqrt(pow((r - kFirstEccentricitySquared * r_0), 2) + z * z);
double V = sqrt(
pow((r - kFirstEccentricitySquared * r_0), 2) + (1 - kFirstEccentricitySquared) * z * z);
double Z_0 = kSemiminorAxis * kSemiminorAxis * z / (kSemimajorAxis * V);
*altitude = U * (1 - kSemiminorAxis * kSemiminorAxis / (kSemimajorAxis * V));
*latitude = rad2Deg(atan((z + kSecondEccentricitySquared * Z_0) / r));
*longitude = rad2Deg(atan2(y, x));
}
void GeodecticConverter::ecef2Ned(const double x, const double y, const double z, double* north, double* east,
double* down)
{
// Converts ECEF coordinate position into local-tangent-plane NED.
// Coordinates relative to given ECEF coordinate frame.
Eigen::Vector3d vect, ret;
vect(0) = x - _initial_ecef_x;
vect(1) = y - _initial_ecef_y;
vect(2) = z - _initial_ecef_z;
ret = ecef_to_ned_matrix_ * vect;
*north = ret(0);
*east = ret(1);
*down = -ret(2);
}
void GeodecticConverter::ned2Ecef(const double north, const double east, const double down, double* x, double* y,
double* z)
{
// NED (north/east/down) to ECEF coordinates
Eigen::Vector3d ned, ret;
ned(0) = north;
ned(1) = east;
ned(2) = -down;
ret = ned_to_ecef_matrix_ * ned;
*x = ret(0) + _initial_ecef_x;
*y = ret(1) + _initial_ecef_y;
*z = ret(2) + _initial_ecef_z;
}
void GeodecticConverter::geodetic2Ned(const double latitude, const double longitude, const double altitude,
double* north, double* east, double* down)
{
// Geodetic position to local NED frame
double x, y, z;
geodetic2Ecef(latitude, longitude, altitude, &x, &y, &z);
ecef2Ned(x, y, z, north, east, down);
}
void GeodecticConverter::ned2Geodetic(const double north, const double east, const double down, double* latitude,
double* longitude, double* altitude)
{
// Local NED position to geodetic coordinates
double x, y, z;
ned2Ecef(north, east, down, &x, &y, &z);
ecef2Geodetic(x, y, z, latitude, longitude, altitude);
}
void GeodecticConverter::geodetic2Enu(const double latitude, const double longitude, const double altitude,
double* east, double* north, double* up)
{
// Geodetic position to local ENU frame
double x, y, z;
geodetic2Ecef(latitude, longitude, altitude, &x, &y, &z);
double aux_north, aux_east, aux_down;
ecef2Ned(x, y, z, &aux_north, &aux_east, &aux_down);
*east = aux_east;
*north = aux_north;
*up = -aux_down;
}
void GeodecticConverter::enu2Geodetic(const double east, const double north, const double up, double* latitude,
double* longitude, double* altitude)
{
// Local ENU position to geodetic coordinates
const double aux_north = north;
const double aux_east = east;
const double aux_down = -up;
double x, y, z;
ned2Ecef(aux_north, aux_east, aux_down, &x, &y, &z);
ecef2Geodetic(x, y, z, latitude, longitude, altitude);
}
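/*
Usage sketch (illustrative only; coordinates are placeholder values):

    geodectic_converter::GeodecticConverter conv;
    conv.intializeReference(37.7835, -122.4089, 10.0);                 // local origin (lat, lon, alt)
    double east, north, up;
    conv.geodetic2Enu(37.7840, -122.4095, 12.0, &east, &north, &up);   // nearby point in local ENU metres
*/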
<|start_filename|>GPS_IMU_Kalman_Filter/src/ekf.hpp<|end_filename|>
//
// ekf.hpp
// EKF
//
// Created by Karan on 4/7/18.
// Copyright © 2018 Karan. All rights reserved.
//
#ifndef ekf_hpp
#define ekf_hpp
#include <stdio.h>
#include "utils.hpp"
#include "lib/Eigen/Dense"
/**
* @brief EKF base class implementing generic Extended Kalman Filter
*/
class EKF
{
public:
/**
* @brief Default constructor
*/
EKF(bool verbose = false){this->verbose = verbose;};
/**
* @brief Default destructor
*/
~EKF(){};
/**
* @brief Function to be called the first time EKF is initialized
*
* @param nin Number of states
* @param xin States
* @param Pin Initial covariance matrix
* @param Fin Initial Jacobian of the state
* @param Qin Initial Q matrix
*/
void start(const int nin, const Eigen::VectorXd& xin, const Eigen::MatrixXd& Pin, const Eigen::MatrixXd& Fin, const Eigen::MatrixXd& Qin);
/**
* @brief set Q value for the filter
*
* @param Q_in Input Q matrix
*/
void setQ(const Eigen::MatrixXd& Q_in);
/**
* @brief Returns estimated state
*
* @return State of the system
*/
Eigen::VectorXd get_resulting_state() const;
/**
* @brief Integrates system variables to predict the system state
*
* @param dt Time interval over which the integration takes place. Usually the difference between the previous and
* current time step
*/
void updateJA(const double dt);
/**
* @brief Updates the state covairance matrix and adds process noise
*/
void predict();
/**
* @brief Runs the correction/update step of the filter
*
* @param Z Measurements for the current time step
* @param Hx Measurement model
* @param JH Jacobian of the measurement model
* @param R Measurement noise
*/
void update(const Eigen::VectorXd& Z, const Eigen::VectorXd& Hx, const Eigen::MatrixXd &JH, const Eigen::MatrixXd &R);
private:
// Flag to indicate if the filter has started
bool _init;
bool verbose;
int _num_states; // Number of states in the EKF
Eigen::MatrixXd _P; // initial covariance/uncertainty in states
Eigen::MatrixXd _Q; // process noise covariance
Eigen::MatrixXd _JH; // measurement Jacobian
Eigen::MatrixXd _R; // measurement noise covariance
Eigen::MatrixXd _I; // Identity matrix
Eigen::MatrixXd _JA; // Jacobian state matrix
Eigen::MatrixXd _S; // Matrix for storing intermediate step in update part
Eigen::MatrixXd _K; // Kalman Gain
Eigen::VectorXd _state; // State - x y heading velocity yaw_rate long_acceleration
};
#endif /* ekf_hpp */
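// Usage sketch (illustrative only, not part of the original sources): the
// expected call sequence is start() once, then updateJA(dt), predict() and
// update() for every new measurement. The sizes below assume the 6-state
// vector documented above (x, y, heading, velocity, yaw_rate, acceleration);
// the concrete noise values are placeholders.
//
// EKF ekf(true);
// Eigen::VectorXd x0 = Eigen::VectorXd::Zero(6);
// Eigen::MatrixXd P0 = Eigen::MatrixXd::Identity(6, 6) * 1000.0;
// Eigen::MatrixXd F0 = Eigen::MatrixXd::Identity(6, 6);
// Eigen::MatrixXd Q0 = Eigen::MatrixXd::Identity(6, 6) * 0.1;
// ekf.start(6, x0, P0, F0, Q0);
// // per time step, with measurement Z, model Hx, Jacobian JH and noise R:
// ekf.updateJA(dt);
// ekf.predict();
// ekf.update(Z, Hx, JH, R);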
<|start_filename|>GPS_IMU_Kalman_Filter/src/main.cpp<|end_filename|>
//
// main.cpp
// ExtendedKalmanFilter
//
// Created by Karan on 4/6/18.
// Copyright © 2018 Karan. All rights reserved.
//
#include <iostream>
#include <fstream>
#include <sstream>
#include "fusion.hpp"
int main(int argc, const char * argv[])
{
std::ifstream ip("/Users/karan/Desktop/EKF/EKF/Data.csv");
if(!ip.is_open())
{
std::cerr << "Failed to open the data file";
std::exit(EXIT_FAILURE);
}
std::string timestamp;
std::string ax;
std::string yaw_rate;
std::string yaw;
std::string course;
std::string lat;
std::string lon;
std::string str;
std::getline(ip, str); // Skip the first line
while(std::getline(ip, str))
{
std::istringstream iss(str);
std::string token;
while (std::getline(iss, token, ','))
{
// process each token (here we simply print its length)
std::cout << token.size() << " ";
}
std::cout << std::endl;
}
ip.close();
return 0;
}
| chendaxiashizhu/GPS_IMU_Kalman_Filter |
<|start_filename|>resources/js/modules/notification/components/notificationMixin.js<|end_filename|>
import {mapGetters, mapMutations} from 'vuex'
import * as notificationTypes from '../store/types'
export default {
name: 'Notificator',
render(h) {
return h
},
computed: {
...mapGetters(['notificationMessages'])
},
watch: {
notificationMessages: function (newValue) {
if (newValue.length) {
newValue.forEach(m => this.showMessage(m))
this[notificationTypes.CLEAR_NOTIFICATION_MESSAGES]()
}
}
},
methods: {
...mapMutations([
notificationTypes.CLEAR_NOTIFICATION_MESSAGES,
]),
showMessage(message) {
switch (message.type) {
case notificationTypes.ERROR_MESSAGE:
this.$message.error(message.message)
break
case notificationTypes.WARNING_MESSAGE:
case notificationTypes.SUCCESS_MESSAGE:
this.$message({
...message
})
break
case notificationTypes.COMMON_MESSAGE:
this.$notify({
...message.message
})
}
}
}
}
| soulzilla/skinzilla |
<|start_filename|>.eslintrc.js<|end_filename|>
module.exports = {
'env':
{
'browser': true,
'es6': true
},
'parserOptions':
{
'sourceType': 'module'
},
'rules':
{
'no-unused-vars': 1,
'no-console': 0,
'dot-notation': 1,
'eqeqeq': 1,
'no-else-return': 0,
'no-new-func': 1,
'no-param-reassign': [1, { props: false }],
'no-useless-concat': 1,
'no-useless-escape': 1,
'radix': [1, 'as-needed'],
'no-undef': 2,
'array-bracket-spacing': [1, 'never'],
'brace-style': [1, 'allman'],
'camelcase': [1, { properties: 'always' }],
'comma-dangle': [1, 'never'],
'comma-style': [1, 'last'],
'func-style': [1, 'expression'],
'id-length': 0,
'indent': [1, 4, { SwitchCase: 1 }],
'keyword-spacing': [1, { after: false, before: true, overrides: { from: { after: true }, return: { after: true }, import: { after: true }, case: { after: true } } }],
'max-len': 0,
'new-cap': [1, { newIsCap: true, newIsCapExceptions: [], capIsNew: false, capIsNewExceptions: ['Immutable.Map', 'Immutable.Set', 'Immutable.List'] }],
'no-array-constructor': 1,
'no-bitwise': 0,
'no-mixed-operators': 0,
'no-nested-ternary': 0,
'no-new-object': 1,
'no-plusplus': 0,
'no-restricted-syntax': 0,
'no-trailing-spaces': 1,
'no-underscore-dangle': 0,
'no-unneeded-ternary': 1,
'no-whitespace-before-property': 1,
'object-curly-spacing': [1, 'always'],
'one-var': [1, 'never'],
'padded-blocks': [1, 'never'],
'quote-props': [1, 'as-needed'],
'quotes': [1, 'single'],
'semi': [1, 'never'],
'space-before-blocks': [1, 'always'],
'space-before-function-paren': [1, { anonymous: 'never', named: 'never', asyncArrow: 'never' }],
'space-in-parens': [1, 'never'],
'space-infix-ops': 1,
'spaced-comment': [1, 'always'],
'arrow-body-style': 0,
'arrow-parens': [1, 'always'],
'arrow-spacing': [1, { before: true, 'after': true }],
'no-confusing-arrow': 0,
'no-dupe-class-members': 1,
'no-duplicate-imports': 0,
'no-useless-constructor': 1,
'no-var': 1,
'object-shorthand': 0,
'prefer-const': 1,
'prefer-rest-params': 1,
'prefer-spread': 1,
'prefer-template': 0,
'template-curly-spacing': [1, 'never']
}
}
<|start_filename|>static/manifest.json<|end_filename|>
{
"manifest_version": 2,
"name": "Notedown",
"version": "0.1",
"content_security_policy": "script-src 'self' https://apis.google.com; object-src 'self'",
"chrome_url_overrides":
{
"newtab": "index.html"
}
}
<|start_filename|>config/webpack.dev.js<|end_filename|>
const merge = require('webpack-merge')
const webpackCommonConfig = require('./webpack.common.js')
const webpack = require('webpack')
const ip = require('ip')
module.exports = merge(
webpackCommonConfig,
{
devServer:
{
contentBase: './dist'
},
plugins:
[
new webpack.HotModuleReplacementPlugin()
]
}
)
<|start_filename|>src/javascript/GoogleDriveAPI.js<|end_filename|>
import EventEmitter from './EventEmitter.js'
const CLIENT_ID = process.env.GOOGLE_CLIENT_ID
const API_KEY = process.env.GOOGLE_API_KEY
const DISCOVERY_DOCS = ['https://www.googleapis.com/discovery/v1/apis/drive/v3/rest']
const SCOPES = 'https://www.googleapis.com/auth/drive.file'
const FILE_NAME = 'notedown'
export default class GoogleDriveAPI extends EventEmitter
{
constructor()
{
super()
this.gapi = window.gapi
this.authInstance = null
this.isReady = false
this.file = null
// Init the google API
this.gapi
.load('client:auth2', () =>
{
this.gapi.client
.init({
apiKey: API_KEY,
clientId: CLIENT_ID,
discoveryDocs: DISCOVERY_DOCS,
scope: SCOPES
})
.catch(() =>
{
this.trigger('errorInit')
})
.then(() =>
{
this.isReady = true
this.authInstance = this.gapi.auth2.getAuthInstance()
this.setButtons()
this.authInstance.isSignedIn.listen((isSignedIn) =>
{
this.isSignedIn = isSignedIn
this.trigger('signedInUpdate')
this.signInStatusUpdate()
})
this.isSignedIn = this.authInstance.isSignedIn.get()
this.signInStatusUpdate()
this.trigger('apiReady')
})
this.trigger('apiLoaded')
})
}
/**
* Set buttons
*/
setButtons()
{
// Retrieve buttons
this.$signInButton = document.querySelector('.js-sign-in')
this.$signOutButton = document.querySelector('.js-sign-out')
// Listen to clicks
this.$signInButton.addEventListener('click', (event) =>
{
event.preventDefault()
this.authInstance.signIn()
})
this.$signOutButton.addEventListener('click', (event) =>
{
event.preventDefault()
this.authInstance.signOut()
})
}
/**
* Sign in status update
*/
signInStatusUpdate()
{
if(this.isSignedIn)
{
// Update buttons
this.$signInButton.style.display = 'none'
this.$signOutButton.style.display = 'block'
// Fetch files
this.gapi.client.drive.files
.list({
// spaces: 'appDataFolder',
fields: 'nextPageToken, files(id, name, webContentLink)',
pageSize: 100
})
.execute((response) =>
{
// Error
if(response.error)
{
this.authInstance.signIn().then(() =>
{
this.isSignedIn = this.authInstance.isSignedIn.get()
this.trigger('signedInUpdate')
this.signInStatusUpdate()
})
this.trigger('errorList')
}
// No error
else
{
const file = response.files.find((file) => file.name === FILE_NAME)
// File doesn't exist
// Create
if(!file)
{
this.create()
}
// File exist
// Fetch
else
{
this.file = file
// Download
this.fetch()
}
}
})
}
else
{
this.$signInButton.style.display = 'block'
this.$signOutButton.style.display = 'none'
}
}
/**
* Create
*/
create()
{
// Not ready
if(!this.isReady)
{
return
}
const boundary = '-------314159265358979323846'
const delimiter = `\r\n--${boundary}\r\n`
const closeDelim = `\r\n--${boundary}--`
const contentType = 'text/plain'
const metadata = {
name: FILE_NAME,
parents: [],
mimeType: contentType
}
const multipartRequestBody = `${delimiter}Content-Type: application/json\r\n\r\n${JSON.stringify(metadata)}${delimiter}Content-Type: ${contentType}\r\n\r\n${''}${closeDelim}`
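// For reference (added comment): the assembled body is a multipart/related
// payload containing a JSON metadata part followed by a text/plain content
// part, each introduced by the boundary delimiter and closed by closeDelim.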
const request = this.gapi.client.request({
path: '/upload/drive/v3/files',
method: 'POST',
params:
{
uploadType: 'multipart'
},
headers:
{
'Content-Type': `multipart/related; boundary="${boundary}"`
},
body: multipartRequestBody
})
this.trigger('startCreate')
request.execute((result) =>
{
// Error
if(!result)
{
this.trigger('errorCreate')
}
// No error
else
{
this.file = result
this.trigger('endCreate')
}
})
}
/**
* Update file using request
*/
update(data = '')
{
// Not ready
if(!this.isReady)
{
return
}
// No file
if(!this.file)
{
return false
}
const boundary = '-------314159265358979323846'
const delimiter = `\r\n--${boundary}\r\n`
const closeDelim = `\r\n--${boundary}--`
const contentType = 'text/plain'
const metadata = {
parents: [],
mimeType: contentType
}
const multipartRequestBody = `${delimiter}Content-Type: application/json\r\n\r\n${JSON.stringify(metadata)}${delimiter}Content-Type: ${contentType}\r\n\r\n${data}${closeDelim}`
const request = this.gapi.client.request({
path: '/upload/drive/v3/files/' + this.file.id,
method: 'PATCH',
params:
{
uploadType: 'multipart'
},
headers:
{
'Content-Type': `multipart/related; boundary="${boundary}"`
},
body: multipartRequestBody
})
this.trigger('startUpdate')
request.execute((result) =>
{
// Error
if(!result)
{
this.trigger('errorUpdate')
}
// No error
else
{
this.file = result
this.trigger('endUpdate')
}
})
}
/**
* Fetch
*/
fetch()
{
// Not ready
if(!this.isReady)
{
return false
}
// No file
if(!this.file)
{
return false
}
// Retrieve token
const accessToken = this.authInstance.currentUser.get().getAuthResponse().access_token
// Start fetching
this.trigger('startFetch')
fetch(
`https://www.googleapis.com/drive/v3/files/${this.file.id}?alt=media`,
{
method: 'get',
headers:
{
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/x-www-form-urlencoded'
},
})
.then((response) =>
{
if(response.status === 200)
{
return response.text()
}
else
{
this.authInstance.signIn()
console.log('isSignedIn', this.authInstance.isSignedIn.get())
throw new Error('Response status != 200')
}
})
.catch((error) =>
{
console.log('error', error)
this.trigger('errorFetch')
})
.then((result) =>
{
if(result)
{
this.trigger('endFetch', [result])
}
})
}
}
<|start_filename|>config/webpack.prod.js<|end_filename|>
const path = require('path')
const merge = require('webpack-merge')
const webpackCommonConfig = require('./webpack.common.js')
const UglifyJSPlugin = require('uglifyjs-webpack-plugin')
const CleanWebpackPlugin = require('clean-webpack-plugin')
module.exports = merge(
webpackCommonConfig,
{
plugins:
[
new CleanWebpackPlugin(['build'], { root: path.resolve(__dirname, '..') }),
new UglifyJSPlugin()
]
}
)
<|start_filename|>src/javascript/EventEmitter.js<|end_filename|>
export default class EventEmitter
{
/**
* Constructor
*/
constructor()
{
this.callbacks = {}
this.callbacks.base = {}
}
/**
* On
*/
on(names, callback)
{
// Errors
if(typeof names === 'undefined' || names === '')
{
console.warn('wrong names')
return false
}
if(typeof callback === 'undefined')
{
console.warn('wrong callback')
return false
}
// Resolve names
names = this.resolve_names(names)
// Each name
for(const name of names)
{
// Resolve name
let resolved_name = this.resolve_name(name)
// Create namespace if not exist
if(!(this.callbacks[ resolved_name.namespace ] instanceof Object))
this.callbacks[ resolved_name.namespace ] = {}
// Create callback if not exist
if(!(this.callbacks[ resolved_name.namespace ][ resolved_name.value ] instanceof Array))
this.callbacks[ resolved_name.namespace ][ resolved_name.value ] = []
// Add callback
this.callbacks[ resolved_name.namespace ][ resolved_name.value ].push(callback)
}
return this
}
/**
* Off
*/
off(names)
{
// Errors
if(typeof names === 'undefined' || names === '')
{
console.warn('wrong name')
return false
}
// Resolve names
names = this.resolve_names(names)
// Each name
for(const name of names)
{
// Resolve name
let resolved_name = this.resolve_name(name)
// Remove namespace
if(resolved_name.namespace !== 'base' && resolved_name.value === '')
{
delete this.callbacks[ resolved_name.namespace ]
}
// Remove specific callback in namespace
else
{
// Default
if(resolved_name.namespace === 'base')
{
// Try to remove from each namespace
for(let namespace in this.callbacks)
{
if(this.callbacks[ namespace ] instanceof Object && this.callbacks[ namespace ][ resolved_name.value ] instanceof Array)
{
delete this.callbacks[ namespace ][ resolved_name.value ]
// Remove namespace if empty
if(Object.keys(this.callbacks[ namespace ]).length === 0)
delete this.callbacks[ namespace ]
}
}
}
// Specified namespace
else if(this.callbacks[ resolved_name.namespace ] instanceof Object && this.callbacks[ resolved_name.namespace ][ resolved_name.value ] instanceof Array)
{
delete this.callbacks[ resolved_name.namespace ][ resolved_name.value ]
// Remove namespace if empty
if(Object.keys(this.callbacks[ resolved_name.namespace ]).length === 0)
delete this.callbacks[ resolved_name.namespace ]
}
}
}
return this
}
/**
* Trigger
*/
trigger(name, args)
{
// Errors
if(typeof name === 'undefined' || name === '')
{
console.warn('wrong name')
return false
}
let final_result, result
// Default args
if(!(args instanceof Array))
args = []
// Resolve names (should only have one event)
name = this.resolve_names(name)
// Resolve name
name = this.resolve_name(name[ 0 ])
// Default namespace
if(name.namespace === 'base')
{
// Try to find callback in each namespace
for(let namespace in this.callbacks)
{
if(this.callbacks[ namespace ] instanceof Object && this.callbacks[ namespace ][ name.value ] instanceof Array)
{
for(let callback of this.callbacks[ namespace ][ name.value ])
{
result = callback.apply(this, args)
if(typeof final_result === 'undefined')
final_result = result
}
}
}
}
// Specified namespace
else if(this.callbacks[ name.namespace ] instanceof Object)
{
if(name.value === '')
{
console.warn('wrong name')
return this
}
for(let callback of this.callbacks[ name.namespace ][ name.value ])
{
result = callback.apply(this, args)
if(typeof final_result === 'undefined')
final_result = result
}
}
return final_result
}
/**
* Resolve names
*/
resolve_names(names)
{
names = names.replace(/[^a-zA-Z0-9 ,\/.]/g, '')
names = names.replace(/[,\/]+/g, ' ')
names = names.split(' ')
return names
}
/**
* Resolve name
*/
resolve_name(name)
{
let new_name = {}
let parts = name.split('.')
new_name.original = name
new_name.value = parts[ 0 ]
new_name.namespace = 'base' // Base namespace
// Specified namespace
if(parts.length > 1 && parts[ 1 ] !== '')
new_name.namespace = parts[ 1 ]
return new_name
}
}
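// Usage sketch (illustrative, not part of the original file): names may carry
// an optional ".namespace" suffix, which is what resolve_name() splits on.
//
// const emitter = new EventEmitter()
// emitter.on('save.autosave', (data) => console.log('saved', data))
// emitter.trigger('save', ['my-note']) // calls the callback above
// emitter.off('save.autosave') // removes only that namespaced callback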
<|start_filename|>src/javascript/Code.js<|end_filename|>
import CodeMirror from 'codemirror'
import 'codemirror/addon/mode/simple.js'
import 'codemirror/addon/scroll/simplescrollbars.js'
import 'codemirror/addon/scroll/simplescrollbars.css'
import 'codemirror/addon/fold/foldcode.js'
import 'codemirror/addon/fold/foldgutter.js'
import 'codemirror/addon/fold/indent-fold.js'
import 'codemirror/lib/codemirror.css'
import 'codemirror/addon/fold/foldgutter.css'
import EventEmitter from './EventEmitter.js'
export default class Code extends EventEmitter
{
constructor()
{
super()
this.locked = false
this.preventUpdate = false
// Textarea
this.$textarea = document.querySelector('.js-textarea')
// Use code mirror simple mode
CodeMirror.defineSimpleMode(
'simplemode',
{
start:
[
{ regex: /\s*#.+/, sol: true, token: 'title' },
{ regex: /(\[)([√xX])(])(\s)(.+)/, token: [null, 'checked', null, null, 'checked-value'] },
{ regex: /(\[)([!])(])(\s)(.+)/, token: [null, 'danger', null, null, 'danger-value'] },
{ regex: /(\[)([?])(])(\s)(.+)/, token: [null, 'warning', null, null, 'warning-value'] },
{ regex: /\[\s]/, token: '' },
{ regex: /\([^)]+\)/, token: 'parenthesis' },
{ regex: /\[[^\]]+\]/, token: 'brackets' },
{ regex: /(")([^"]+)(")/, token: [null, 'italic', null] },
{ regex: /(\*)([^\*]+)(\*)/, token: [null, 'bold', null] },
{ regex: /(_)([^_]+)(_)/, token: [null, 'underline', null] },
{ regex: /(~)([^~]+)(~)/, token: [null, 'lineThrough', null] },
{ regex: new RegExp(/(?:(?:https?|ftp):\/\/)(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*(?:\.(?:[a-z\u00a1-\uffff]{2,}))\.?)(?::\d{2,5})?(?:[/?#]\S*)?/i), token: 'underline' }
],
comment:
[
{ regex: /.*?\*\//, token: 'comment', next: 'start' },
{ regex: /.*/, token: 'comment' }
],
meta:
{
dontIndentStates: ['comment'],
lineComment: '//'
}
}
)
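// Example of the note syntax matched by the mode above (illustrative):
//
// # Groceries
// [√] milk -> checked
// [!] pay rent -> danger
// [?] maybe cheese -> warning
// [ ] bread
// *bold* _underline_ ~strikethrough~ "italic"
// (parenthesis) [brackets] https://example.com -> link styled as underline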
// Commands
CodeMirror.commands.swapLineUp = (cm) =>
{
if(cm.isReadOnly())
{
return CodeMirror.Pass
}
const ranges = cm.listSelections()
const linesToMove = []
let at = cm.firstLine() - 1
const newSels = []
for(let i = 0; i < ranges.length; i++)
{
const range = ranges[i]
const from = range.from().line - 1
let to = range.to().line
newSels.push({
anchor: CodeMirror.Pos(range.anchor.line - 1, range.anchor.ch),
head: CodeMirror.Pos(range.head.line - 1, range.head.ch)
})
if(range.to().ch == 0 && !range.empty())
{
--to
}
if(from > at)
{
linesToMove.push(from, to)
}
else if (linesToMove.length)
{
linesToMove[linesToMove.length - 1] = to
}
at = to
}
cm.operation(() =>
{
for (let i = 0; i < linesToMove.length; i += 2)
{
const from = linesToMove[i], to = linesToMove[i + 1]
const line = cm.getLine(from)
cm.replaceRange('', CodeMirror.Pos(from, 0), CodeMirror.Pos(from + 1, 0), '+swapLine')
if(to > cm.lastLine())
{
cm.replaceRange('\n' + line, CodeMirror.Pos(cm.lastLine()), null, '+swapLine')
}
else
{
cm.replaceRange(line + '\n', CodeMirror.Pos(to, 0), null, '+swapLine')
}
}
cm.setSelections(newSels)
cm.scrollIntoView()
})
}
CodeMirror.commands.swapLineDown = (cm) =>
{
if(cm.isReadOnly())
{
return CodeMirror.Pass
}
const ranges = cm.listSelections()
const linesToMove = []
let at = cm.lastLine() + 1
for(let i = ranges.length - 1; i >= 0; i--)
{
const range = ranges[i]
let from = range.to().line + 1
const to = range.from().line
if(range.to().ch == 0 && !range.empty())
{
from--
}
if (from < at)
{
linesToMove.push(from, to)
}
else if(linesToMove.length)
{
linesToMove[linesToMove.length - 1] = to
}
at = to
}
cm.operation(() =>
{
for(let i = linesToMove.length - 2; i >= 0; i -= 2)
{
const from = linesToMove[i], to = linesToMove[i + 1]
const line = cm.getLine(from)
if(from == cm.lastLine())
{
cm.replaceRange('', CodeMirror.Pos(from - 1), CodeMirror.Pos(from), '+swapLine')
}
else
{
cm.replaceRange('', CodeMirror.Pos(from, 0), CodeMirror.Pos(from + 1, 0), '+swapLine')
cm.replaceRange(line + '\n', CodeMirror.Pos(to, 0), null, '+swapLine')
}
}
cm.scrollIntoView()
})
}
// Update key mapping
const duplicateLine = (codeMirror) =>
{
const currentLine = codeMirror.getCursor().line
codeMirror.replaceRange(`${codeMirror.getLine(currentLine)}\n`, { line: currentLine, ch: 0 })
}
const save = (codeMirror) =>
{
this.trigger('save')
}
CodeMirror.keyMap.default['Alt-Up'] = 'swapLineUp'
CodeMirror.keyMap.default['Alt-Down'] = 'swapLineDown'
CodeMirror.keyMap.default['Shift-Tab'] = 'indentLess'
CodeMirror.keyMap.default['Shift-Cmd-D'] = duplicateLine
CodeMirror.keyMap.default['Shift-Ctrl-D'] = duplicateLine
CodeMirror.keyMap.default['Cmd-S'] = save
CodeMirror.keyMap.default['Ctrl-S'] = save
// Set code mirror
this.codeMirror = CodeMirror.fromTextArea(
this.$textarea,
{
// lineNumbers: true,
foldGutter:
{
rangeFinder: CodeMirror.fold.indent
},
gutters: ['CodeMirror-linenumbers', 'CodeMirror-foldgutter'],
scrollbarStyle: 'simple',
lineWrapping: true,
indentUnit: 4
}
)
this.codeMirror.on('change', () =>
{
// Don't prevent update
if(!this.preventUpdate)
{
// Trigger
this.trigger('update')
// Wait a few time then trigger the throttle update
if(this.updateTimeout)
{
window.clearTimeout(this.updateTimeout)
this.updateTimeout = null
}
this.updateTimeout = window.setTimeout(() =>
{
this.trigger('throttleUpdate')
this.updateTimeout = null
}, 1000)
}
// Reset prevent update
this.preventUpdate = false
})
this.updateTimeout = null
}
/**
* Set value
*/
setValue(value, preventUpdate = false)
{
this.preventUpdate = preventUpdate
this.codeMirror.setValue(value)
}
/**
* Lock to prevent modification
*/
lock()
{
this.codeMirror.setOption('readOnly', true)
this.codeMirror.display.wrapper.classList.add('locked')
this.locked = true
}
/**
* Unlock to prevent modification
*/
unlock()
{
this.codeMirror.setOption('readOnly', false)
this.codeMirror.display.wrapper.classList.remove('locked')
this.locked = false
}
}
<|start_filename|>config/webpack.common.js<|end_filename|>
const path = require('path')
const HtmlWebpackPlugin = require('html-webpack-plugin')
const ExtractTextPlugin = require('extract-text-webpack-plugin')
const CopyWebpackPlugin = require('copy-webpack-plugin')
const DotenvWebpack = require('dotenv-webpack')
const extractSass = new ExtractTextPlugin({
filename: 'css/[name].[contenthash].css',
disable: process.env.NODE_ENV === 'development'
})
module.exports = {
entry: './src/index.js',
devtool: 'source-map',
node:
{
fs: 'empty'
},
devServer:
{
contentBase: path.resolve(__dirname, '../build'),
hot: true
},
plugins:
[
new HtmlWebpackPlugin({
template: 'src/index.html'
}),
extractSass,
new CopyWebpackPlugin([{ from: path.resolve(__dirname, '../static'), to: 'static' }]),
new DotenvWebpack({
path: path.resolve(__dirname, '../.env')
})
],
output:
{
filename: 'js/bundle.[hash].js',
path: path.resolve(__dirname, '../build')
},
module:
{
rules:
[
{
test: /\.js$/,
exclude: /(node_modules|bower_components)/,
use:
{
loader: 'babel-loader',
options:
{
presets: ['env']
}
}
},
{
test: /\.sass$/,
use: extractSass.extract({
use: [
{
loader: 'css-loader'
},
{
loader: 'sass-loader'
}
],
fallback: 'style-loader'
})
},
{
test: /\.css$/,
use: extractSass.extract({
use: [
{
loader: 'css-loader'
}
],
fallback: 'style-loader'
})
},
{
test: /\.(png|svg|jpg|gif)$/,
use:
{
loader: 'file-loader',
options:
{
name: 'images/[name].[hash].[ext]'
}
}
},
{
test: /\.(woff2|woff|eot|ttf|otf)$/,
use:
{
loader: 'file-loader',
options:
{
name: 'fonts/[name].[hash].[ext]'
}
}
}
]
}
}
<|start_filename|>src/javascript/Controller.js<|end_filename|>
export default class Controller
{
/**
* Constructor
*/
constructor()
{
this.shouldOpen = false
this.canOpen = true
this.opened = false
this.$container = document.querySelector('.js-controller')
this.$area = this.$container.querySelector('.js-area')
this.setPosition()
this.setItems()
this.setDesktopGesture()
this.setLoop()
}
/**
* Set position
*/
setPosition()
{
this.position = {}
this.position.target = { x: 200, y: 200 }
this.position.current = { x: this.position.target.x, y: this.position.target.y }
this.position.rounded = { x: this.position.target.x, y: this.position.target.y }
this.$container.style.transform = `translateX(${this.position.rounded.x}px) translateY(${this.position.rounded.y}px)`
}
/**
* Set items
*/
setItems()
{
this.items = []
const $items = this.$container.querySelectorAll('.js-item')
let index = 0
for(const $item of $items)
{
const item = {}
item.$element = $item
item.current = {}
item.current.x = 0
item.current.y = 0
item.target = {}
item.target.x = 0
item.target.y = 0
item.rounded = {}
item.rounded.x = 0
item.rounded.y = 0
item.index = index
this.items.push(item)
index++
}
}
/**
* Set desktop gesture
*/
setDesktopGesture()
{
/**
* Container position
*/
const previous = { x: 0, y: 0 }
const mouseDownCallback = (event) =>
{
event.preventDefault()
previous.x = event.clientX
previous.y = event.clientY
window.addEventListener('mousemove', mouseMoveCallback)
window.addEventListener('mouseup', mouseUpCallback)
}
const mouseMoveCallback = (event) =>
{
event.preventDefault()
const offset = {}
offset.x = event.clientX - previous.x
offset.y = event.clientY - previous.y
previous.x = event.clientX
previous.y = event.clientY
this.position.target.x += offset.x
this.position.target.y += offset.y
}
const mouseUpCallback = (event) =>
{
event.preventDefault()
window.removeEventListener('mousemove', mouseMoveCallback)
window.removeEventListener('mouseup', mouseUpCallback)
}
this.$area.addEventListener('mousedown', mouseDownCallback)
/**
* Items
*/
const mouseEnterCallback = () =>
{
this.shouldOpen = true
}
const mouseLeaveCallback = () =>
{
this.shouldOpen = false
}
this.$container.addEventListener('mouseenter', mouseEnterCallback)
this.$container.addEventListener('mouseleave', mouseLeaveCallback)
}
/**
* Set loop
*/
setLoop()
{
const loop = () =>
{
window.requestAnimationFrame(loop)
this.loop()
}
loop()
}
/**
* Loop
*/
loop()
{
/**
* Position
*/
this.position.current.x += (this.position.target.x - this.position.current.x) * 0.2
this.position.current.y += (this.position.target.y - this.position.current.y) * 0.2
const roundedX = Math.round(this.position.current.x * 10) / 10
const roundedY = Math.round(this.position.current.y * 10) / 10
// Verify if value changed
if(roundedX !== this.position.rounded.x || roundedY !== this.position.rounded.y)
{
this.position.rounded.x = roundedX
this.position.rounded.y = roundedY
this.$container.style.transform = `translateX(${this.position.rounded.x}px) translateY(${this.position.rounded.y}px)`
}
/**
* Items
*/
for(const item of this.items)
{
item.current.x += (item.target.x - item.current.x) * 0.2
item.current.y += (item.target.y - item.current.y) * 0.2
const roundedX = Math.round(item.current.x * 10) / 10
const roundedY = Math.round(item.current.y * 10) / 10
// Verify if value changed
if(roundedX !== item.rounded.x || roundedY !== item.rounded.y)
{
item.rounded.x = roundedX
item.rounded.y = roundedY
item.$element.style.transform = `translateX(${item.rounded.x}px) translateY(${item.rounded.y}px)`
}
}
}
/**
* Open
*/
open()
{
if(!this.canOpen)
{
return
}
const segmentAngle = Math.PI * 2 / this.items.length
const radius = 45
for(const item of this.items)
{
item.target.x = Math.sin(segmentAngle * item.index) * radius
item.target.y = Math.cos(segmentAngle * item.index) * radius
}
}
/**
* Close
*/
close()
{
for(const item of this.items)
{
item.target.x = 0
item.target.y = 0
}
}
}
<|start_filename|>src/index.js<|end_filename|>
import 'codemirror/lib/codemirror.css'
import './style/main.sass'
import Codedown from './javascript/Codedown.js'
window.codedown = new Codedown()
<|start_filename|>src/javascript/Logs.js<|end_filename|>
export default class Logs
{
/**
* Constructor
*/
constructor()
{
this.$container = document.querySelector('.js-logs')
this.$messages = this.$container.querySelector('.js-messages')
this.limit = 15
this.$allMessages = []
}
/**
* Add message
*/
addMessage(text = '', type = 'default')
{
// Fetch clock
const date = new Date()
let hours = `${date.getHours()}`
if(hours.length === 1)
{
hours = `0${hours}`
}
let minutes = `${date.getMinutes()}`
if(minutes.length === 1)
{
minutes = `0${minutes}`
}
let seconds = `${date.getSeconds()}`
if(seconds.length === 1)
{
seconds = `0${seconds}`
}
const clock = `${hours}:${minutes}:${seconds}`
// Create message element
const $message = document.createElement('div')
$message.classList.add('message')
$message.classList.add(type)
$message.innerText = `${clock} - ${text}`
this.$messages.appendChild($message)
this.$allMessages.push($message)
if(this.$allMessages.length > this.limit)
{
const $message = this.$allMessages.shift()
this.$messages.removeChild($message)
}
}
}
| jannickholm/notedown |
<|start_filename|>src/main/java/org/robotframework/javalib/keyword/CollisionKeyword.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.keyword;
import java.util.List;
import java.util.Map;
/**
* If a keyword name collision is detected, an instance of this class will
* be associated with the colliding name. Execution of this keyword will
* notify the user of the collision and the two classes that have the same
* keyword name.
*/
public class CollisionKeyword implements Keyword {
private String implementingClassName1;
private String implementingClassName2;
/**
* Creates a collision keyword
*
* @param implementingClassName1 name of first class with colliding
* keyword name
* @param implementingClassName2 name of second class with colliding
* keyword name
*/
public CollisionKeyword(String implementingClassName1, String implementingClassName2) {
this.implementingClassName1 = implementingClassName1;
this.implementingClassName2 = implementingClassName2;
}
/**
* Throws a {@link KeywordNameCollisionException} with an error
* message notifying the user of the collision and classes causing
* the collision.
*
* @throws KeywordNameCollisionException always throws this exception
*/
public Object execute(List arguments, Map kwargs) {
throw new KeywordNameCollisionException("Two keywords with same name not allowed. Alternative implementations available from " + implementingClassName1 + " and " + implementingClassName2 + ".");
}
@Override
public List<String> getArgumentTypes() {
// TODO Auto-generated method stub
return null;
}
}
<|start_filename|>src/main/java/org/robotframework/javalib/keyword/KeywordMap.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.keyword;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections4.functors.NotNullPredicate;
import org.apache.commons.collections4.functors.TruePredicate;
import org.apache.commons.collections4.functors.UniquePredicate;
import org.apache.commons.collections4.map.HashedMap;
import org.apache.commons.collections4.map.PredicatedMap;
/**
* A data structure for keywords and related values, such as instances or
* class names. Keyword names must be unique after normalization. Names
* and values have to be non null.
*/
public class KeywordMap {
private Map map;
public KeywordMap() {
map = new HashedMap();
map = PredicatedMap.predicatedMap(map, UniquePredicate.uniquePredicate(), TruePredicate.INSTANCE);
map = PredicatedMap.predicatedMap(map, NotNullPredicate.INSTANCE, NotNullPredicate.INSTANCE);
}
/**
* Adds a keyword to the map. Name will be normalized.
*
* @param keywordName name to be added
* @param value associated value
*/
public void add(String keywordName, Object value) {
map.put(normalizeKeywordName(keywordName), value);
}
/**
* Gets the value associated with given keyword name. Keyword name
* is normalized before searching.
*
* @param keywordName keyword name
* @return associated value
*/
public Object get(String keywordName) {
return map.get(normalizeKeywordName(keywordName));
}
/**
* Normalizes a keyword name. Removes spaces and special characters.
* Converts all letters to lower case.
*
* @param keywordName keyword name
* @return normalized keyword name
*/
public static String normalizeKeywordName(String keywordName) {
if (keywordName == null) {
return null;
}
keywordName = keywordName.toLowerCase().trim();
keywordName = keywordName.replaceAll(" ", "");
keywordName = keywordName.replaceAll("_", "");
keywordName = keywordName.replaceAll("\t", "");
keywordName = keywordName.replaceAll("\r", "");
keywordName = keywordName.replaceAll("\n", "");
return keywordName;
}
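// Illustrative example (added): normalizeKeywordName("My_Keyword Name")
// returns "mykeywordname".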
/**
* Number of pairs in the map
*
* @return number of pairs in the map
*/
public int size() {
return map.size();
}
/**
* Returns the keyword names. Similar to {@link Map#keySet()}.
*
* @return array of keyword names
*/
public List<String> getKeywordNames() {
return new ArrayList<String>(map.keySet());
}
/**
* Checks whether map contains a pair with given keyword name
*
* @param keywordName keyword name
* @return true if pair exists, false otherwise
*/
public boolean containsKeyword(String keywordName) {
return map.containsKey(normalizeKeywordName(keywordName));
}
/**
* Returns the underlying Map instance.
*
* @return underlying predicated HashedMap
*/
protected Map getUnderlyingMap() {
return map;
}
}
<|start_filename|>src/main/java/org/robotframework/javalib/library/AnnotationLibrary.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.library;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.robotframework.javalib.annotation.Autowired;
import org.robotframework.javalib.beans.annotation.AnnotationBasedKeywordFilter;
import org.robotframework.javalib.beans.annotation.IBeanLoader;
import org.robotframework.javalib.beans.annotation.KeywordBeanLoader;
import org.robotframework.javalib.beans.common.IClassFilter;
import org.robotframework.javalib.factory.AnnotationKeywordFactory;
import org.robotframework.javalib.factory.KeywordFactory;
import org.robotframework.javalib.keyword.DocumentedKeyword;
public class AnnotationLibrary extends KeywordFactoryBasedLibrary<DocumentedKeyword> implements
KeywordDocumentationRepository {
protected List<IBeanLoader> beanLoaders = new ArrayList<IBeanLoader>();
protected IClassFilter classFilter = new AnnotationBasedKeywordFilter();
private KeywordFactory<DocumentedKeyword> keywordFactory;
public AnnotationLibrary() {
}
public AnnotationLibrary(String keywordPattern) {
addKeywordPattern(keywordPattern);
}
public AnnotationLibrary(List<String> keywordPatterns) {
for (String pattern : keywordPatterns) {
addKeywordPattern(pattern);
}
}
@Override
protected KeywordFactory<DocumentedKeyword> createKeywordFactory() {
assumeKeywordPatternIsSet();
if (keywordFactory == null) {
List<Map> keywordBeansMaps = new ArrayList<Map>();
for (IBeanLoader beanLoader : beanLoaders) {
keywordBeansMaps.add(beanLoader.loadBeanDefinitions(classFilter));
}
keywordFactory = new AnnotationKeywordFactory(keywordBeansMaps);
List<Object> injectionValues = new ArrayList<Object>();
injectionValues.add(this);
for (Map keywordBeansMap : keywordBeansMaps) {
injectionValues.addAll(keywordBeansMap.values());
}
for (Object injectionTarget : injectionValues) {
autowireFields(injectionTarget, injectionValues);
}
}
return keywordFactory;
}
protected void autowireFields(Object injectionTarget, Collection<Object> injectionValues) {
Class<?> objectClass = injectionTarget.getClass();
while (objectClass != null) {
Field[] fields = objectClass.getDeclaredFields();
next_field: for (final Field field : fields) {
try {
if (field.isAnnotationPresent(Autowired.class)) {
if ((!Modifier.isPublic(field.getModifiers()) || !Modifier.isPublic(field.getDeclaringClass()
.getModifiers())) && !field.isAccessible()) {
field.setAccessible(true);
}
Class<?> fieldClass = field.getType();
for (Object injectionValue : injectionValues) {
if (injectionValue.getClass().equals(fieldClass)) {
field.set(injectionTarget, injectionValue);
continue next_field;
}
}
throw new IllegalArgumentException(String.format(
"Can't autowire field '%s' at keyword class '%s'.", field.getName(), injectionTarget
.getClass().getName()));
}
} catch (IllegalAccessException e) {
throw new IllegalArgumentException(String.format(
"Can't autowire field '%s' at keyword class '%s'.", field.getName(), injectionTarget
.getClass().getName()), e);
}
}
objectClass = objectClass.getSuperclass();
}
}
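// Illustrative example (added comment): given a keyword bean such as
//
// @RobotKeywords
// public class MyKeywords {
//     @Autowired
//     private SomeOtherKeywords helper; // hypothetical names for illustration
// }
//
// autowireFields() assigns the matching loaded bean instance (or this library
// instance itself) to every @Autowired field, walking up the superclass chain.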
public List<String> getKeywordArguments(String keywordName) {
List<String> argumentNames = createKeywordFactory().createKeyword(keywordName).getArgumentNames();
return argumentNames;
}
/**
* This method should be overridden in the Library implementation including
* the equals comparison for '__intro__'.
*
* Default implementation returns empty String for the '__intro__'.
*/
public String getKeywordDocumentation(String keywordName) {
if (keywordName.equals("__intro__"))
return "";
return createKeywordFactory().createKeyword(keywordName).getDocumentation();
}
@Override
public Object runKeyword(String keywordName, List args, Map kwargs) {
try {
return super.runKeyword(keywordName, args, kwargs);
} catch (RuntimeException e) {
throw retrieveInnerException(e);
}
}
@Override
public Object runKeyword(String keywordName, List args) {
try {
return super.runKeyword(keywordName, args);
} catch (RuntimeException e) {
throw retrieveInnerException(e);
}
}
public void addKeywordPattern(String keywordPattern) {
beanLoaders.add(new KeywordBeanLoader(keywordPattern, Thread.currentThread().getContextClassLoader()));
}
private void assumeKeywordPatternIsSet() {
if (beanLoaders.isEmpty()) {
throw new IllegalStateException("Keyword pattern must be set before calling getKeywordNames.");
}
}
private RuntimeException retrieveInnerException(RuntimeException e) {
Throwable cause = e.getCause();
if (cause != null && InvocationTargetException.class.equals(cause.getClass())) {
Throwable original = cause.getCause();
return new RuntimeException(original.getMessage(), original);
}
return e;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/beans/classpath/InterfaceBasedKeywordFilterTest.java<|end_filename|>
package org.robotframework.javalib.beans.classpath;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.beans.common.IClassFilter;
import org.robotframework.javalib.keyword.EmptyKeyword;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class InterfaceBasedKeywordFilterTest {
private IClassFilter keywordFilter = new InterfaceBasedKeywordFilter();
@Test
public void testIgnoresClassesThatAreNotKeywords() {
assertFalse(keywordFilter.accept(Object.class));
}
@Test
public void testIdentifiesKeywordClass() {
assertTrue(keywordFilter.accept(EmptyKeyword.class));
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/library/ClassLoadingIntegrationTest.java<|end_filename|>
package org.robotframework.javalib.library;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
public class ClassLoadingIntegrationTest {
private static MockClassLoader mockClassLoader;
@BeforeAll
public static void setUp() {
mockClassLoader = new MockClassLoader();
}
@Test
public void testClassPathLibraryUsesProvidedClassLoaderForKeywordCreation() {
ClassPathLibrary library = createClassPathLibraryWithMockClassLoader();
library.runKeyword("Empty Keyword", null);
assertClassWasLoaded("org.robotframework.javalib.keyword.EmptyKeyword");
}
@Test
public void testClassPathLibraryUsesProvidedClassLoaderForKeywordExtraction() {
ClassPathLibrary library = createClassPathLibraryWithMockClassLoader();
library.getKeywordNames();
assertProvidedClassLoaderWasUsedForSearching();
}
private void assertProvidedClassLoaderWasUsedForSearching() {
assertTrue(mockClassLoader.searchedResources.contains("org/"));
}
private void assertClassWasLoaded(String expectedClassToBeLoaded) {
if (mockClassLoader.loadedClasses.size() < 1) {
fail("0 classes loaded through custom class loader");
}
assertTrue(mockClassLoader.loadedClasses.contains(expectedClassToBeLoaded));
}
private ClassPathLibrary createClassPathLibraryWithMockClassLoader() {
ClassPathLibrary library = new ClassPathLibrary();
library.setKeywordPattern("org/**/keyword/**.class");
library.setClassLoader(mockClassLoader);
return library;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/autowired/AnnotatedAutowiredLibrary.java<|end_filename|>
package org.robotframework.javalib.autowired;
import java.util.List;
import org.robotframework.javalib.annotation.Autowired;
import org.robotframework.javalib.library.AnnotationLibrary;
public class AnnotatedAutowiredLibrary extends AnnotationLibrary {
@Autowired
private AnnotatedAutowiredKeywords1 annotatedAutowiredKeywords1;
public AnnotatedAutowiredKeywords1 getAnnotatedAutowiredKeywords1() {
return annotatedAutowiredKeywords1;
}
@Autowired
private AnnotatedAutowiredKeywords2 annotatedAutowiredKeywords2;
public AnnotatedAutowiredKeywords2 getAnnotatedAutowiredKeywords2() {
return annotatedAutowiredKeywords2;
}
@Autowired
private AnnotatedAutowiredLibrary annotatedAutowiredLibrary;
public AnnotatedAutowiredLibrary getAnnotatedAutowiredLibrary() {
return annotatedAutowiredLibrary;
}
public AnnotatedAutowiredLibrary() {
}
public AnnotatedAutowiredLibrary(String keywordPattern) {
super(keywordPattern);
}
public AnnotatedAutowiredLibrary(List<String> keywordPatterns) {
super(keywordPatterns);
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/keyword/CollisionKeywordTest.java<|end_filename|>
package org.robotframework.javalib.keyword;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class CollisionKeywordTest {
@Test
public void testExecutionThrowsException() {
String implementingClassName1 = "class org.robotframework.somecomponent.ImplementingClass";
String implementingClassName2 = "class org.robotframework.othercomponent.OtherImplementingClass";
CollisionKeyword collisionKeyword = new CollisionKeyword(implementingClassName1, implementingClassName2);
KeywordNameCollisionException e = assertThrows(KeywordNameCollisionException.class, () -> collisionKeyword.execute(null, null));
assertEquals("Two keywords with same name not allowed. Alternative implementations available from " + implementingClassName1 + " and " + implementingClassName2 + ".", e.getMessage());
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/keyword/ArgumentCheckingKeywordTest.java<|end_filename|>
package org.robotframework.javalib.keyword;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
public class ArgumentCheckingKeywordTest {
private RecordingAbstractKeyword fakeKeyword;
@BeforeEach
public void setupTest() {
this.fakeKeyword = new RecordingAbstractKeyword();
}
@Test
public void testExecuteDelegatesToOperate() {
fakeKeyword.execute(Arrays.asList(), null);
assertTrue(fakeKeyword.wasDelegatedToOperate);
}
@Test
public void testExecutePassesArgumentsToOperate() {
List args = Arrays.asList("argument1", "argument2");
fakeKeyword.expectedArgumentCount = 2;
fakeKeyword.execute(args, null);
assertEquals(args, fakeKeyword.arguments);
}
@Test
public void testExecutePassesReturnValueFromOperate() {
fakeKeyword.returnValue = "My Return Value";
assertEquals("My Return Value", fakeKeyword.execute(Arrays.asList(), null));
}
private class RecordingAbstractKeyword extends PreparableKeyword {
boolean wasDelegatedToOperate;
int expectedArgumentCount;
Object returnValue;
List arguments;
protected Object operate(List arguments) {
this.arguments = arguments;
wasDelegatedToOperate = true;
return returnValue;
}
public int getExpectedArgumentCount() {
return expectedArgumentCount;
}
public Object execute(List args, Map kwargs) {
return super.execute(args, kwargs);
}
@Override
public List<String> getArgumentTypes() {
// TODO Auto-generated method stub
return null;
}
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/keyword/KeywordMapTest.java<|end_filename|>
package org.robotframework.javalib.keyword;
import java.util.Arrays;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
public class KeywordMapTest {
private KeywordMap map;
private String keywordName = "My Keyword";
private String keywordValue = "Value";
@BeforeEach
protected void setUp() {
map = new KeywordMap();
}
@Test
public void testAddsKeywordsToMap() {
map.add(keywordName, keywordValue);
assertEquals(1, map.size());
}
@Test
public void testGetsValueUsingKeywordNameAsKey() {
map.add(keywordName, keywordValue);
assertEquals(keywordValue, map.get(keywordName));
}
@Test
public void testStoredKeywordNamesAreUnique() {
map.add(keywordName, "");
assertThrows(IllegalArgumentException.class, () -> map.add(keywordName, ""));
}
@Test
public void testNullKeywordNamesAreNotAllowed() {
assertThrows(IllegalArgumentException.class, () -> map.add(null, ""));
}
@Test
public void testNullKeywordValuesAreNotAllowed() {
assertThrows(IllegalArgumentException.class, () -> map.add("", null));
}
@Test
public void testNormalizesKeywordNames() {
map.add("Keyword Name", "");
assertTrue(map.getUnderlyingMap().containsKey("keywordname"));
}
@Test
public void testCanReturnsArrayOfKeywordNames() {
map.add("First Keyword", "");
map.add("Second Keyword", "");
String[] keywordNames = map.getKeywordNames().toArray(new String[0]);
assertTrue(Arrays.equals(new String[] { "firstkeyword", "secondkeyword" }, keywordNames));
}
@Test
public void testCanBeQueriedForContainedKeywords() {
map.add(keywordName, keywordValue);
assertTrue(map.containsKeyword(keywordName));
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/autowired/AnnotatedAutowiredKeywords1.java<|end_filename|>
package org.robotframework.javalib.autowired;
import org.robotframework.javalib.annotation.Autowired;
import org.robotframework.javalib.annotation.RobotKeywords;
@RobotKeywords
public class AnnotatedAutowiredKeywords1 {
@Autowired
private AnnotatedAutowiredKeywords1 annotatedAutowiredKeywords1;
public AnnotatedAutowiredKeywords1 getAnnotatedAutowiredKeywords1() {
return annotatedAutowiredKeywords1;
}
@Autowired
private AnnotatedAutowiredKeywords2 annotatedAutowiredKeywords2;
public AnnotatedAutowiredKeywords2 getAnnotatedAutowiredKeywords2() {
return annotatedAutowiredKeywords2;
}
@Autowired
private AnnotatedAutowiredLibrary annotatedAutowiredLibrary;
public AnnotatedAutowiredLibrary getAnnotatedAutowiredLibrary() {
return annotatedAutowiredLibrary;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/beans/common/AllAcceptingFilter.java<|end_filename|>
package org.robotframework.javalib.beans.common;
import org.robotframework.javalib.beans.common.IClassFilter;
public class AllAcceptingFilter implements IClassFilter {
public boolean accept(Class clazz) {
return true;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/factory/AnnotationKeywordFactoryTest.java<|end_filename|>
package org.robotframework.javalib.factory;
import java.util.*;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.beans.annotation.IKeywordExtractor;
import org.robotframework.javalib.keyword.DocumentedKeyword;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
import static org.mockito.Mockito.*;
public class AnnotationKeywordFactoryTest {
private static Object someKeywordBean = new Object();
private static Object anotherKeywordBean = new Object();
private static DocumentedKeyword keyword1 = mock(DocumentedKeyword.class);
private static DocumentedKeyword keyword2 = mock(DocumentedKeyword.class);
private List expectedKeywordNames = Arrays.asList("keywordname1", "keywordname2", "keywordname3", "keywordname4" );
private static KeywordFactory<DocumentedKeyword> keywordFactory;
private static IKeywordExtractor keywordExtractor;
private static Map keywordBeans = new HashMap() {
@Override
public Collection values() {
return new HashSet() {{ add(someKeywordBean); add(anotherKeywordBean); }};
}
};
@BeforeAll
public static void setUp() {
keywordExtractor = spy(IKeywordExtractor.class);
when(keywordExtractor.extractKeywords(someKeywordBean)).thenReturn(new HashMap() {{
put("keywordname1", keyword1);
put("keywordname2", keyword2);
}});
when(keywordExtractor.extractKeywords(anotherKeywordBean)).thenReturn(new HashMap() {{
put("keywordname3", null);
put("keywordname4", null);
}});
keywordFactory = new AnnotationKeywordFactory(keywordBeans) {
@Override
IKeywordExtractor createKeywordExtractor() {
return keywordExtractor;
}
};
}
@Test
public void testExtractsKeywordNamesFromKeywordBeans() {
List keywordNames = keywordFactory.getKeywordNames();
keywordNames.sort(Comparator.naturalOrder());
assertIterableEquals(expectedKeywordNames, keywordNames);
}
@Test
public void testExtractsKeywordsFromKeywordBeansWithNormalizedName() {
String keywordName1 = "Keyword Name 1";
String keywordName2 = "KEYWORD_NAME_2";
assertEquals(keyword1, keywordFactory.createKeyword(keywordName1));
assertEquals(keyword2, keywordFactory.createKeyword(keywordName2));
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/util/StdOutAndErrRedirecterTest.java<|end_filename|>
package org.robotframework.javalib.util;
import org.junit.jupiter.api.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
@TestInstance(PER_CLASS)
public class StdOutAndErrRedirecterTest {
private String emptyString = "";
private String logOutput = "StdOutAndErrRedirecterTest";
private StdStreamRedirecter streamRedirecter;
@BeforeEach
public void setUp() {
this.streamRedirecter = new StdStreamRedirecter();
this.streamRedirecter.redirectStdStreams();
}
@AfterEach
public void tearDown() {
streamRedirecter.resetStdStreams();
}
@Test
public void testRedirectsSystemOutToInternalBuffer() {
System.out.print(logOutput);
assertEquals(logOutput, streamRedirecter.getStdOutAsString());
}
@Test
public void testRedirectsSystemErrToInternalBuffer() {
System.err.print(logOutput);
assertEquals(logOutput, streamRedirecter.getStdErrAsString());
}
@Test
public void testResettingStreamsRedirectsSystemOutBackToSystemOut() {
streamRedirecter.resetStdStreams();
assertEquals(System.out, streamRedirecter.stdOut);
}
@Test
public void testResettingStreamsRedirectsSystemErrBackToSystemErr() {
streamRedirecter.resetStdStreams();
assertEquals(System.err, streamRedirecter.stdErr);
}
@Test
public void testGettingSystemOutEmptiesTheBuffer() {
System.out.print(logOutput);
streamRedirecter.getStdOutAsString();
assertEquals(emptyString, streamRedirecter.getStdOutAsString());
}
@Test
public void testGettingSystemErrEmptiesTheBuffer() {
System.err.print(logOutput);
streamRedirecter.getStdErrAsString();
assertEquals(emptyString, streamRedirecter.getStdErrAsString());
}
}
<|start_filename|>src/main/java/org/robotframework/javalib/reflection/KeywordInvoker.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.reflection;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.robotframework.javalib.annotation.ArgumentNames;
import org.robotframework.javalib.annotation.RobotKeyword;
public class KeywordInvoker implements IKeywordInvoker {
private final Method method;
private final Object obj;
public KeywordInvoker(Object obj, Method method) {
this.obj = obj;
this.method = method;
}
public List<String> getParameterNames() {
if (method.isAnnotationPresent(ArgumentNames.class)) {
// We now use the names more strictly than earlier, so make sure that varargs are marked correctly.
// https://github.com/robotframework/JavalibCore/wiki/AnnotationLibrary#argument-names only recommends
// marking varargs with *
List argumentNames = Arrays.asList(method.getAnnotation(ArgumentNames.class).value());
return argumentNames;
}
return getParameterNamesFromMethod();
}
public List<String> getParameterTypes() {
List<String> parameterTypes = new ArrayList<String>();
for (Class parameterClass : method.getParameterTypes()) {
parameterTypes.add(parameterClass.getSimpleName());
}
return parameterTypes;
}
public Object invoke(List args, Map kwargs) {
try {
List reflectionArgs = createArgumentCollector().collectArguments(args, kwargs);
Object[] reflectionArgsArray = reflectionArgs != null ? reflectionArgs.toArray() : null;
return method.invoke(obj, reflectionArgsArray);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public String getDocumentation() {
return method.getAnnotation(RobotKeyword.class).value();
}
IArgumentCollector createArgumentCollector() {
return new ArgumentCollector(method.getParameterTypes(), getParameterNames());
}
private List<String> getParameterNamesFromMethod() {
List<String> parameterNameList = this.getParameterNamesWithReflection();
// Marking varargs and kwargs correctly for RF
if (method.getParameterCount() > 0) {
int lastParameterIndex = method.getParameterCount() - 1;
Class lastParameterType = method.getParameters()[lastParameterIndex].getType();
if (lastParameterType.equals(List.class)
|| (lastParameterType.isArray() && lastParameterType != byte[].class)) {
parameterNameList.set(lastParameterIndex, "*" + parameterNameList.get(lastParameterIndex));
} else if (method.getParameters()[lastParameterIndex].getType().equals(Map.class)) {
if (lastParameterIndex > 1
&& (method.getParameters()[lastParameterIndex - 1].getType().equals(List.class)
|| (method.getParameters()[lastParameterIndex - 1].getType().isArray() && method.getParameters()[lastParameterIndex - 1].getType() != byte[].class))) {
parameterNameList.set(lastParameterIndex - 1, "*" + parameterNameList.get(lastParameterIndex - 1));
}
parameterNameList.set(lastParameterIndex, "**" + parameterNameList.get(lastParameterIndex));
}
}
return parameterNameList;
}
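// Illustrative example (added comment): for a keyword method declared as
// public void doSomething(String name, List<String> rest, Map<String, Object> named)
// the list produced above would be ["name", "*rest", "**named"] (assuming the
// parameter names are available via reflection); the '*' and '**' prefixes are
// how Robot Framework distinguishes varargs and kwargs.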
private List<String> getParameterNamesWithReflection() {
List<String> parameterNameList = new ArrayList<String>();
for (Parameter parameter : method.getParameters()) {
parameterNameList.add(parameter.getName());
}
return parameterNameList;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/beans/annotation/AnnotationBasedKeywordFilterTest.java<|end_filename|>
package org.robotframework.javalib.beans.annotation;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.beans.common.IClassFilter;
import org.robotframework.javalib.keyword.AnnotatedKeywords;
import static org.junit.jupiter.api.Assertions.*;
public class AnnotationBasedKeywordFilterTest {
private IClassFilter keywordFilter = new AnnotationBasedKeywordFilter();
@Test
public void testIdentifiesAnnotatedKeywordClasses() throws Exception {
assertTrue(keywordFilter.accept(AnnotatedKeywords.class));
}
@Test
public void testIgnoresClassesThatAreNotAnnotated() throws Exception {
assertFalse(keywordFilter.accept(Object.class));
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/beans/common/BasicKeywordFilterTest.java<|end_filename|>
package org.robotframework.javalib.beans.common;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.beans.common.BasicKeywordFilter.Condition;
import org.robotframework.javalib.keyword.CollisionKeyword;
import org.robotframework.javalib.keyword.Keyword;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
public class BasicKeywordFilterTest {
private BasicKeywordFilter keywordFilter = new BasicKeywordFilter();
@Test
public void testIgnoresInterfaces() throws Exception {
assertFalse(keywordFilter.accept(Keyword.class));
}
@Test
public void testIgnoresKeywordsWithoutDefaultConstructor() throws Exception {
assertFalse(keywordFilter.accept(CollisionKeyword.class));
}
@Test
public void testUsesAddedConditions() throws Exception {
Condition conditionSpy = spy(Condition.class);
when(conditionSpy.check(getClass())).thenReturn(false);
keywordFilter.addCondition(conditionSpy);
assertFalse(keywordFilter.accept(getClass()));
}
}
<|start_filename|>src/main/java/org/robotframework/javalib/factory/KeywordFactory.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.factory;
import java.util.List;
import org.robotframework.javalib.keyword.Keyword;
/**
* Creates instances of keywords.
*/
public interface KeywordFactory<T extends Keyword> {
/**
* Creates an instance of the class implementing the given keyword
* name
*
* @param keywordName keyword name (will be normalized, so pretty much
* any formatting will do)
* @return keyword instance
*/
T createKeyword(String keywordName);
/**
* Returns all the names of the keywords that this factory can create
*
* @return names of available keywords
*/
List<String> getKeywordNames();
}
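// Illustrative sketch only (not part of the library): a minimal KeywordFactory backed by a
// pre-populated map. The class name and map contents are hypothetical; a real factory would
// also normalize the incoming keyword name, as the javadoc above notes.
class MapBackedKeywordFactorySketch implements KeywordFactory<Keyword> {
    private final java.util.Map<String, Keyword> keywords;

    MapBackedKeywordFactorySketch(java.util.Map<String, Keyword> keywords) {
        this.keywords = keywords;
    }

    public Keyword createKeyword(String keywordName) {
        // Look up the instance registered under the (already normalized) name.
        return keywords.get(keywordName);
    }

    public List<String> getKeywordNames() {
        return new java.util.ArrayList<String>(keywords.keySet());
    }
}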
<|start_filename|>src/test/java/org/robotframework/javalib/beans/annotation/AnnotationKeywordExtractorTest.java<|end_filename|>
package org.robotframework.javalib.beans.annotation;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Map;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.annotation.ArgumentNames;
import org.robotframework.javalib.annotation.RobotKeyword;
import org.robotframework.javalib.keyword.DocumentedKeyword;
import static org.junit.jupiter.api.Assertions.*;
public class AnnotationKeywordExtractorTest {
private static boolean keywordWasCalled = false;
private static String keywordWithoutArgumentsExecutionResult = "keyword1ExecutionResult";
private static String keywordWithArgumentsExecutionResult = "keyword2ExecutionResult";
private static Map extractedKeywords;
private static DocumentedKeyword keywordWithArguments;
private static DocumentedKeyword keywordWithoutArguments;
private static DocumentedKeyword keywordWithoutReturnValue;
private static IKeywordExtractor extractor;
@BeforeAll
public static void setUp() {
extractor = new AnnotationKeywordExtractor();
extractedKeywords = extractor.extractKeywords(new MyKeywordsBean());
keywordWithArguments = (DocumentedKeyword) extractedKeywords.get("keywordWithArguments");
keywordWithoutArguments = (DocumentedKeyword) extractedKeywords.get("keywordWithoutArguments");
keywordWithoutReturnValue = (DocumentedKeyword) extractedKeywords.get("keywordWithoutReturnValue");
}
@Test
public void testExtractsCorrectNumberOfKeywordsFromKeywordBean() {
assertEquals(expectedKeywordCount(), extractedKeywords.size());
}
@Test
public void testExtractsKeywordsWithReturnValue() {
assertEquals(keywordWithoutArgumentsExecutionResult, keywordWithoutArguments.execute(null, null));
}
@Test
public void testExtractsKeywordsWithArguments() {
String keywordArgument = "someArgument";
assertEquals(keywordWithArgumentsExecutionResult + keywordArgument, keywordWithArguments.execute(Arrays.asList(keywordArgument), null));
}
@Test
public void testExtractsKeywordsWithoutReturnValue() {
assertNull(keywordWithoutReturnValue.execute(null, null));
assertTrue(keywordWasCalled);
}
@Test
public void testExtractsKeywordDocumentation() {
assertEquals("This is a keyword with arguments", keywordWithArguments.getDocumentation());
assertEquals("This is a keyword without arguments", keywordWithoutArguments.getDocumentation());
assertEquals("This is a keyword without return value", keywordWithoutReturnValue.getDocumentation());
}
@Test
public void testExtractsKeywordArguments() {
assertIterableEquals(Arrays.asList("overridenArgumentName"), keywordWithArguments.getArgumentNames());
}
private int expectedKeywordCount() {
Method[] methods = MyKeywordsBean.class.getMethods();
int keywordCount = 0;
for (Method method : methods) {
if (method.isAnnotationPresent(RobotKeyword.class)) {
++keywordCount;
}
}
return keywordCount;
}
public static class MyKeywordsBean {
@RobotKeyword("This is a keyword without arguments")
public Object keywordWithoutArguments() {
return keywordWithoutArgumentsExecutionResult;
}
@ArgumentNames({"overridenArgumentName"})
@RobotKeyword("This is a keyword with arguments")
public Object keywordWithArguments(String argument) {
return keywordWithArgumentsExecutionResult + argument;
}
@RobotKeyword("This is a keyword without return value")
public void keywordWithoutReturnValue() {
keywordWasCalled = true;
}
@SuppressWarnings("unused")
@RobotKeyword
private void annotatedPrivateMethod() {}
@SuppressWarnings("unused")
private void notAKeyword() {}
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/library/AnnotationLibraryMultipleKeywordsWithSameNameIntegrationTest.java<|end_filename|>
package org.robotframework.javalib.library;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
public class AnnotationLibraryMultipleKeywordsWithSameNameIntegrationTest {
private static AnnotationLibrary annotationLibrary;
private String keywordThatReturnsItsArguments = "keywordThatReturnsItsArguments";
@BeforeAll
public static void setUp() throws Exception {
List<String> searchPaths = new ArrayList<String>();
searchPaths.add("org/robotframework/**/keyword/**/**.class");
searchPaths.add("com/some/**/keyword/**/**.class");
annotationLibrary = new AnnotationLibrary(searchPaths);
}
@Test
public void testFindsAnnotatedKeywordsFromClassPath() {
List keywordNames = annotationLibrary.getKeywordNames();
List expectedKeywordNames = Arrays.asList("failingKeyword", "someKeyword", "overloaded",
keywordThatReturnsItsArguments, "keywordWithVariableArgumentCount", "variousArgs", "defaultValues",
"keywordWithObjectArgument", "getSomeObject", "keywordWithNumericArguments",
"myFailingKeyword", "myKeywordThatReturnsItsArguments", "byteArrayTest", "defaultAndVarargs", "onlyVarargs",
"useInt", "useInteger", "listAsArgument", "mapAsArgument");
keywordNames.sort(Comparator.naturalOrder());
expectedKeywordNames.sort(Comparator.naturalOrder());
assertIterableEquals(keywordNames, expectedKeywordNames);
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/library/KeywordFactoryBasedLibraryTest.java<|end_filename|>
package org.robotframework.javalib.library;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.factory.KeywordFactory;
import org.robotframework.javalib.keyword.Keyword;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
public class KeywordFactoryBasedLibraryTest {
private static KeywordFactoryBasedLibrary<Keyword> library;
private static KeywordFactory keywordFactorySpy;
private static Keyword keywordSpy;
private static String keywordName = "Keyword Name";
@BeforeAll
public static void setUp() {
keywordSpy = spy(Keyword.class);
keywordFactorySpy = spy(KeywordFactory.class);
when(keywordFactorySpy.createKeyword(keywordName)).thenReturn(keywordSpy);
library = new KeywordFactoryBasedLibrary<Keyword>() {
protected KeywordFactory createKeywordFactory() {
return keywordFactorySpy;
}
};
}
@Test
public void testUsesKeywordFactoryToCreateInstanceOfKeyword() throws Exception {
when(keywordSpy.execute(any(), any())).thenReturn(null);
when(keywordFactorySpy.createKeyword(keywordName)).thenReturn(keywordSpy);
library.runKeyword(keywordName, null);
}
@Test
public void testGetsKeywordNamesFromFactory() throws Exception {
when(keywordFactorySpy.getKeywordNames()).thenReturn(new ArrayList());
library.getKeywordNames();
}
@Test
public void testExecutesKeyword() throws Exception {
List args = Arrays.asList(new Object[0]);
when(keywordSpy.execute(args, null)).thenReturn(any());
library.runKeyword(keywordName, args);
}
@Test
public void testExecutionPassesKeywordReturnValue() throws Exception {
String keywordReturnValue = "Return Value";
when(keywordSpy.execute(null, null)).thenReturn(keywordReturnValue);
assertEquals(keywordReturnValue, library.runKeyword(keywordName, null));
}
@Test
public void testRunningAKeywordCreatesKeywordFactory() throws Exception {
keywordFactorySpyBasedLibrary keywordFactorySpyBasedLibrary = new keywordFactorySpyBasedLibrary();
keywordFactorySpyBasedLibrary.runKeyword(null, null);
assertTrue(keywordFactorySpyBasedLibrary.keywordFactoryWasCreated);
}
@Test
public void testGettingKeywordNamesCreatesKeywordFactory() throws Exception {
keywordFactorySpyBasedLibrary keywordFactorySpyBasedLibrary = new keywordFactorySpyBasedLibrary();
keywordFactorySpyBasedLibrary.getKeywordNames();
assertTrue(keywordFactorySpyBasedLibrary.keywordFactoryWasCreated);
}
@Test
public void testKeywordFactoryIsOnlyCreatedOnce() throws Exception {
keywordFactorySpyBasedLibrary keywordFactorySpyBasedLibrary = new keywordFactorySpyBasedLibrary();
keywordFactorySpyBasedLibrary.getKeywordNames();
assertTrue(keywordFactorySpyBasedLibrary.keywordFactoryWasCreated);
keywordFactorySpyBasedLibrary.keywordFactoryWasCreated = false;
keywordFactorySpyBasedLibrary.getKeywordNames();
assertFalse(keywordFactorySpyBasedLibrary.keywordFactoryWasCreated);
keywordFactorySpyBasedLibrary.keywordFactoryWasCreated = false;
keywordFactorySpyBasedLibrary.runKeyword(null, null);
assertFalse(keywordFactorySpyBasedLibrary.keywordFactoryWasCreated);
}
@Test
public void testDefaultClassLoaderIsThreadContextClassLoader() throws Exception {
assertEquals(Thread.currentThread().getContextClassLoader(), library.getClassLoader());
}
private class keywordFactorySpyBasedLibrary extends KeywordFactoryBasedLibrary<Keyword> {
boolean keywordFactoryWasCreated;
protected KeywordFactory<Keyword> createKeywordFactory() {
keywordFactoryWasCreated = true;
return new KeywordFactory<Keyword>() {
public Keyword createKeyword(String keywordName) {
return new Keyword() {
public Object execute(List arguments, Map kwargs) {
return null;
}
@Override
public List<String> getArgumentTypes() {
// TODO Auto-generated method stub
return null;
}
};
}
public List getKeywordNames() {
return new ArrayList();
}
};
}
}
}
<|start_filename|>src/test/java/my/same/keyword/AnnotatedKeywords.java<|end_filename|>
package my.same.keyword;
import org.opentest4j.AssertionFailedError;
import org.robotframework.javalib.annotation.RobotKeyword;
import org.robotframework.javalib.annotation.RobotKeywords;
@RobotKeywords
public class AnnotatedKeywords {
public static final String __PARANAMER_DATA =
"<init> \n" +
"myFailingKeyword \n";
@RobotKeyword
public void myFailingKeyword() {
throw new AssertionFailedError("Assertion failed");
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/keyword/AnnotatedKeywords.java<|end_filename|>
package org.robotframework.javalib.keyword;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.Assertions;
import org.opentest4j.AssertionFailedError;
import org.robotframework.javalib.annotation.ArgumentNames;
import org.robotframework.javalib.annotation.RobotKeyword;
import org.robotframework.javalib.annotation.RobotKeywordOverload;
import org.robotframework.javalib.annotation.RobotKeywords;
@RobotKeywords
public class AnnotatedKeywords {
public static final String __PARANAMER_DATA = "<init> \n" + "myKeyword \n"
+ "keywordThatReturnsItsArguments java.lang.String arg\n" + "someKeyword java.lang.String someArgument\n"
+ "keywordWithVariableArgumentCount java.lang.String,java.lang.String[] someArgument,restOfTheArguments\n";
@RobotKeyword
public void failingKeyword() {
throw new AssertionFailedError("Assertion failed");
}
@RobotKeyword
public String keywordThatReturnsItsArguments(String arg) {
return arg;
}
@RobotKeyword
@ArgumentNames({ "one", "two=", "three=" })
public Object overloaded(String one, String two, String three) {
return three;
}
@RobotKeywordOverload
public Object overloaded(String one) {
return one;
}
@RobotKeywordOverload
public Object overloaded(String one, int two) {
return two;
}
@ArgumentNames({ "overridenArgumentName" })
@RobotKeyword("Some documentation")
public void someKeyword(String someArgument) {
}
@RobotKeyword("This is a keyword with variable argument count")
public Object[] keywordWithVariableArgumentCount(String someArgument, String... restOfTheArguments) {
return restOfTheArguments;
}
@RobotKeyword
public void variousArgs(String arg, List<String> varargs, Map<String, Object> kwargs) {
System.out.println("arg: " + arg);
for (String varg: varargs)
System.out.println("vararg: " + varg);
for (String key: kwargs.keySet())
System.out.println("kwarg: " + key + " " + kwargs.get(key));
}
@RobotKeyword
@ArgumentNames({ "firstArg", "secondArg=two", "thirdArg=three" })
public String defaultValues(String first, String second, String third) {
return String.format("%s %s %s", first, second, third);
}
@RobotKeyword("This is a keyword with numeric arguments. The keyword will fail unless all are 42.")
public void keywordWithNumericArguments(long l1, Long l2, short s1, Short s2) {
if (l1 != 42 || l2 != 42 || s1 != 42 || s2 != 42)
throw new AssertionFailedError("All arguments should be 42.");
}
@RobotKeyword("This is a keyword with object argument.")
public SomeObject getSomeObject() {
SomeObject obj = new SomeObject();
obj.name = "Hello";
obj.value = "World";
return obj;
}
@RobotKeyword("This is a keyword with object argument.")
public void keywordWithObjectArgument(SomeInterface arg) {
if (arg == null)
throw new AssertionFailedError("Argument was null.");
}
@RobotKeyword("Handle byteArray")
public byte[] byteArrayTest(String expectedContent, byte[] bytesIn) {
if (!expectedContent.equals(new String(bytesIn))) {
throw new AssertionFailedError("Arguments were not same");
}
return expectedContent.getBytes();
}
public interface SomeInterface {
}
public class SomeObject implements SomeInterface {
public String name;
public String value;
}
@RobotKeyword
@ArgumentNames({"*Technical arguments"})
public String[] onlyVarargs(String[] arguments) {
return arguments;
}
@RobotKeyword
@ArgumentNames({"Image or text to wait", "Similarity of images=0.7", "*Technical arguments"})
public void defaultAndVarargs(String imageNameOrText, double similarity, String[] arguments) {
Assertions.assertEquals(0.7, similarity);
}
@RobotKeyword
@ArgumentNames({"port=0"})
public int useInt(int port) {
return port;
}
@RobotKeyword
@ArgumentNames({"port=0"})
public Integer useInteger(Integer port) {
return port;
}
@RobotKeyword
@ArgumentNames("arg")
public List<?> listAsArgument(List<?> arg) {
return arg;
}
@RobotKeyword
@ArgumentNames({ "arg" })
public Map<Object, Object> mapAsArgument(Map<Object, Object> arg) {
return arg;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/reflection/KeywordInvokerIntegrationTest.java<|end_filename|>
package org.robotframework.javalib.reflection;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.keyword.AnnotatedKeywords;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
public class KeywordInvokerIntegrationTest {
@Test
public void testReturnsParameterNames() throws Exception {
List expectedParameterNames = Arrays.asList("arg" );
assertIterableEquals(expectedParameterNames, getParameterNamesFromMethod("keywordThatReturnsItsArguments"));
}
@Test
public void testFindsKeywordArgumentsWithKeywordArgumentsAnnotation() throws Exception {
List expectedParameterNames = Arrays.asList("overridenArgumentName" );
assertIterableEquals(expectedParameterNames, getParameterNamesFromMethod("someKeyword"));
}
private List getParameterNamesFromMethod(String string) throws NoSuchMethodException {
IKeywordInvoker keywordInvoker = createKeywordInvoker(string);
return keywordInvoker.getParameterNames();
}
private IKeywordInvoker createKeywordInvoker(String methodName) throws NoSuchMethodException {
Method method = AnnotatedKeywords.class.getMethod(methodName, String.class);
return new KeywordInvoker(this, method);
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/library/ClassPathLibraryIntegrationTest.java<|end_filename|>
package org.robotframework.javalib.library;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
public class ClassPathLibraryIntegrationTest {
private static ClassPathLibrary classPathLibrary;
@BeforeAll
public static void setUp() {
classPathLibrary = new ClassPathLibrary("org/robotframework/**/**.class");
}
@Test
public void testFindsKeywords() {
List keywordNames = classPathLibrary.getKeywordNames();
assertEquals(4, keywordNames.size());
List expectedKeywordNames = Arrays.asList("recordingkeyword", "springkeyword", "emptykeyword", "conflictingkeyword");
keywordNames.sort(Comparator.naturalOrder());
expectedKeywordNames.sort(Comparator.naturalOrder());
assertIterableEquals(expectedKeywordNames, keywordNames);
}
@Test
public void testRunsKeyword() {
Object result = classPathLibrary.runKeyword("Conflicting Keyword", null);
assertEquals("Classpath Keyword", result.toString());
}
@Test
public void testUsesProvidedPattern() {
assertTrue(classPathLibrary.getKeywordNames().size() > 0);
classPathLibrary = new ClassPathLibrary();
classPathLibrary.setKeywordPattern("com/nonexistent/**.class");
assertEquals(0, classPathLibrary.getKeywordNames().size());
}
@Test
public void testThrowsExceptionIfKeywordPatternIsNotSet() {
try {
new ClassPathLibrary().getKeywordNames();
fail("Expected IllegalStateException to be thrown.");
} catch (IllegalStateException e) {
assertEquals("Keyword pattern must be set before calling getKeywordNames.", e.getMessage());
}
}
}
<|start_filename|>demo/src/org/robotframework/example/keyword/AnnotationKeywords.java<|end_filename|>
package org.robotframework.example.keyword;
import org.robotframework.javalib.annotation.RobotKeyword;
import org.robotframework.javalib.annotation.RobotKeywords;
@RobotKeywords
public class AnnotationKeywords {
@RobotKeyword
public void annotationBasedKeyword() {
System.out.println("Hello world");
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/reflection/KeywordInvokerHandlingInvocationsTest.java<|end_filename|>
package org.robotframework.javalib.reflection;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class KeywordInvokerHandlingInvocationsTest {
private KeywordInvoker keywordInvoker;
@BeforeEach
protected void setUp() {
keywordInvoker = new KeywordInvoker(this, new TestKeywordInvoker().getMethod("someMethod"));
}
@Test
public void testInvokesWrappedMethod() {
List args = Arrays.asList("someArg", "moreArgs");
assertEquals("someArg", keywordInvoker.invoke(args, null));
}
@Test
public void testGetsAnnotationValue() {
assertEquals("documentation", keywordInvoker.getDocumentation());
}
}
<|start_filename|>src/main/java/org/robotframework/javalib/library/KeywordFactoryBasedLibrary.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.library;
import java.util.List;
import java.util.Map;
import org.robotframework.javalib.factory.KeywordFactory;
import org.robotframework.javalib.keyword.Keyword;
/**
* A library that wraps a keyword factory. The keyword factory is used
* to create the keyword instance and this library simply executes the
* keyword. Subclasses must implement factory method
* {@link #createKeywordFactory()}.
*/
public abstract class KeywordFactoryBasedLibrary<T extends Keyword> implements RobotFrameworkDynamicAPI {
private KeywordFactory<T> keywordFactory;
private ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
/**
* @see RobotFrameworkDynamicAPI#runKeyword(String, List, Map)
*/
public Object runKeyword(String keywordName, List args, Map kwargs) {
Keyword keyword = getKeywordFactory().createKeyword(keywordName);
return keyword.execute(args, kwargs);
}
public Object runKeyword(String keywordName, List args) {
return this.runKeyword(keywordName, args, null);
}
/**
* @see RobotFrameworkDynamicAPI#getKeywordNames()
*/
public List<String> getKeywordNames() {
return getKeywordFactory().getKeywordNames();
}
// public List<String> getKeywordTypes(String keywordName) {
// return createKeywordFactory().createKeyword(keywordName).getArgumentTypes();
// }
/**
* Gets the classloader. Simply a property that the subclasses can use
* if the need to. The default classloader is the current thread's
* context class loader, {@link Thread#getContextClassLoader()}.
*
* @return classloader
*/
public ClassLoader getClassLoader() {
return classLoader;
}
/**
* Sets the classloader. Simply a property that the subclasses can use
* if the need to.
*
* @param classLoader new classloader
*/
public void setClassLoader(ClassLoader classLoader) {
this.classLoader = classLoader;
}
/**
* Creates a keyword factory. Must be implemented by subclasses.
* The keyword factory is created lazily, when either
* {@link #getKeywordNames()}, {@link #runKeyword(String, List)}
* or {@link #runKeyword(String, List, Map)}
* is called for the first time.
*
* @return keyword factory
*/
protected abstract KeywordFactory<T> createKeywordFactory();
KeywordFactory<T> getKeywordFactory() {
if (keywordFactory == null) {
keywordFactory = createKeywordFactory();
}
return keywordFactory;
}
}
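// Illustrative sketch only (not part of the library): a minimal subclass showing the factory
// method contract. The class name and the single "hello" keyword are hypothetical.
class SingleKeywordLibrarySketch extends KeywordFactoryBasedLibrary<Keyword> {
    protected KeywordFactory<Keyword> createKeywordFactory() {
        // Called lazily on the first getKeywordNames()/runKeyword() call, then cached.
        return new KeywordFactory<Keyword>() {
            public Keyword createKeyword(String keywordName) {
                return new Keyword() {
                    public Object execute(List arguments, Map kwargs) {
                        return "hello";
                    }
                    public List<String> getArgumentTypes() {
                        return null;
                    }
                };
            }
            public List<String> getKeywordNames() {
                return java.util.Arrays.asList("hello");
            }
        };
    }
}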
<|start_filename|>demo/run.cmd<|end_filename|>
@echo OFF
set base=%~dp0
set CLASSPATH=%base%src;%base%lib\javalib-core.jar;%CLASSPATH%
javac "%base%src\org\robotframework\example\keyword\AnnotationKeywords.java"
javac "%base%src\org\robotframework\example\keyword\InterfaceBasedKeyword.java"
jybot tests
<|start_filename|>src/test/java/org/robotframework/javalib/beans/annotation/AnnotationKeywordExtractorIntegrationTest.java<|end_filename|>
package org.robotframework.javalib.beans.annotation;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.keyword.AnnotatedKeywords;
import org.robotframework.javalib.keyword.DocumentedKeyword;
import org.robotframework.javalib.keyword.Keyword;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class AnnotationKeywordExtractorIntegrationTest {
private IKeywordExtractor<DocumentedKeyword> extractor = new AnnotationKeywordExtractor();
private Map<String, DocumentedKeyword> extractedKeywords = extractor.extractKeywords(new AnnotatedKeywords());
@Test
public void testReturnsKeywordNamesInCamelCase() {
assertTrue(extractedKeywords.keySet().contains("someKeyword"));
}
@Test
public void testExtractsKeywordArguments() {
DocumentedKeyword keywordThatReturnsItsArguments = extractedKeywords.get("keywordThatReturnsItsArguments");
DocumentedKeyword someKeyword = extractedKeywords.get("someKeyword");
assertIterableEquals(Arrays.asList("arg"), keywordThatReturnsItsArguments.getArgumentNames());
assertIterableEquals(Arrays.asList("overridenArgumentName"), someKeyword.getArgumentNames());
}
@Test
public void testExtractsKeywordsThatHandleVariableArgumentCount() {
Keyword keyword = extractedKeywords.get("keywordWithVariableArgumentCount");
assertLeftoverArgumentsAreCorrectlyGrouped(keyword, Arrays.asList("arg1", "arg2", "arg3", "arg4"));
assertLeftoverArgumentsAreCorrectlyGrouped(keyword, Arrays.asList( "arg1", "arg2", "arg3"));
assertLeftoverArgumentsAreCorrectlyGrouped(keyword, Arrays.asList( "arg1" ));
}
private void assertLeftoverArgumentsAreCorrectlyGrouped(Keyword keyword, List arguments) {
List expected = arguments.subList(1, arguments.size());
assertIterableEquals(expected, Arrays.asList((Object[])keyword.execute(arguments, null)));
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/util/KeywordNameNormalizerTest.java<|end_filename|>
package org.robotframework.javalib.util;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class KeywordNameNormalizerTest {
@Test
public void testNormalizesWhiteSpacesUnderScoresAndUppercaseCharacters() {
IKeywordNameNormalizer normalizer = new KeywordNameNormalizer();
String normalized = normalizer.normalize("sOmE string\tWI TH\rwHitespa ce\nandnewlinesandUnder_Scores");
assertEquals("somestringwithwhitespaceandnewlinesandunderscores", normalized);
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/keyword/SpringKeyword.java<|end_filename|>
package org.robotframework.javalib.keyword;
import java.util.List;
import java.util.Map;
import org.robotframework.javalib.keyword.Keyword;
public class SpringKeyword implements Keyword {
public Object execute(List args, Map kwargs) {
return "Spring Keyword";
}
@Override
public List<String> getArgumentTypes() {
// TODO Auto-generated method stub
return null;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/factory/AnnotationKeywordFactoryIntegrationTest.java<|end_filename|>
package org.robotframework.javalib.factory;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.annotation.RobotKeyword;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class AnnotationKeywordFactoryIntegrationTest {
private static AnnotationKeywordFactory annotationKeywordFactory;
private String keywordName = "someKeyword";
@BeforeAll
public static void setUp() {
annotationKeywordFactory = new AnnotationKeywordFactory(new HashMap() {{
put("keywordBean", new Object() {
@SuppressWarnings("unused")
@RobotKeyword
public void someKeyword() { }
});
}});
}
@Test
public void testFindsAnnotatedKeywordsFromKeywordBeans() throws Exception {
List expectedKeywordNames = Arrays.asList(keywordName);
assertIterableEquals(expectedKeywordNames, annotationKeywordFactory.getKeywordNames());
}
@Test
public void testNormalizesKeywordNamesBeforeExecution() throws Exception {
assertNotNull(annotationKeywordFactory.createKeyword(keywordName));
}
}
<|start_filename|>src/main/java/org/robotframework/javalib/reflection/ArgumentCollector.java<|end_filename|>
package org.robotframework.javalib.reflection;
import java.lang.reflect.Array;
import java.util.*;
import java.util.stream.Collectors;
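/**
 * Collects the values passed from Robot Framework (positional arguments, named arguments and
 * free keyword arguments) into the argument list expected by the keyword method: regular
 * parameters are filled from positional values, kwargs or declared defaults, trailing values
 * are grouped into the varargs parameter, and any remaining named arguments are passed on as
 * the kwargs map. Values are converted to the declared parameter types where possible.
 */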
public class ArgumentCollector implements IArgumentCollector {
private final Class<?>[] parameterTypes;
private final List<String> parameterNames;
public ArgumentCollector(Class<?>[] parameterTypes, List<String> parameterNames) {
this.parameterNames = parameterNames;
this.parameterTypes = parameterTypes;
}
@Override
public List collectArguments(List args, Map<String, Object> kwargs) {
List collectedArguments = new ArrayList();
Map<String, Object> cleanedKwargs = new HashMap<>();
if (kwargs != null) {
cleanedKwargs.putAll(kwargs);
}
boolean hasVarargs = this.keywordHasVarargs();
boolean hasKwargs = this.keywordHasKwargs();
if (parameterNames != null && parameterNames.size() > 0) {
List filteredList = parameterNames.stream().filter(line -> !line.contains("*")).collect(Collectors.toList());
for (int i = 0; i < filteredList.size(); i++) {
collectedArguments.add(null);
}
List varargs = new ArrayList();
for (int i = 0; i < parameterNames.size(); i++) {
String parameterName = parameterNames.get(i).split("=")[0];
boolean vararg = parameterName.contains("*");
boolean kwarg = parameterName.contains("**");
parameterName = parameterName.replace("*", "");
Object value = this.getParameterValue(parameterName, i, args, cleanedKwargs);
Class<?> argumentType = parameterTypes.length > i && !vararg ? parameterTypes[i] : Object.class;
if (!kwarg) {
if (vararg) {
if (value != null) {
varargs.add(convertToType(argumentType, value));
}
} else {
collectedArguments.set(i, convertToType(argumentType, value));
}
}
}
if (hasVarargs && args != null && args.size() > filteredList.size()) {
for (int i = filteredList.size()+1; i < args.size(); i++) {
varargs.add(args.get(i));
}
}
if (hasVarargs) {
collectedArguments.add(this.ensureCorrectVarargsType(varargs));
}
}
if (hasKwargs) {
collectedArguments.add(cleanedKwargs);
}
return collectedArguments;
}
private int getVarargsIndex() {
int parameterSize = parameterNames != null ? parameterNames.size(): -1;
if (parameterSize > 0 && parameterNames.get(parameterSize-1).startsWith("*") && !parameterNames.get(parameterSize-1).startsWith("**")) {
return parameterSize-1;
} else if (parameterSize > 1 && parameterNames.get(parameterSize-2).startsWith("*") && !parameterNames.get(parameterSize-2).startsWith("**")) {
return parameterSize-2;
} else {
return -1;
}
}
private Object getParameterValue(String parameterName, int i, List args, Map<String, Object> kwargs) {
String parameterDefaultValue = this.parameterNames.get(i).contains("=") && this.parameterNames.get(i).split("=").length > 1 ? this.parameterNames.get(i).split("=")[1] : null;
Object value = args != null && args.size() > i ? args.get(i) : parameterDefaultValue;
if (kwargs != null && kwargs.containsKey(parameterName)) {
value = kwargs.get(parameterName);
kwargs.remove(parameterName);
}
return value;
}
private Object ensureCorrectVarargsType(List varargs) {
int varargIndex = this.getVarargsIndex();
if (parameterTypes != null && varargIndex > -1 && parameterTypes[varargIndex].isArray()) {
Class<?> arrayClass = parameterTypes[varargIndex].getComponentType();
Object[] varargsArray = (Object[]) Array.newInstance(arrayClass, varargs.size());
for (int i = 0; i < varargs.size(); i++) {
varargsArray[i] = varargs.get(i);
}
return varargsArray;
} else {
return varargs;
}
}
private boolean keywordHasVarargs() {
return this.getVarargsIndex() > -1;
}
private boolean keywordHasKwargs() {
return parameterNames != null && parameterNames.size() > 0 &&
(parameterNames.get(parameterNames.size()-1).startsWith("**"));
}
private Object convertToType(Class<?> clazz, Object object) {
if (object != null) {
if (clazz == Integer.class || clazz == Integer.TYPE) {
return Integer.valueOf(object.toString());
} else if (clazz == Long.class || clazz == Long.TYPE) {
return Long.valueOf(object.toString());
} else if (clazz == Short.class || clazz == Short.TYPE) {
return Short.valueOf(object.toString());
} else if (clazz == Byte.class || clazz == Byte.TYPE) {
return Byte.valueOf(object.toString());
} else if (clazz == Boolean.class || clazz == Boolean.TYPE) {
return Boolean.valueOf(object.toString());
} else if (clazz == Float.class || clazz == Float.TYPE) {
return Float.valueOf(object.toString());
} else if (clazz == Double.class || clazz == Double.TYPE) {
return Double.valueOf(object.toString());
} else if (clazz == String.class) {
return object.toString();
} else if (object.getClass().isArray() && clazz.isAssignableFrom(List.class)) {
//convert array to list. Needed at least with jrobotremotelibrary
return Arrays.asList((Object[])object);
} else if (List.class.isAssignableFrom(object.getClass()) && clazz.isArray()) {
//convert list to array. Needed at least with jrobotremotelibrary
return ((List)object).toArray();
}
}
return object;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/keyword/PreparableKeywordTest.java<|end_filename|>
package org.robotframework.javalib.keyword;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
class PreparableKeywordTest {
private MockPreparableKeyword preparableKeyword;
@BeforeEach
public void setUp() {
this.preparableKeyword = new MockPreparableKeyword();
}
@Test
public void keywordIsPreparedBeforeExecution() throws Exception {
List args = Arrays.asList("Argument1");
preparableKeyword.execute(args, null);
assertPrepareWasCalledWith(args, null);
assertOperateWasCalledWith(args, null);
}
@Test
public void sequenceIsPrepareOperateFinish() throws Exception {
preparableKeyword.execute(null, null);
assertEquals(0, preparableKeyword.prepareCallSequenceNumber);
assertEquals(1, preparableKeyword.operateCallSequenceNumber);
assertEquals(2, preparableKeyword.finishCallSequenceNumber);
}
@Test
public void returnsResultFromOperate() throws Exception {
String returnValue = "Return Value";
preparableKeyword.operateReturnValue = returnValue;
assertEquals(returnValue, preparableKeyword.execute(null, null));
}
private void assertOperateWasCalledWith(List args, Map kwargs) {
assertTrue(preparableKeyword.operateWasCalled);
assertEquals(args, preparableKeyword.operateArguments);
}
private void assertPrepareWasCalledWith(List args, Map kwargs) {
assertTrue(preparableKeyword.prepareWasCalled);
assertEquals(args, preparableKeyword.prepareArguments);
}
private class MockPreparableKeyword extends PreparableKeyword {
boolean prepareWasCalled;
boolean operateWasCalled;
List prepareArguments;
List operateArguments;
int callCount;
int prepareCallSequenceNumber;
int operateCallSequenceNumber;
int finishCallSequenceNumber;
Object operateReturnValue;
protected void prepare(List arguments) {
prepareArguments = arguments;
prepareWasCalled = true;
prepareCallSequenceNumber = callCount;
callCount++;
}
protected Object operate(List arguments) {
operateArguments = arguments;
operateWasCalled = true;
operateCallSequenceNumber = callCount;
callCount++;
return operateReturnValue;
}
protected void finish(List arguments) {
finishCallSequenceNumber = callCount;
callCount++;
}
@Override
public List<String> getArgumentTypes() {
// TODO Auto-generated method stub
return null;
}
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/mocks/RecordingKeyword.java<|end_filename|>
package org.robotframework.javalib.mocks;
import java.util.List;
import java.util.Map;
import org.robotframework.javalib.keyword.Keyword;
public class RecordingKeyword implements Keyword {
public List arguments;
public boolean executed;
public Object returnValue;
public Object execute(List arguments, Map kwargs) {
this.arguments = arguments;
executed = true;
return returnValue;
}
@Override
public List<String> getArgumentTypes() {
// TODO Auto-generated method stub
return null;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/library/AnnotationLibraryTest.java<|end_filename|>
package org.robotframework.javalib.library;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.robotframework.javalib.beans.annotation.KeywordBeanLoader;
import java.lang.reflect.Field;
import static org.junit.jupiter.api.Assertions.*;
public class AnnotationLibraryTest {
private static AnnotationLibrary annotationLibrary;
private static String keywordPattern = "somePattern";
private static KeywordBeanLoader beanLoaderAtInitialization;
private static KeywordBeanLoader beanLoaderAfterSettingKeywordPattern;
@BeforeAll
public static void setUp() throws Exception {
annotationLibrary = new AnnotationLibrary();
beanLoaderAtInitialization = extractBeanLoaderFromAnnotationLibrary();
annotationLibrary.addKeywordPattern(keywordPattern);
beanLoaderAfterSettingKeywordPattern = extractBeanLoaderFromAnnotationLibrary();
}
@Test
public void testThrowsExceptionIfKeywordPatternIsNotSet() {
try {
new AnnotationLibrary().getKeywordNames();
fail("Expected IllegalStateException to be thrown.");
} catch (IllegalStateException e) {
assertEquals("Keyword pattern must be set before calling getKeywordNames.", e.getMessage());
}
}
@Test
public void testCreatesNewBeanLoaderWhenKeywordPatternSet() {
assertNotSame(beanLoaderAtInitialization, beanLoaderAfterSettingKeywordPattern);
}
@Test
public void testSetsKeywordPatternToBeanLoader() throws IllegalAccessException {
String extractedKeywordPattern = extractKeywordPatternFrom(beanLoaderAfterSettingKeywordPattern);
assertEquals(keywordPattern, extractedKeywordPattern);
}
private String extractKeywordPatternFrom(KeywordBeanLoader beanLoader) throws IllegalAccessException {
for (Field f: fields(beanLoader)) {
if (f.getName().equals("keywordPattern")) {
f.setAccessible(true);
return (String) f.get(beanLoader);
}
}
return null;
}
private static Field[] fields(KeywordBeanLoader beanLoader) {
return beanLoader.getClass().getDeclaredFields();
}
private static KeywordBeanLoader extractBeanLoaderFromAnnotationLibrary() {
try {
return (KeywordBeanLoader) annotationLibrary.beanLoaders.get(0);
} catch (IndexOutOfBoundsException e){
return null;
}
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/reflection/ArgumentCollectorTest.java<|end_filename|>
package org.robotframework.javalib.reflection;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.*;
public class ArgumentCollectorTest {
private List providedArguments = Arrays.asList("arg", "*varargs", "**kwargs");
private Class<?>[] argumentTypes = new Class[] { String.class, List.class, Map.class};
@Test
void collectArguments() {
IArgumentCollector collector = new ArgumentCollector(argumentTypes, providedArguments);
List<String> args = Arrays.asList("1","2");
Map<String, Object> kwargs = Collections.singletonMap("kw", 3);
List collectedArgs = collector.collectArguments(args, kwargs);
assertTrue(collectedArgs.size() == 3);
assertTrue(collectedArgs.get(1) instanceof List);
assertTrue(((Map)collectedArgs.get(2)).size() == 1);
}
@Test
void namedArguments() {
IArgumentCollector collector = new ArgumentCollector(argumentTypes, providedArguments);
List<String> args = Arrays.asList();
Map<String, Object> kwargs = Collections.singletonMap("arg", "value");
List collectedArgs = collector.collectArguments(args, kwargs);
assertEquals("value", collectedArgs.get(0));
assertTrue(collectedArgs.size() == 3);
assertTrue(collectedArgs.get(2) instanceof Map);
}
@Test
void varargsTypeInt() {
List providedArguments = Arrays.asList("arg", "*varargs");
Class<?>[] argumentTypes = new Class[] { String.class, Integer[].class};
IArgumentCollector collector = new ArgumentCollector(argumentTypes, providedArguments);
List<Integer> args = Arrays.asList(2, 3, 4);
List collectedArgs = collector.collectArguments(args, null);
assertEquals(2, collectedArgs.size());
assertTrue(collectedArgs.get(1).getClass().isArray());
assertEquals(((Integer[])collectedArgs.get(1))[0].getClass(), Integer.class);
assertEquals(((Integer[])collectedArgs.get(1))[1].getClass(), Integer.class);
}
@Test
void varargsTypeString() {
List providedArguments = Arrays.asList("arg", "*varargs");
Class<?>[] argumentTypes = new Class[] { String.class, String[].class};
IArgumentCollector collector = new ArgumentCollector(argumentTypes, providedArguments);
List<String> args = Arrays.asList("2", "3", "4");
List collectedArgs = collector.collectArguments(args, null);
assertEquals(2, collectedArgs.size());
assertTrue(collectedArgs.get(1).getClass().isArray());
assertEquals(String.class, ((Object[])collectedArgs.get(1))[0].getClass());
assertEquals(String.class, ((Object[])collectedArgs.get(1))[1].getClass());
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/library/MockClassLoader.java<|end_filename|>
package org.robotframework.javalib.library;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Enumeration;
public class MockClassLoader extends URLClassLoader {
public ArrayList loadedClasses = new ArrayList();
public ArrayList searchedResources = new ArrayList();
public MockClassLoader() {
super(new URL[0], Thread.currentThread().getContextClassLoader());
}
public Class loadClass(String name) throws ClassNotFoundException {
loadedClasses.add(name);
return super.loadClass(name);
}
public Enumeration findResources(String name) throws IOException {
searchedResources.add(name);
return super.findResources(name);
}
public void resetLists() {
loadedClasses.clear();
searchedResources.clear();
}
}
<|start_filename|>demo/src/org/robotframework/example/keyword/InterfaceBasedKeyword.java<|end_filename|>
package org.robotframework.example.keyword;
import java.util.List;
import java.util.Map;
import org.robotframework.javalib.keyword.Keyword;
public class InterfaceBasedKeyword implements Keyword {
    public Object execute(List arguments, Map kwargs) {
        System.out.println("Hello World!");
        return Boolean.TRUE;
    }
    public List<String> getArgumentTypes() {
        return null;
    }
}
<|start_filename|>src/test/java/org/robotframework/javalib/library/AnnotationLibraryWithMetaDataTest.java<|end_filename|>
package org.robotframework.javalib.library;
import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.robotframework.javalib.factory.KeywordFactory;
import org.robotframework.javalib.keyword.DocumentedKeyword;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.when;
public class AnnotationLibraryWithMetaDataTest {
private static String keywordName = "somekeyword";
private static String keywordDocumentation = "documentation";
private static AnnotationLibrary annotationLibrary;
private static List keywordArguments = Arrays.asList("someArgument");
@BeforeAll
public static void setUp() {
final KeywordFactory<DocumentedKeyword> keywordFactory = createKeywordFactory();
annotationLibrary = new AnnotationLibrary() {
@Override
protected KeywordFactory<DocumentedKeyword> createKeywordFactory() {
return keywordFactory;
}
};
}
@Test
public void testGetsKeywordDocumentationFromKeywordFactory() {
assertEquals(keywordDocumentation, annotationLibrary.getKeywordDocumentation(keywordName));
}
@Test
public void testGetsKeywordArgumentsFromKeywordFactory() {
assertIterableEquals(keywordArguments, annotationLibrary.getKeywordArguments(keywordName));
}
private static KeywordFactory<DocumentedKeyword> createKeywordFactory() {
DocumentedKeyword documentedKeywordSpy = Mockito.spy(DocumentedKeyword.class);
when(documentedKeywordSpy.getArgumentNames()).thenReturn(keywordArguments);
when(documentedKeywordSpy.getDocumentation()).thenReturn(keywordDocumentation);
KeywordFactory keywordFactorySpy = Mockito.spy(KeywordFactory.class);
when(keywordFactorySpy.createKeyword(keywordName)).thenReturn(documentedKeywordSpy);
return keywordFactorySpy;
}
}
<|start_filename|>src/test/java/org/robotframework/javalib/library/AnnotationLibraryLoadingBeansTest.java<|end_filename|>
package org.robotframework.javalib.library;
import java.util.*;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.robotframework.javalib.annotation.RobotKeyword;
import org.robotframework.javalib.annotation.RobotKeywords;
import org.robotframework.javalib.beans.annotation.IBeanLoader;
import org.robotframework.javalib.beans.common.IClassFilter;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
public class AnnotationLibraryLoadingBeansTest {
private AnnotationLibrary annotationLibrary = new AnnotationLibrary();
@Test
public void loadsKeywordClassesWithBeanLoader() throws Exception {
injectBeanDefinitionsToAnnotationLibrary();
List expectedKeywordNames = Arrays.asList("someKeyword", "anotherKeyword");
assertIterableEquals(expectedKeywordNames, annotationLibrary.getKeywordNames());
}
private void injectBeanDefinitionsToAnnotationLibrary() {
IBeanLoader mockBeanLoader = Mockito.mock(IBeanLoader.class);
IClassFilter mockClassFilter = Mockito.mock(IClassFilter.class);
List<IBeanLoader> beanLoaders = new ArrayList<IBeanLoader>();
beanLoaders.add(mockBeanLoader);
annotationLibrary.beanLoaders = beanLoaders;
annotationLibrary.classFilter = mockClassFilter;
Mockito.when(mockBeanLoader.loadBeanDefinitions(mockClassFilter)).thenReturn(createKeywordBeans());
}
private Map createKeywordBeans() {
return new HashMap() {{
put("keywordsBean1", new SomeKeywords());
put("keywordsBean2", new AnotherKeywords());
}};
}
@RobotKeywords
private static class SomeKeywords {
@RobotKeyword
public void someKeyword() { }
}
@RobotKeywords
private static class AnotherKeywords {
@RobotKeyword
public void anotherKeyword() { }
}
}
<|start_filename|>src/main/java/org/robotframework/javalib/library/RobotFrameworkDynamicAPI.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.library;
import java.util.List;
import java.util.Map;
/**
* A Java library for Robot Framework.
*/
public interface RobotFrameworkDynamicAPI {
/**
* Returns all the keywords this library contains
*
* @return names of keywords this library contains
*/
List<String> getKeywordNames();
/**
* Runs a keyword and returns the result. If an exception is thrown
* the keyword fails, otherwise it passes.
*
* @param name keyword name to run
* @param arguments arguments for the keyword
* @return keyword return value
*/
Object runKeyword(String name, List arguments);
/**
* Runs a keyword and returns the result. If an exception is thrown
* the keyword fails, otherwise it passes.
*
* @param name keyword name to run
* @param arguments arguments for the keyword
* @param kwargs named arguments for the keyword
* @return keyword return value
*/
Object runKeyword(String name, List arguments, Map kwargs);
// List<String> getKeywordArguments(String name);
//
// List<String> getKeywordTypes(String name);
//
//// List<String> getKeywordTags(String name);
//
// String getKeywordDocumentation(String name);
}
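// Illustrative sketch only (not part of the library): one way a caller could drive the dynamic
// API. The class name, the classpath pattern and the keyword name are hypothetical examples.
class DynamicApiUsageSketch {
    public static void main(String[] args) {
        RobotFrameworkDynamicAPI library = new AnnotationLibrary("com/example/**/**.class");
        // List every keyword the library exposes.
        for (String name : library.getKeywordNames()) {
            System.out.println(name);
        }
        // Run a keyword by name with a single positional argument and print its return value.
        System.out.println(library.runKeyword("someKeyword", java.util.Arrays.asList("argument")));
    }
}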
<|start_filename|>src/test/java/org/robotframework/javalib/library/AnnotationLibraryIntegrationTest.java<|end_filename|>
package org.robotframework.javalib.library;
import java.util.*;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
public class AnnotationLibraryIntegrationTest {
private static AnnotationLibrary annotationLibrary;
private String keywordThatReturnsItsArguments = "keywordThatReturnsItsArguments";
private String byteArrayTest = "byteArrayTest";
@BeforeAll
public static void setUp() {
annotationLibrary = new AnnotationLibrary("org/robotframework/**/keyword/**/**.class");
}
@Test
public void findsAnnotatedKeywordsFromClassPath() throws Exception {
List keywordNames = annotationLibrary.getKeywordNames();
List expectedKeywordNames = Arrays.asList("failingKeyword", "someKeyword", "overloaded",
keywordThatReturnsItsArguments, "keywordWithVariableArgumentCount", "variousArgs", "defaultValues",
"keywordWithObjectArgument", "getSomeObject", "keywordWithNumericArguments", byteArrayTest, "defaultAndVarargs", "onlyVarargs",
"useInt", "useInteger", "listAsArgument", "mapAsArgument");
keywordNames.sort(Comparator.naturalOrder());
expectedKeywordNames.sort(Comparator.naturalOrder());
assertIterableEquals(keywordNames, expectedKeywordNames);
}
@Test
public void variousArgsKeywordRuns() {
String keywordName = "variousArgs";
List arguments = null;
Map<String, Object> kwargs = Collections.singletonMap("arg", (Object)"world");
Object executionResult = annotationLibrary.runKeyword(keywordName, arguments, kwargs);
}
@Test
public void runsKeywords() throws Exception {
String keywordArgument = "someArgument";
Object executionResult = annotationLibrary.runKeyword(keywordThatReturnsItsArguments,
Arrays.asList(keywordArgument));
assertEquals(keywordArgument, executionResult);
}
@Test
public void testOverloading() throws Exception {
assertEquals(2, annotationLibrary.runKeyword("overloaded", Arrays.asList("one", 2)));
assertEquals("one", annotationLibrary.runKeyword("overloaded", Arrays.asList("one")));
assertEquals("3", annotationLibrary.runKeyword("overloaded", Arrays.asList("one", "two", "3")));
}
@Test
public void testFindsKeywordDocumentation() throws Exception {
String documentation = annotationLibrary.getKeywordDocumentation("someKeyword");
assertEquals("Some documentation", documentation);
}
@Test
public void testFindsKeywordArguments() throws Exception {
List keywordArguments = annotationLibrary.getKeywordArguments("keywordThatReturnsItsArguments");
assertIterableEquals(keywordArguments, Arrays.asList("arg"));
}
@Test
public void testFindsKeywordArgumentsWithKeywordArgumentsAnnotation() throws Exception {
List keywordArguments = annotationLibrary.getKeywordArguments("someKeyword");
assertIterableEquals(keywordArguments, Arrays.asList("overridenArgumentName"));
}
@Test
public void testExtractsInnerExceptionFromInvocationTargetException() throws Exception {
try {
annotationLibrary.runKeyword("Failing Keyword", null);
fail();
} catch (RuntimeException e) {
assertEquals("Assertion failed", e.getMessage());
}
}
@Test
public void testByteArrayHandling() {
String testString = "testString";
annotationLibrary.runKeyword(byteArrayTest, Arrays.asList(testString, testString.getBytes()));
}
@Test
public void testByteArrayHandlingResponse() {
String testString = "testString";
Object response = annotationLibrary.runKeyword(byteArrayTest, Arrays.asList(testString, testString.getBytes()));
assertEquals(testString, new String((byte[]) response));
}
@Test
public void onlyVarargs() {
annotationLibrary.runKeyword("onlyVarargs", Arrays.asList("one given argument"));
}
@Test
public void defaultAndVarargs() {
annotationLibrary.runKeyword("defaultAndVarargs", Arrays.asList("one given argument"));
}
@Test
public void useInt() {
Object response = annotationLibrary.runKeyword("useInt", Arrays.asList());
assertEquals(0, response);
}
@Test
public void useInteger() {
Object response = annotationLibrary.runKeyword("useInteger", Arrays.asList());
assertEquals(0, response);
}
@Test
public void listAsArgument() {
Object testList = Arrays.asList("first", 2, 4.4);
Object response = annotationLibrary.runKeyword("listAsArgument", Arrays.asList(testList));
assertEquals(testList, response);
}
@Test
public void mapAsArgument() {
Map testMap = Collections.singletonMap("first_key", "first_value");
Object response = annotationLibrary.runKeyword("mapAsArgument", Arrays.asList(testMap));
assertEquals(testMap, response);
}
}
<|start_filename|>src/main/java/org/robotframework/javalib/keyword/PreparableKeyword.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.keyword;
import java.util.List;
import java.util.Map;
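/**
 * Template for keywords that need setup and teardown around the actual work: execute() first
 * calls prepare(), then operate() (whose return value becomes the keyword result), and finally
 * finish(), which runs even if operate() throws.
 */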
public abstract class PreparableKeyword implements Keyword {
public Object execute(List arguments, Map kwargs) {
prepare(arguments);
try {
return operate(arguments);
} finally {
finish(arguments);
}
}
protected void prepare(List arguments) {}
protected void finish(List arguments) {}
protected abstract Object operate(List arguments);
}
<|start_filename|>src/main/java/org/robotframework/javalib/beans/annotation/KeywordBeanLoader.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.beans.annotation;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.JarURLConnection;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import org.robotframework.javalib.beans.common.IClassFilter;
import org.robotframework.javalib.util.AntPathMatcher;
import org.robotframework.javalib.util.KeywordNameNormalizer;
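/**
 * Loads keyword bean definitions from the classpath: resolves the root of the configured
 * Ant-style keyword pattern, walks the matching .class entries in directories and jar files,
 * instantiates the classes accepted by the given class filter, and returns the instances
 * keyed by their normalized class names.
 */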
public class KeywordBeanLoader implements IBeanLoader {
protected final String keywordPattern;
private final ClassLoader loader;
private final AntPathMatcher pathMatcher = new AntPathMatcher();
public KeywordBeanLoader(String keywordPattern, ClassLoader loader) {
this.keywordPattern = keywordPattern;
this.loader = loader;
}
public Map loadBeanDefinitions(IClassFilter classFilter) {
Map kws = new HashMap<String, Object>();
Enumeration<URL> entries = getRootResources();
while (entries.hasMoreElements()) {
try {
addURLKeywords(classFilter, kws, entries.nextElement());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return kws;
}
private void addURLKeywords(IClassFilter classFilter, Map kws, URL url) throws IOException {
if (url.getProtocol().startsWith("jar")) {
addJarKeywords(classFilter, kws, url);
} else if (url.getProtocol().startsWith("file")) {
addFileKeywords(classFilter, kws, url);
} else {
throw new RuntimeException("Unsupported URL type "+url);
}
}
private void addFileKeywords(IClassFilter classFilter, Map kws, URL url) throws IOException {
File urlFile = new File(URLDecoder.decode(url.getFile(), "UTF-8"));
if (urlFile.isDirectory()) {
for (String f: getChildrenFrom(pathMatcher.getRoot(keywordPattern), urlFile)) {
addKeyword(classFilter, kws, f);
}
}
}
private void addJarKeywords(IClassFilter classFilter, Map kws, URL url) throws IOException {
JarURLConnection connection =
(JarURLConnection) url.openConnection();
File jar = new File(URLDecoder.decode(connection.getJarFileURL().getFile(), "UTF-8"));
JarInputStream is = new JarInputStream(new FileInputStream(jar));
JarEntry entry;
while( (entry = is.getNextJarEntry()) != null) {
if(entry.getName().endsWith(".class")) {
addKeyword(classFilter, kws, entry.getName());
}
}
}
private Enumeration<URL> getRootResources() {
String root = pathMatcher.getRoot(keywordPattern);
try {
return loader.getResources(root);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private ArrayList<String> getChildrenFrom(String root, File file) {
ArrayList<String> classes = new ArrayList<String>();
for (File f: file.listFiles()) {
if (f.isFile()) {
if (f.getName().endsWith(".class"))
classes.add(root + f.getName());
} else
classes.addAll(getChildrenFrom(root + f.getName() + "/", f));
}
return classes;
}
private void addKeyword(IClassFilter classFilter, Map<String, Object> kws, String className) throws IOException {
if (className.indexOf("$")!=-1)
return;
if (className.startsWith("java/") || className.startsWith("javax/") )
return;
if (!pathMatcher.match(keywordPattern, className))
return;
String name = className.substring(0, className.length() - 6);
Class cls = loadClass(name);
if (classFilter.accept(cls))
putInstance(kws, name, cls);
}
private void putInstance(Map<String, Object> kws, String name, Class cls) {
try {
kws.put(new KeywordNameNormalizer().normalize(name), cls.newInstance());
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
private Class loadClass(String name) {
try {
return loader.loadClass(name.replace("/", "."));
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
}
<|start_filename|>src/main/java/org/robotframework/javalib/keyword/ExpectedArgumentCountAware.java<|end_filename|>
/*
* Copyright 2013 Nokia Solutions and Networks Oyj
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotframework.javalib.keyword;
public interface ExpectedArgumentCountAware {
/**
* Returns the number of arguments the keyword expects to get from
* Robot. Before the call to org.robotframework.javalib.keyword.Keyword#execute,
* the return value of this
* method is used to check whether we have the correct number of
* arguments. All implementing classes must implement this method.
*
* @return number of arguments expected from Robot
*/
int getExpectedArgumentCount();
}
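/*
 * A minimal hypothetical implementation sketch (not part of this library's sources),
 * shown only to illustrate the contract described above: a keyword that consumes two
 * arguments simply reports that count, e.g.
 *
 *   public class AddTwoNumbersKeyword implements ExpectedArgumentCountAware {
 *       public int getExpectedArgumentCount() {
 *           return 2;
 *       }
 *   }
 */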
| shan-96/JavalibCore |
<|start_filename|>chat/build/all.js<|end_filename|>
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
var SonicSocket = require('../lib/sonic-socket.js');
var SonicServer = require('../lib/sonic-server.js');
var SonicCoder = require('../lib/sonic-coder.js');
var ALPHABET = ' abcdefghijklmnopqrstuvwxyz';
// Create an ultranet server.
var sonicServer = new SonicServer({alphabet: ALPHABET, debug: true});
// Create an ultranet socket.
var sonicSocket = new SonicSocket({alphabet: ALPHABET});
var history = document.querySelector('#history');
var wrap = document.querySelector('#history-wrap');
var form = document.querySelector('form');
var input = document.querySelector('input');
function init() {
sonicServer.start();
sonicServer.on('message', onIncomingChat);
form.addEventListener('submit', onSubmitForm);
}
function onSubmitForm(e) {
// Get contents of input element.
var message = input.value;
// Send via oscillator.
sonicSocket.send(message);
// Clear the input element.
input.value = '';
// Don't actually submit the form.
e.preventDefault();
}
function onIncomingChat(message) {
console.log('chat inbound.');
history.innerHTML += time() + ': ' + message + '<br/>';
// Scroll history to the bottom.
wrap.scrollTop = history.scrollHeight;
}
function time() {
var now = new Date();
var hours = now.getHours();
hours = (hours > 9 ? hours: ' ' + hours);
var mins = now.getMinutes();
mins = (mins > 9 ? mins : '0' + mins);
var secs = now.getSeconds();
secs = (secs > 9 ? secs : '0' + secs);
return '[' + hours + ':' + mins + ':' + secs + ']';
}
window.addEventListener('load', init);
},{"../lib/sonic-coder.js":3,"../lib/sonic-server.js":4,"../lib/sonic-socket.js":5}],2:[function(require,module,exports){
function RingBuffer(maxLength) {
this.array = [];
this.maxLength = maxLength;
}
RingBuffer.prototype.get = function(index) {
if (index >= this.array.length) {
return null;
}
return this.array[index];
};
RingBuffer.prototype.last = function() {
if (this.array.length == 0) {
return null;
}
return this.array[this.array.length - 1];
}
RingBuffer.prototype.add = function(value) {
// Append to the end, remove from the front.
this.array.push(value);
if (this.array.length >= this.maxLength) {
this.array.splice(0, 1);
}
};
RingBuffer.prototype.length = function() {
// Return the actual size of the array.
return this.array.length;
};
RingBuffer.prototype.clear = function() {
this.array = [];
};
RingBuffer.prototype.copy = function() {
// Returns a copy of the ring buffer.
var out = new RingBuffer(this.maxLength);
out.array = this.array.slice(0);
return out;
};
RingBuffer.prototype.remove = function(index, length) {
//console.log('Removing', index, 'through', index+length);
this.array.splice(index, length);
};
module.exports = RingBuffer;
},{}],3:[function(require,module,exports){
/**
* A simple sonic encoder/decoder for [a-z0-9] => frequency (and back).
* A way of representing characters with frequency.
*/
var ALPHABET = '\n abcdefghijklmnopqrstuvwxyz0123456789,.!?@*';
function SonicCoder(params) {
params = params || {};
this.freqMin = params.freqMin || 18500;
this.freqMax = params.freqMax || 19500;
this.freqError = params.freqError || 50;
this.alphabetString = params.alphabet || ALPHABET;
this.startChar = params.startChar || '^';
this.endChar = params.endChar || '$';
// Make sure that the alphabet has the start and end chars.
this.alphabet = this.startChar + this.alphabetString + this.endChar;
}
/**
* Given a character, convert to the corresponding frequency.
*/
SonicCoder.prototype.charToFreq = function(char) {
// Get the index of the character.
var index = this.alphabet.indexOf(char);
if (index == -1) {
// If this character isn't in the alphabet, error out.
console.error(char, 'is an invalid character.');
index = this.alphabet.length - 1;
}
// Convert from index to frequency.
var freqRange = this.freqMax - this.freqMin;
var percent = index / this.alphabet.length;
var freqOffset = Math.round(freqRange * percent);
return this.freqMin + freqOffset;
};
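// Rough worked example, assuming the default parameters above (freqMin 18500, freqMax 19500,
// and the default alphabet wrapped in '^'...'$', 46 characters in total): 'a' sits at index 3,
// so charToFreq('a') = 18500 + Math.round(1000 * 3 / 46) = 18565 Hz.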
/**
* Given a frequency, convert to the corresponding character.
*/
SonicCoder.prototype.freqToChar = function(freq) {
// If the frequency is out of the range.
if (!(this.freqMin < freq && freq < this.freqMax)) {
// If it's close enough to the min, clamp it (and same for max).
if (this.freqMin - freq < this.freqError) {
freq = this.freqMin;
} else if (freq - this.freqMax < this.freqError) {
freq = this.freqMax;
} else {
// Otherwise, report error.
console.error(freq, 'is out of range.');
return null;
}
}
// Convert frequency to index to char.
var freqRange = this.freqMax - this.freqMin;
var percent = (freq - this.freqMin) / freqRange;
var index = Math.round(this.alphabet.length * percent);
return this.alphabet[index];
};
module.exports = SonicCoder;
},{}],4:[function(require,module,exports){
var RingBuffer = require('./ring-buffer.js');
var SonicCoder = require('./sonic-coder.js');
var audioContext = new (window.AudioContext || window.webkitAudioContext)();
/**
* Extracts meaning from audio streams.
*
* (assumes audioContext is an AudioContext global variable.)
*
* 1. Listen to the microphone.
* 2. Do an FFT on the input.
* 3. Extract frequency peaks in the ultrasonic range.
* 4. Keep track of frequency peak history in a ring buffer.
* 5. Call back when a peak comes up often enough.
*/
function SonicServer(params) {
params = params || {};
this.peakThreshold = params.peakThreshold || -65;
this.minRunLength = params.minRunLength || 2;
this.coder = params.coder || new SonicCoder(params);
// How long (in ms) to wait for the next character.
this.timeout = params.timeout || 300;
this.debug = !!params.debug;
this.peakHistory = new RingBuffer(16);
this.peakTimes = new RingBuffer(16);
this.callbacks = {};
this.buffer = '';
this.state = State.IDLE;
this.isRunning = false;
this.iteration = 0;
}
var State = {
IDLE: 1,
RECV: 2
};
/**
* Start processing the audio stream.
*/
SonicServer.prototype.start = function() {
// Start listening for microphone. Continue init in onStream.
var constraints = {
audio: { optional: [{ echoCancellation: false }] }
};
navigator.webkitGetUserMedia(constraints,
this.onStream_.bind(this), this.onStreamError_.bind(this));
};
/**
* Stop processing the audio stream.
*/
SonicServer.prototype.stop = function() {
this.isRunning = false;
this.track.stop();
};
SonicServer.prototype.on = function(event, callback) {
if (event == 'message') {
this.callbacks.message = callback;
}
if (event == 'character') {
this.callbacks.character = callback;
}
};
SonicServer.prototype.setDebug = function(value) {
this.debug = value;
var canvas = document.querySelector('canvas');
if (canvas) {
// Remove it.
canvas.parentElement.removeChild(canvas);
}
};
SonicServer.prototype.fire_ = function(callback, arg) {
if (typeof(callback) === 'function') {
callback(arg);
}
};
SonicServer.prototype.onStream_ = function(stream) {
// Store MediaStreamTrack for stopping later. MediaStream.stop() is deprecated
// See https://developers.google.com/web/updates/2015/07/mediastream-deprecations?hl=en
this.track = stream.getTracks()[0];
// Setup audio graph.
var input = audioContext.createMediaStreamSource(stream);
var analyser = audioContext.createAnalyser();
input.connect(analyser);
// Create the frequency array.
this.freqs = new Float32Array(analyser.frequencyBinCount);
// Save the analyser for later.
this.analyser = analyser;
this.isRunning = true;
// Do an FFT and check for inaudible peaks.
this.raf_(this.loop.bind(this));
};
SonicServer.prototype.onStreamError_ = function(e) {
console.error('Audio input error:', e);
};
/**
* Given an FFT frequency analysis, return the peak frequency in a frequency
* range.
*/
SonicServer.prototype.getPeakFrequency = function() {
// Find where to start.
var start = this.freqToIndex(this.coder.freqMin);
// TODO: use first derivative to find the peaks, and then find the largest peak.
// Just do a max over the set.
var max = -Infinity;
var index = -1;
for (var i = start; i < this.freqs.length; i++) {
if (this.freqs[i] > max) {
max = this.freqs[i];
index = i;
}
}
// Only care about sufficiently tall peaks.
if (max > this.peakThreshold) {
return this.indexToFreq(index);
}
return null;
};
SonicServer.prototype.loop = function() {
this.analyser.getFloatFrequencyData(this.freqs);
// Sanity check the peaks every 5 seconds.
if ((this.iteration + 1) % (60 * 5) == 0) {
this.restartServerIfSanityCheckFails();
}
// Calculate peaks, and add them to history.
var freq = this.getPeakFrequency();
if (freq) {
var char = this.coder.freqToChar(freq);
// DEBUG ONLY: Output the transcribed char.
if (this.debug) {
console.log('Transcribed char: ' + char);
}
this.peakHistory.add(char);
this.peakTimes.add(new Date());
} else {
// If no character was detected, see if we've timed out.
var lastPeakTime = this.peakTimes.last();
if (lastPeakTime && new Date() - lastPeakTime > this.timeout) {
// Last detection was over 300ms ago.
this.state = State.IDLE;
if (this.debug) {
console.log('Token', this.buffer, 'timed out');
}
this.peakTimes.clear();
}
}
// Analyse the peak history.
this.analysePeaks();
// DEBUG ONLY: Draw the frequency response graph.
if (this.debug) {
this.debugDraw_();
}
if (this.isRunning) {
this.raf_(this.loop.bind(this));
}
this.iteration += 1;
};
SonicServer.prototype.indexToFreq = function(index) {
var nyquist = audioContext.sampleRate/2;
return nyquist/this.freqs.length * index;
};
SonicServer.prototype.freqToIndex = function(frequency) {
var nyquist = audioContext.sampleRate/2;
return Math.round(frequency/nyquist * this.freqs.length);
};
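// Rough worked example (assuming a 44.1 kHz AudioContext and the analyser's default fftSize
// of 2048, i.e. this.freqs.length == 1024): nyquist = 22050 Hz, so each bin spans about
// 21.5 Hz and a 19 kHz peak lands near index Math.round(19000 / 22050 * 1024) = 882.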
/**
* Analyses the peak history to find true peaks (repeated over several frames).
*/
SonicServer.prototype.analysePeaks = function() {
// Look for runs of repeated characters.
var char = this.getLastRun();
if (!char) {
return;
}
if (this.state == State.IDLE) {
// If idle, look for start character to go into recv mode.
if (char == this.coder.startChar) {
this.buffer = '';
this.state = State.RECV;
}
} else if (this.state == State.RECV) {
// If receiving, look for character changes.
if (char != this.lastChar &&
char != this.coder.startChar && char != this.coder.endChar) {
this.buffer += char;
this.lastChar = char;
this.fire_(this.callbacks.character, char);
}
// Also look for the end character to go into idle mode.
if (char == this.coder.endChar) {
this.state = State.IDLE;
this.fire_(this.callbacks.message, this.buffer);
this.buffer = '';
}
}
};
SonicServer.prototype.getLastRun = function() {
var lastChar = this.peakHistory.last();
var runLength = 0;
// Look at the peakHistory array for patterns like ajdlfhlkjxxxxxx$.
for (var i = this.peakHistory.length() - 2; i >= 0; i--) {
var char = this.peakHistory.get(i);
if (char == lastChar) {
runLength += 1;
} else {
break;
}
}
if (runLength > this.minRunLength) {
// Remove it from the buffer.
this.peakHistory.remove(i + 1, runLength + 1);
return lastChar;
}
return null;
};
/**
* DEBUG ONLY.
*/
SonicServer.prototype.debugDraw_ = function() {
var canvas = document.querySelector('canvas');
if (!canvas) {
canvas = document.createElement('canvas');
document.body.appendChild(canvas);
}
canvas.width = document.body.offsetWidth;
canvas.height = 480;
var drawContext = canvas.getContext('2d');
// Plot the frequency data.
for (var i = 0; i < this.freqs.length; i++) {
var value = this.freqs[i];
// Transform this value (in db?) into something that can be plotted.
var height = value + 400;
var offset = canvas.height - height - 1;
var barWidth = canvas.width/this.freqs.length;
drawContext.fillStyle = 'black';
drawContext.fillRect(i * barWidth, offset, 1, 1);
}
};
/**
* A request animation frame shortcut. This one is intended to work even in
* background pages of an extension.
*/
SonicServer.prototype.raf_ = function(callback) {
var isCrx = !!(window.chrome && chrome.extension);
if (isCrx) {
setTimeout(callback, 1000/60);
} else {
requestAnimationFrame(callback);
}
};
SonicServer.prototype.restartServerIfSanityCheckFails = function() {
// Strange state 1: peaks gradually get quieter and quieter until they
// stabilize around -800.
if (this.freqs[0] < -300) {
console.error('freqs[0] < -300. Restarting.');
this.restart();
return;
}
// Strange state 2: all of the peaks are -100. Check just the first few.
var isValid = true;
for (var i = 0; i < 10; i++) {
if (this.freqs[i] == -100) {
isValid = false;
}
}
if (!isValid) {
console.error('freqs[0:10] == -100. Restarting.');
this.restart();
}
}
SonicServer.prototype.restart = function() {
//this.stop();
//this.start();
window.location.reload();
};
module.exports = SonicServer;
},{"./ring-buffer.js":2,"./sonic-coder.js":3}],5:[function(require,module,exports){
var SonicCoder = require('./sonic-coder.js');
var audioContext = new (window.AudioContext || window.webkitAudioContext)();
/**
* Encodes text as audio streams.
*
* 1. Receives a string of text.
* 2. Creates an oscillator.
* 3. Converts characters into frequencies.
* 4. Transmits frequencies, waiting in between appropriately.
*/
function SonicSocket(params) {
params = params || {};
this.coder = params.coder || new SonicCoder(params);
this.charDuration = params.charDuration || 0.2;
this.rampDuration = params.rampDuration || 0.001;
}
SonicSocket.prototype.send = function(input, opt_callback) {
// Surround the word with start and end characters.
input = this.coder.startChar + input + this.coder.endChar;
// Use WAAPI to schedule the frequencies.
for (var i = 0; i < input.length; i++) {
var char = input[i];
var freq = this.coder.charToFreq(char);
var time = audioContext.currentTime + this.charDuration * i;
this.scheduleToneAt(freq, time, this.charDuration);
}
// If specified, callback after roughly the amount of time it would have
// taken to transmit the token.
if (opt_callback) {
var totalTime = this.charDuration * input.length;
setTimeout(opt_callback, totalTime * 1000);
}
};
SonicSocket.prototype.scheduleToneAt = function(freq, startTime, duration) {
var gainNode = audioContext.createGain();
// Gain => Merger
gainNode.gain.value = 0;
gainNode.gain.setValueAtTime(0, startTime);
gainNode.gain.linearRampToValueAtTime(1, startTime + this.rampDuration);
gainNode.gain.setValueAtTime(1, startTime + duration - this.rampDuration);
gainNode.gain.linearRampToValueAtTime(0, startTime + duration);
gainNode.connect(audioContext.destination);
var osc = audioContext.createOscillator();
osc.frequency.value = freq;
osc.connect(gainNode);
osc.start(startTime);
};
module.exports = SonicSocket;
},{"./sonic-coder.js":3}]},{},[1]);
| mrigeshparashar/Data-Over-sound |
<|start_filename|>src/lookupPlate.js<|end_filename|>
/* eslint-disable no-undef */
const Tesseract = require('tesseract.js');
const screenshotDOMElement = require('./screenshotDOMElement.js');
module.exports = async (browser, state = 'DC', number = 'ey9285') => {
console.log('lookup Plate');
const page = await browser.newPage();
console.log('browser page created');
await page.setViewport({ height: 768, width: 1024 });
await page.goto(
'https://prodpci.etimspayments.com/pbw/include/dc_parking/input.jsp',
{ waitUntil: ['domcontentloaded', 'networkidle0'] }
);
console.log('loaded');
try {
// Enter license plate number
await page.type('[name=plateNumber]', number);
console.log('typed number');
// Set state
await page.evaluate(state => {
document.querySelector('[name=statePlate]').value = state;
}, state);
console.log('set state');
} catch (e) {
return {
error:
"I'm broken. Look it up yourself: https://prodpci.etimspayments.com/pbw/include/dc_parking/input.jsp"
};
}
// solve the captcha >:D
await screenshotDOMElement(page, {
path: '/tmp/captcha.png',
selector: '#captcha',
padding: 4
});
console.log('screened captcha');
const { text } = await Tesseract.recognize('/tmp/captcha.png');
console.log('solved captcha');
const captcha = text.replace(/\D/g, '');
await page.type('[name=captchaSText]', captcha);
console.log('typed captcha');
// avoid to timeout waitForNavigation() after click()
await Promise.all([page.waitForNavigation(), page.keyboard.press('Enter')]);
console.log('submitted form');
const error = await page.evaluate(() => {
if (document.querySelector('[name=selectForm]') === null) {
return (
document.querySelector('.error') &&
document.querySelector('.error').textContent
);
}
});
if (error && error.match && error.match(/Please enter the characters/)) {
return { error: 'captcha error' };
} else if (error) {
return { error };
}
console.log('checked errors');
const total = await page.evaluate(() => {
const totalInput = document.querySelector('input[name=totalAmount]');
if (totalInput) {
return totalInput.value.replace('$', '');
}
return Number(
document
.querySelector('[name=selectForm]')
.textContent.match(
/(The total of all your citations and fees is:|You have a total of \d+\sticket\(s\) on your account in the amount of) \$(\d+\.\d+)/
)[2]
);
});
const regNode = await page.evaluate(
() => document.querySelector('.reg') !== null
);
if (regNode) {
await screenshotDOMElement(page, {
path: '/tmp/tickets.png',
selector: '.reg>table',
padding: 4
});
} else {
// more than I'd like, but the page DOM sucks
await screenshotDOMElement(page, {
path: '/tmp/tickets.png',
selector: '[name=selectForm]',
padding: 4
});
}
console.log('screenshotted tickets!');
const html = await page.evaluate(() => document.body.innerHTML);
return { path: '/tmp/tickets.png', total, html };
};
/* eslint-enable no-undef */
<|start_filename|>src/puppeteer/setup.js<|end_filename|>
const puppeteer = require('puppeteer');
exports.getBrowser = () =>
puppeteer.launch({
headless: true,
executablePath: process.env.IS_LOCAL ? undefined : '/opt/headless_shell',
args: ['--no-sandbox', '--disable-gpu', '--single-process']
});
<|start_filename|>src/getHighscore.js<|end_filename|>
const AWS = require('aws-sdk');
const defaultHighScore = 16198; // https://twitter.com/HowsMyDrivingDC/status/1091565303333572609
module.exports = async function(newScore) {
const simpledb = new AWS.SimpleDB({ });
// Create domain if it doesn't exist yet
const { DomainNames } = await simpledb.listDomains().promise();
if (!(DomainNames || []).includes('howsmydrivingdc')) {
await simpledb.createDomain({ DomainName: 'howsmydrivingdc' }).promise();
}
// get current highscore
let highScore = defaultHighScore;
try {
const { Attributes: [ { Value } ] } = await simpledb.getAttributes({
AttributeNames: ['high-score'],
DomainName: 'howsmydrivingdc',
'ItemName': 'high-score'
}).promise()
highScore = Value ? parseInt(Value) : defaultHighScore;
} catch (err) {
console.error('error fetching high score, using default', err)
}
// save new high score if greater
if (newScore > highScore) {
await simpledb.putAttributes({
Attributes:[ {Name:'high-score', Value: newScore.toString(), Replace: true} ],
DomainName: 'howsmydrivingdc',
ItemName: 'high-score'
}).promise()
}
return highScore;
};
<|start_filename|>src/handlers.js<|end_filename|>
const { readFileSync } = require('fs');
const AWS = require('aws-sdk');
const middy = require('middy');
const { ssm, jsonBodyParser } = require('middy/middlewares');
const Twitter = require('twitter');
const setup = require('./puppeteer/setup');
const lookupPlate = require('./lookupPlate.js');
const crc = require('./crc.js');
const getHighscore = require('./getHighscore.js');
const s3 = new AWS.S3();
const PLATE_REGEX = /\b([a-zA-Z]{2}):\s*([a-zA-Z0-9]+)\b/;
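// Example (hypothetical tweet text): "hey @HowsMyDrivingDC dc: ey9285" matches with
// state "dc" and number "ey9285"; the \b boundaries keep the two-letter state from
// matching inside a longer word.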
module.exports.test = middy(async (event, context) => {
// For keeping the browser launch
context.callbackWaitsForEmptyEventLoop = false;
const browser = await setup.getBrowser();
console.log(event);
return lookupPlate(browser, event.state, event.number);
});
module.exports.test.use(
ssm({
names: {
CONSUMER_KEY: '/howsmydriving/consumer_key',
CONSUMER_SECRET: '/howsmydriving/consumer_secret',
ACCESS_TOKEN: '/howsmydriving/access_token',
ACCESS_TOKEN_SECRET: '/howsmydriving/access_token_secret'
}
})
);
module.exports.crc = middy(async event => {
console.log(event);
if (!event.queryStringParameters || !event.queryStringParameters.crc_token) {
return { statusCode: 400 };
}
const responseToken = crc(
event.queryStringParameters.crc_token,
process.env.CONSUMER_SECRET
);
return {
body: JSON.stringify({ response_token: `sha256=${responseToken}` })
};
});
module.exports.crc.use(
ssm({
cache: true,
names: {
CONSUMER_SECRET: '/howsmydriving/consumer_secret'
}
})
);
module.exports.register = middy(async event => {
const client = new Twitter({
consumer_key: process.env.CONSUMER_KEY,
consumer_secret: process.env.CONSUMER_SECRET,
access_token_key: process.env.ACCESS_TOKEN,
access_token_secret: process.env.ACCESS_TOKEN_SECRET
});
return client.post(
`/account_activity/all/dev/webhooks.json?url=${encodeURIComponent(
event.webhook
)}`
);
});
module.exports.register.use(
ssm({
names: {
CONSUMER_KEY: '/howsmydriving/consumer_key',
CONSUMER_SECRET: '/howsmydriving/consumer_secret',
ACCESS_TOKEN: '/howsmydriving/access_token',
ACCESS_TOKEN_SECRET: '/howsmydriving/access_token_secret'
}
})
);
module.exports.subscribe = middy(async (/*event*/) => {
const client = new Twitter({
consumer_key: process.env.CONSUMER_KEY,
consumer_secret: process.env.CONSUMER_SECRET,
access_token_key: process.env.ACCESS_TOKEN,
access_token_secret: process.env.ACCESS_TOKEN_SECRET
});
return client.post(`/account_activity/all/dev/subscriptions.json`);
});
module.exports.subscribe.use(
ssm({
names: {
CONSUMER_KEY: '/howsmydriving/consumer_key',
CONSUMER_SECRET: '/howsmydriving/consumer_secret',
ACCESS_TOKEN: '/howsmydriving/access_token',
ACCESS_TOKEN_SECRET: '/howsmydriving/access_token_secret'
}
})
);
let browser
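// Summary of the webhook handler below: validate that the payload carries
// tweet_create_events, ignore the bot's own tweets, retweets and plain quote tweets,
// pull "ST: NUMBER" out of the tweet text with PLATE_REGEX, retry lookupPlate up to
// 5 times on captcha errors, reply with a screenshot of the ticket table, archive the
// scraped HTML to S3, and finally compare the total against the stored high score.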
module.exports.webhook = middy(async (event, context) => {
console.log(event);
context.callbackWaitsForEmptyEventLoop = false;
if (!browser)
browser = await setup.getBrowser();
const client = new Twitter({
consumer_key: process.env.CONSUMER_KEY,
consumer_secret: process.env.CONSUMER_SECRET,
access_token_key: process.env.ACCESS_TOKEN,
access_token_secret: process.env.ACCESS_TOKEN_SECRET
});
if (!event.body.tweet_create_events) {
return {statusCode: 200};
}
console.log(event.body.tweet_create_events);
if (
event.body.tweet_create_events[0].user.screen_name.toLowerCase() ===
'howsmydrivingdc'
) {
console.log('ignore own tweet');
return {statusCode: 200};
}
if (event.body.tweet_create_events[0].retweeted_status) {
console.log('ignore retweeted status');
return {statusCode: 200};
}
if (
event.body.tweet_create_events[0].is_quote_status &&
!event.body.tweet_create_events[0].text.includes(
event.body.tweet_create_events[0].quoted_status.text
)
) {
console.log('ignore quote tweet');
return {statusCode: 200};
}
let state, number;
try {
const text = event.body.tweet_create_events[0].truncated
? event.body.tweet_create_events[0].extended_tweet.full_text
: event.body.tweet_create_events[0].text;
[, state, number] = text.match(PLATE_REGEX);
} catch (e) {
console.log(e);
return {statusCode: 500};
}
console.log(state, number);
let result;
for (let i = 0; i < 5; i++) {
result = await lookupPlate(browser, state.toUpperCase(), number);
if (result.error !== 'captcha error') {
break;
}
}
console.log('lets tweet!');
const status = {
in_reply_to_status_id: event.body.tweet_create_events[0].id_str,
status: `@${event.body.tweet_create_events[0].user.screen_name} `
};
if (result.path) {
const data = readFileSync(result.path);
console.log('loaded image');
status.status += `${state}:${number} has $${
result.total
} in outstanding tickets:`;
let media_id_string;
try {
const mediaResp = await client.post('media/upload', {
media: data
});
media_id_string = mediaResp.media_id_string;
} catch (e) {
console.log(JSON.stringify(e));
}
status.media_ids = media_id_string;
} else if (result.error) {
status.status += `Result for ${state}:${number} - ${result.error}`;
}
let id_str;
try {
const statusResp = await client.post('statuses/update', status);
id_str = statusResp.id_str;
} catch (e) {
console.log(JSON.stringify(e));
}
await s3
.putObject({
Bucket: process.env.BUCKET,
Key: `${id_str}.html`,
Body: result.html
})
.promise();
if (state.toLowerCase() === 'md' && number.toLowerCase() === '2dh2148') {
console.log('no more high scores for MD:2DH2148');
return {statusCode: 200};
}
if (result.error) {
return {statusCode: 500};
}
const highScore = await getHighscore(result.total);
if (result.total > highScore) {
const highScoreStatus = {
status: `🚨 @${
event.body.tweet_create_events[0].user.screen_name
} set a new high score with ${state}:${number}: $${
result.total
} in unpaid tickets! 🚨
https://twitter.com/HowsMyDrivingDC/status/${id_str}`
};
// await client.post('statuses/update', highScoreStatus);
}
return {statusCode: 200};
});
module.exports.webhook.use(
ssm({
cache: true,
names: {
CONSUMER_KEY: '/howsmydriving/consumer_key',
CONSUMER_SECRET: '/howsmydriving/consumer_secret',
ACCESS_TOKEN: '/howsmydriving/access_token',
ACCESS_TOKEN_SECRET: '/howsmydriving/access_token_secret'
}
})
);
module.exports.webhook.use(jsonBodyParser());
module.exports.archive = middy(async () => {
const sqlite = require('sqlite');
const client = new Twitter({
consumer_key: process.env.CONSUMER_KEY,
consumer_secret: process.env.CONSUMER_SECRET,
access_token_key: process.env.ACCESS_TOKEN,
access_token_secret: process.env.ACCESS_TOKEN_SECRET
});
const db = await sqlite.open('archive.db');
await db.run(
`
CREATE TABLE IF NOT EXISTS tweets (
tweet_id text,
created_at timestamp,
content text,
state varchar(2),
number text,
amount double,
user text,
summoning_text text,
zero_reason text
)
`
);
let { since_id } = await db.get('SELECT max(tweet_id) since_id FROM tweets');
let max_id;
while (true) {
// eslint-disable-line no-constant-condition
const ownTweets = await client.get('/statuses/user_timeline.json', {
screen_name: 'howsmydrivingdc',
count: 200,
since_id: since_id || undefined, // eslint-disable-line no-undef
max_id
});
for (const {
text,
id_str,
created_at,
in_reply_to_status_id_str
} of ownTweets) {
if (id_str === max_id) {
continue;
}
max_id = id_str;
const match = text.match(
/@\S+ ([a-zA-Z]{2}):([a-zA-Z0-9]+) has \$(\d+(\.\d+)?) in outstanding tickets:/
);
let summoningTweet;
try {
summoningTweet = await client.get('/statuses/show.json', {
id: in_reply_to_status_id_str,
tweet_mode: 'extended'
});
} catch (e) {
console.log('Summoning tweet deleted');
}
if (match) {
await db.run(
`INSERT INTO tweets (tweet_id, created_at, content, state, number, amount, user, summoning_text)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`,
[
id_str,
new Date(created_at).toISOString(),
text,
match[1],
match[2],
match[3],
summoningTweet && summoningTweet.user.screen_name,
summoningTweet && summoningTweet.full_text
]
);
console.log(`ADDED ${match[1]}:${match[2]} ${match[3]}`);
} else {
let zeroReason;
if (text.includes('balance of $0')) {
zeroReason = 'paid';
} else if (text.includes('not found')) {
zeroReason = 'unfound';
} else {
console.log(`SKIPPED ${text} - not a response to summoning`);
continue;
}
const [, state, number] = summoningTweet
? summoningTweet.full_text.match(PLATE_REGEX)
: [null, null, null];
await db.run(
`INSERT INTO tweets (tweet_id, created_at, content, state, number, amount, user, summoning_text, zero_reason)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`,
[
id_str,
new Date(created_at).toISOString(),
text,
state,
number,
0,
summoningTweet && summoningTweet.user.screen_name,
summoningTweet && summoningTweet.full_text,
zeroReason
]
);
console.log(`ADDED ${state}:${number} 0`);
}
}
if (ownTweets.length <= 1)
// bc of how max_id works, that tweet itself is always returned
return;
}
});
module.exports.archive.use(
ssm({
cache: true,
names: {
CONSUMER_KEY: '/howsmydriving/consumer_key',
CONSUMER_SECRET: '/howsmydriving/consumer_secret',
ACCESS_TOKEN: '/howsmydriving/access_token',
ACCESS_TOKEN_SECRET: '/howsmydriving/access_token_secret'
}
})
);
| dschep/hows-my-driving-dc |
<|start_filename|>30 Days of Code (C)/D2-Operators.c<|end_filename|>
/*
Objective
In this challenge, you'll work with arithmetic operators. Check out the Tutorial tab for learning materials and an instructional video!
Task
Given the meal price (base cost of a meal), tip percent (the percentage of the meal price being added as tip), and tax percent (the percentage of the meal price being added as tax) for a meal, find and print the meal's total cost.
Note: Be sure to use precise values for your calculations, or you may end up with an incorrectly rounded result!
Input Format
There are 3 lines of numeric input:
The first line has a double, meal_cost (the cost of the meal before tax and tip).
The second line has an integer, tip_percent (the percentage of meal_cost being added as tip).
The third line has an integer, tax_percent (the percentage of meal_cost being added as tax).
Output Format
Print the total meal cost, where the total is the rounded integer result of the entire bill (meal_cost with added tax and tip).
Sample Input
12.00
20
8
Sample Output
15
Explanation
Given: meal_cost = 12.00, tip_percent = 20, tax_percent = 8.
Calculations: tip = 12.00 * 20 / 100 = 2.40, tax = 12.00 * 8 / 100 = 0.96, total = 12.00 + 2.40 + 0.96 = 15.36.
We round to the nearest dollar (integer) and then print our result, 15.
*/
#include <assert.h>
#include <limits.h>
#include <math.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
char* readline();
// Complete the solve function below.
void solve(double meal_cost, int tip_percent, int tax_percent) {
double tip_cost= meal_cost * tip_percent / 100;
double tax_cost= meal_cost * tax_percent / 100;
double total_cost= (meal_cost + tip_cost + tax_cost);
printf("%.0lf",total_cost);
}
int main()
{
char* meal_cost_endptr;
char* meal_cost_str = readline();
double meal_cost = strtod(meal_cost_str, &meal_cost_endptr);
if (meal_cost_endptr == meal_cost_str || *meal_cost_endptr != '\0') { exit(EXIT_FAILURE); }
char* tip_percent_endptr;
char* tip_percent_str = readline();
int tip_percent = strtol(tip_percent_str, &tip_percent_endptr, 10);
if (tip_percent_endptr == tip_percent_str || *tip_percent_endptr != '\0') { exit(EXIT_FAILURE); }
char* tax_percent_endptr;
char* tax_percent_str = readline();
int tax_percent = strtol(tax_percent_str, &tax_percent_endptr, 10);
if (tax_percent_endptr == tax_percent_str || *tax_percent_endptr != '\0') { exit(EXIT_FAILURE); }
solve(meal_cost, tip_percent, tax_percent);
return 0;
}
char* readline() {
size_t alloc_length = 1024;
size_t data_length = 0;
char* data = malloc(alloc_length);
while (true) {
char* cursor = data + data_length;
char* line = fgets(cursor, alloc_length - data_length, stdin);
if (!line) { break; }
data_length += strlen(cursor);
if (data_length < alloc_length - 1 || data[data_length - 1] == '\n') { break; }
size_t new_length = alloc_length << 1;
data = realloc(data, new_length);
if (!data) { break; }
alloc_length = new_length;
}
if (data[data_length - 1] == '\n') {
data[data_length - 1] = '\0';
}
data = realloc(data, data_length);
return data;
}
<|start_filename|>Algorithms/Diagonal Difference.js<|end_filename|>
/*
Question: https://www.hackerrank.com/challenges/diagonal-difference/problem
Given a square matrix, calculate the absolute difference between the sums of its diagonals.
For example, the square matrix is shown below:
1 2 3
4 5 6
9 8 9
The left-to-right diagonal = 1 + 5 + 9 = 15. The right-to-left diagonal = 3 + 5 + 9 = 17. Their absolute difference is |15 - 17| = 2.
Function description
Complete the function in the editor below.
diagonalDifference takes the following parameter:
int arr[n][m]: an array of integers
Return
int: the absolute diagonal difference
Input Format
The first line contains a single integer, n, the number of rows and columns in the square matrix arr.
Each of the next n lines describes a row, arr[i], and consists of n space-separated integers arr[i][j].
Constraints
Output Format
Return the absolute difference between the sums of the matrix's two diagonals as a single integer.
Sample Input
3
11 2 4
4 5 6
10 8 -12
Sample Output
15
Explanation
The primary diagonal is:
11
5
-12
Sum across the primary diagonal: 11 + 5 - 12 = 4
The secondary diagonal is:
4
5
10
Sum across the secondary diagonal: 4 + 5 + 10 = 19
Difference: |4 - 19| = 15
Note: |x| is the absolute value of x
*/
"use strict";
const fs = require("fs");
process.stdin.resume();
process.stdin.setEncoding("utf-8");
let inputString = "";
let currentLine = 0;
process.stdin.on("data", function (inputStdin) {
inputString += inputStdin;
});
process.stdin.on("end", function () {
inputString = inputString.split("\n");
main();
});
function readLine() {
return inputString[currentLine++];
}
/*
* Complete the "diagonalDifference" function below.
*
* The function is expected to return an INTEGER.
* The function accepts 2D_INTEGER_ARRAY arr as parameter.
*/
function diagonalDifference(arr) {
// Write your code here
// Dimension of the square array.
const n = arr.length;
let leftDiagSum = 0;
let rightDiagSum = 0;
// Going from top to bottom.
for (let i = 0; i < n; i++) {
// Element contributing to left diagonal sum.
const left = arr[i][i];
leftDiagSum += left;
// Element contributing to right diagonal sum.
const right = arr[i][n - i - 1];
rightDiagSum += right;
}
// Return the absolute difference.
return Math.abs(leftDiagSum - rightDiagSum);
}
function main() {
const ws = fs.createWriteStream(process.env.OUTPUT_PATH);
const n = parseInt(readLine().trim(), 10);
let arr = Array(n);
for (let i = 0; i < n; i++) {
arr[i] = readLine()
.replace(/\s+$/g, "")
.split(" ")
.map((arrTemp) => parseInt(arrTemp, 10));
}
const result = diagonalDifference(arr);
ws.write(result + "\n");
ws.end();
}
<|start_filename|>Problem Solving/Pairs.cpp<|end_filename|>
/*
Pairs
You will be given an array of integers and a target value. Determine the number of pairs of array elements
that have a difference equal to a target value.
For example, given an array of [1, 2, 3, 4] and a target value of 1, we have three values meeting the
condition: , , and .
Function Description
Complete the pairs function below. It must return an integer representing the number of element pairs
having the required difference.
pairs has the following parameter(s):
k: an integer, the target difference
arr: an array of integers
Input Format
The first line contains two space-separated integers n and k, the size of arr and the target value.
The second line contains n space-separated integers of the array arr.
Constraints
each integer will be unique
Output Format
An integer representing the number of pairs of integers whose difference is k.
Sample Input
5 2
1 5 3 4 2
Sample Output
3
Explanation
There are 3 pairs of integers in the set with a difference of 2: [5,3], [4,2] and [3,1] .
*/
#include<bits/stdc++.h>
using namespace std;
#define ll unsigned long long int
#define mp make_pair
#define pb push_back
typedef vector<ll> vll;
typedef pair<ll,ll> pll;
typedef vector<pll> vpll;
typedef map<pll,ll> mpll;
typedef map<ll,ll> mll;
int main()
{
ll n,k;
cin>>n>>k;
vll a(n);
mll hash;
mpll pairHash;
vpll result;
for(auto &i:a)cin>>i,hash[i]=1;
for(auto &i:a){
if(hash.find(k+i)!=hash.end()){
result.pb(mp(k+i,i));
}
}
cout<<result.size();
}
<|start_filename|>30 Days of Code (Kotlin)/day28-Regex.kt<|end_filename|>
/*
Question
Today, we're working with regular expressions. Check out the Tutorial tab for learning materials and an instructional video!
Task
Consider a database table, Emails, which has the attributes First Name and Email ID. Given
N rows of data simulating the Emails table, print an alphabetically-ordered list of people whose email address ends in
@gmail.com.
Input Format
The first line contains an integer, N, the total number of rows in the table.
Each of the N subsequent lines contains 2 space-separated strings denoting a person's first name and email ID, respectively.
Constraints
Each of the first names consists of lower case letters [a-z] only.
Each of the email IDs consists of lower case letters [a-z], '.', and '@' only.
The length of the first name is no longer than 20.
The length of the email ID is no longer than 50.
Output Format
Print an alphabetically-ordered list of first names for every user with a gmail account. Each name must be printed on a new line.
Sample Input
6
riya <EMAIL>
julia <EMAIL>
julia <EMAIL>
julia <EMAIL>
samantha <EMAIL>
tanya <EMAIL>
Sample Output
julia
julia
riya
samantha
tanya
*/
fun main(args: Array<String>) {
val names=ArrayList<String>();
val n=readInt();
for (i in 0..n-1) {
var input=readStrings().toTypedArray();
var isvalid=input[1].split("@");
if(isvalid[1].equals("gmail.com")){
names.add(input[0]);
}
}
names.sort();
for (i in names) {
println(i);
}
}
private fun readLn() = readLine()!! ;
private fun readInt() = readLn().toInt();
private fun readStrings() = readLn().split(" ") ;
private fun readInts() = readStrings().map { it.toInt() }
<|start_filename|>30 Days of Code (C)/D1-Data-Types.c<|end_filename|>
/*
Objective
Today, we're discussing data types. Check out the Tutorial tab for learning materials and an instructional video!
Task
Complete the code in the editor below. The variables i, d, and s
are already declared and initialized for you.
You must:
Declare 3 variables: one of type int, one of type double, and one of type String.
Read 3 lines of input from stdin (according to the sequence given in the Input Format section below) and initialize your variables.
Use the + operator to perform the following operations:
Print the sum of i plus your int variable on a new line.
Print the sum of d plus your double variable to a scale of one decimal place on a new line.
Concatenate s with the string you read as input and print the result on a new line.
Note: If you are using a language that doesn't support using for string concatenation (e.g.: C), you can just print one variable immediately following the other on the same line. The string provided in your editor must be printed first, immediately followed by the string you read as input.
Input Format
The first line contains an integer that you must sum with i.
The second line contains a double that you must sum with d.
The third line contains a string that you must concatenate with s.
Output Format
Print the sum of both integers on the first line, the sum of both doubles (scaled to 1 decimal place) on the second line, and then the two concatenated strings on the third line.
Sample Input
12
4.0
is the best place to learn and practice coding!
Sample Output
16
8.0
HackerRank is the best place to learn and practice coding!
Explanation
When we sum the integers and , we get the integer.When we sum the floating-point numbers and , we get .When we concatenate HackerRank with is the best place to learn and practice coding!, we get HackerRank is the best place to learn and practice coding!.
You will not pass this challenge if you attempt to assign the Sample Case values to your variables instead of following the instructions above and reading input from stdin.
*/
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <stdlib.h>
int main() {
int i = 4;
double d = 4.0;
char s[] = "HackerRank ";
// Declare second integer, double, and String variables.
int integer2;
double double2;
char string2[100];
// Read and save an integer, double, and String to your variables.
scanf("%d\n",&integer2);
scanf("%lf\n",&double2);
fgets(string2, 100, stdin);
// Print the sum of both integer variables on a new line.
printf("%d\n",i+integer2);
// Print the sum of the double variables on a new line.
printf("%.1lf\n",d+double2);
// Concatenate and print the String variables on a new line
// The 's' variable above should be printed first.
printf("%s%s",s,string2);
return 0;
}
<|start_filename|>30 Days of Code (C)/D0-Hello-World.c<|end_filename|>
/*
Objective
In this challenge, we review some basic concepts that will get you started with this series. You will need to use the same (or similar) syntax to read input and write output in challenges throughout HackerRank. Check out the Tutorial tab for learning materials and an instructional video!
Task
To complete this challenge, you must save a line of input from stdin to a variable, print Hello, World. on a single line, and finally print the value of your variable on a second line.
You've got this!
Note: The instructions are Java-based, but we support submissions in many popular languages. You can switch languages using the drop-down menu above your editor, and the
input_string variable may be written differently depending on the best-practice conventions of your submission language.
Input Format
A single line of text denoting input_string (the variable whose contents must be printed).
Output Format
Print Hello, World. on the first line, and the contents of input_string on the second line.
Sample Input
Welcome to 30 Days of Code!
Sample Output
Hello, World.
Welcome to 30 Days of Code!
Explanation
On the first line, we print the string literal Hello, World.. On the second line, we print the contents of the
input_string variable which, for this sample case, happens to be Welcome to 30 Days of Code!. If you do not print the variable's contents to stdout, you will not pass the hidden test case.
*/
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <stdlib.h>
int main() {
// Declare a variable named 'input_string' to hold our input.
char input_string[105];
// Read a full line of input from stdin and save it to our variable, input_string.
scanf("%[^\n]", input_string);
// Print a string literal saying "Hello, World." to stdout using printf.
printf("Hello, World.\n");
printf("%s",input_string);
// TODO: Write a line of code here that prints the contents of input_string to stdout.
return 0;
}
<|start_filename|>30 Days Of Code (Java)/Day-0-HelloWorld.java<|end_filename|>
/**
Objective
In this challenge, we review some basic concepts that will get you started with this series. You will need to use the same (or similar) syntax to read input and write output in challenges throughout HackerRank. Check out the Tutorial tab for learning materials and an instructional video!
Task
To complete this challenge, you must save a line of input from stdin to a variable, print Hello, World. on a single line, and finally print the value of your variable on a second line.
You've got this!
Note: The instructions are Java-based, but we support submissions in many popular languages. You can switch languages using the drop-down menu above your editor, and the inputString variable may be written differently depending on the best-practice conventions of your submission language.
Input Format
A single line of text denoting inputString (the variable whose contents must be printed).
Output Format
Print Hello, World. on the first line, and the contents of inputString on the second line.
Sample Input
Welcome to 30 Days of Code!
Sample Output
Hello, World.
Welcome to 30 Days of Code!
Explanation
On the first line, we print the string literal Hello, World.. On the second line, we print the contents of the inputString variable which, for this sample case, happens to be Welcome to 30 Days of Code!. If you do not print the variable's contents to stdout, you will not pass the hidden test case.
* @gayatripalkar
*/
import java.util.Scanner;
public class Day0HelloWorld {
public static void main(String[] args) {
// Create a Scanner object to read input from stdin.
Scanner scan = new Scanner(System.in);
// Read a full line of input from stdin and save it to our variable,
// inputString.
String inputString = scan.nextLine();
// Close the scanner object, because we've finished reading
// all of the input from stdin needed for this challenge.
scan.close();
// Print a string literal saying "Hello, World." to stdout.
System.out.println("Hello, World.");
System.out.println(inputString);
}
}
<|start_filename|>CPP/Largest_Bitonic_Subarray.cpp<|end_filename|>
/* You are given an array of positive integers as input. Write a code to return the length of the largest such subsequence in which the values are arranged first in strictly ascending order and then in strictly descending order.
Such a subsequence is known as bitonic subsequence. A purely increasing or purely decreasing subsequence will also be considered as a bitonic sequence with the other part empty.
Note that the elements in bitonic subsequence need not be consecutive in the given array but the order should remain same.
Input Format:
Line 1 : A positive Integer N, i.e., the size of array
Line 2 : N space-separated integers as elements of the array
Output Format:
Length of Largest Bitonic subsequence
Input Constraints:
1<= N <= 100000
Sample Input 1:
6
15 20 20 6 4 2
Sample Output 1:
5
Sample Output 1 Explanation:
Here, longest Bitonic subsequence is {15, 20, 6, 4, 2} which has length = 5.
Sample Input 2:
2
1 5
Sample Output 2:
2
Sample Input 3:
2
5 1
Sample Output 3:
2 */
#include<iostream>
#include<algorithm>
using namespace std;
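// Approach: lis() fills output[i] with the length of the longest strictly increasing
// subsequence ending at index i (left-to-right scan); lis_back() fills output[i] with the
// length of the longest strictly decreasing subsequence starting at index i (right-to-left
// scan). A bitonic subsequence peaking at index i then has length lis[i] + lis_back[i] - 1,
// and longestBitonicSubarray() returns the maximum of that quantity over all i.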
int* lis(int* input, int n)
{
int* output = new int[n];
output[0] = 1;
for (int i = 1; i < n; i++)
{
int current_maximum = 0;
for (int j = 0; j < i; j++)
{
if (input[i] > input[j])
{
current_maximum = max(current_maximum, output[j]);
}
}
output[i] = current_maximum + 1;
}
return output;
}
int* lis_back(int* input, int n)
{
int* output = new int[n];
output[n - 1] = 1;
for (int i = n - 2; i >= 0; i--)
{
int current_maximum = 0;
for (int j = i + 1; j < n; j++)
{
if (input[i] > input[j])
{
current_maximum = max(current_maximum, output[j]);
}
}
output[i] = current_maximum + 1;
}
return output;
}
int longestBitonicSubarray(int* input, int n)
{
int* longest_increasing_subsequence = lis(input, n);
int* longest_increasing_subsequence_from_back = lis_back(input, n);
int maximum = 2;
for (int i = 0; i < n; i++)
{
maximum = max(maximum, longest_increasing_subsequence_from_back[i] + longest_increasing_subsequence[i] - 1);
}
return maximum;
}
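// Minimal driver sketch (not part of the original solution), assuming the input format
// stated above: N on the first line, then N space-separated integers on the second line.
int main()
{
    int n;
    cin >> n;
    int* input = new int[n];
    for (int i = 0; i < n; i++)
    {
        cin >> input[i];
    }
    cout << longestBitonicSubarray(input, n) << endl;
    delete[] input;
    return 0;
}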
<|start_filename|>30 Days of Code (Kotlin)/day6-LetsReview.kt<|end_filename|>
/*
Question
Today we're expanding our knowledge of Strings and combining it with what we've already learned about loops. Check out the Tutorial tab for learning materials and an instructional video!
Task
Given a string, S, of length N that is indexed from 0 to N-1, print its even-indexed and odd-indexed characters as
2 space-separated strings on a single line (see the Sample below for more detail).
Note: 0 is considered to be an even index.
Input Format
The first line contains an integer, T (the number of test cases).
Each of the T subsequent lines contains a String, S.
Constraints
Output Format
For each String S, print S's even-indexed characters, followed by a space, followed by S's odd-indexed characters.
Sample Input
2
Hacker
Rank
Sample Output
Hce akr
Rn ak
Explanation
Test Case 0:
The even indices are 0, 2, and 4, and the odd indices are 1, 3, and 5. We then print a single line of 2 space-separated strings; the first string contains the ordered characters from Hacker's even indices (Hce), and the second string contains the ordered characters from Hacker's odd indices (akr).
Test Case 1:
The even indices are 0 and 2, and the odd indices are 1 and 3. We then print a single line of 2 space-separated strings; the first string contains the ordered characters from Rank's even indices (Rn), and the second string contains the ordered characters from Rank's odd indices (ak).
*/
fun main(args: Array<String>) {
var T:Int=readInt();
while(T-->0){
val inputString = readLn();
for (i in 0..inputString.length-1 step 2) {
print(inputString[i]);
}
print(" ");
for (i in 1..inputString.length-1 step 2) {
print(inputString[i]);
}
println("");
}
}
//for ease
private fun readLn() = readLine()!!
private fun readInt() = readLn().toInt()
<|start_filename|>Problem Solving/AppleAndOranges.c<|end_filename|>
// Question
// Apple and Orange
// Sam's house has an apple tree and an orange tree that yield an abundance of fruit. Using the information given below, determine the number of apples and oranges that land on Sam's house.
// In the diagram below:
// The red region denotes the house, where
// is the start point, and
// is the endpoint. The apple tree is to the left of the house, and the orange tree is to its right.
// Assume the trees are located on a single point, where the apple tree is at point
// , and the orange tree is at point
// .
// When a fruit falls from its tree, it lands
// units of distance from its tree of origin along the -axis. *A negative value of means the fruit fell units to the tree's left, and a positive value of means it falls units to the tree's right. *
// Giveethe value of d for m apples and n oranges, determine how many apples and oranges will fall on Sam's house(i.e in the inclusive range[s,t])?
// For example, Sam's House is between s = 7 and t = 10. The apple tree is located at a = 4 and the orange at b = 12. There are m=3 apples and n=3 oranges. Apples are thrown apples=[2,3,-4] unit distance from a
// and oranges = [3,-2,-4] units distance. Adding each apples distance to the position of the tree, they land at [4+2,4+3,4+-4] = [6,7,0]. Oranges land at [12+3,12+-2,12+-4] = [15,10,8]. One apple and two oranges
// land in the inclusive range 7-10 so we print
// 1
// 2
// Function Description
// Complete the countApplesAndOranges function in the editor below. It should print the number of apples and oranges that land on Sam's house, each on a separate line.
// countApplesAndOranges has the following parameter(s):
// s: integer, starting point of Sam's house location.
// t: integer, ending location of Sam's house location.
// a: integer, location of the Apple tree.
// b: integer, location of the Orange tree.
// apples: integer array, distances at which each apple falls from the tree.
// oranges: integer array, distances at which each orange falls from the tree.
// Input Format
// The first line contains two space-separated integers denoting the respective values of s
// and t.
// The second line contains two space-separated integers denoting the respective values of a and b.
// The third line contains two space-separated integers denoting the respective values of m and n.
// The fourth line contains m space-separated integers denoting the respective distances that each apple falls from point a.
// The fifth line contains n space-separated integers denoting the respective distances that each orange falls from point b.
// Constraints
// 1 <=s,t,a,b,m,n <= 10^5
// -10^5 <= d <= 10^5
// a < s < t < b
// Sample Input 0
// 7 11
// 5 15
// 3 2
// -2 2 1
// 5 -6
// Sample Output 0
// 1
// 1
// Solution
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
int main()
{
int s,t,a,b,m,n,acount=0,ocount=0,i;
int apple[100002],orange[100002];
scanf("%d %d",&s,&t);
scanf("%d %d",&a,&b);
scanf("%d %d",&m,&n);
for (i = 0; i < m; ++i)
{
scanf("%d",&apple[i]);
apple[i] = apple[i] + a;
}
for (i = 0; i < n; ++i)
{
scanf("%d",&orange[i]);
orange[i] = orange[i] + b;
}
for (i = 0; i < m ; i++)
{
if (apple[i] >= s && apple[i] <= t )
{
acount = acount + 1;
}
}
for (i = 0; i < n; i++)
{
if (orange[i] >= s && orange[i] <= t )
{
ocount = ocount + 1;
}
}
printf("%d\n",acount);
printf("%d",ocount);
}
<|start_filename|>30 Days Of Code (Java)/Day-1-DataTypes.java<|end_filename|>
/**
Objective
Today, we're discussing data types. Check out the Tutorial tab for learning materials and an instructional video!
Task
Complete the code in the editor below. The variables i, d, and s are already declared and initialized for you. You must:
Declare 3 variables: one of type int, one of type double, and one of type String.
Read 3 lines of input from stdin (according to the sequence given in the Input Format section below) and initialize your variables.
Use the + operator to perform the following operations:
Print the sum of i plus your int variable on a new line.
Print the sum of d plus your double variable to a scale of one decimal place on a new line.
Concatenate s with the string you read as input and print the result on a new line.
Note: If you are using a language that doesn't support using for string concatenation (e.g.: C), you can just print one variable immediately following the other on the same line. The string provided in your editor must be printed first, immediately followed by the string you read as input.
Input Format
The first line contains an integer that you must sum with i.
The second line contains a double that you must sum with d.
The third line contains a string that you must concatenate with s.
Output Format
Print the sum of both integers on the first line, the sum of both doubles (scaled to 1 decimal place) on the second line, and then the two concatenated strings on the third line.
Sample Input
12
4.0
is the best place to learn and practice coding!
Sample Output
16
8.0
HackerRank is the best place to learn and practice coding!
Explanation
When we sum the integers 4 and 12, we get the integer 16.
When we sum the floating-point numbers 4.0 and 4.0, we get 8.0.
When we concatenate HackerRank with is the best place to learn and practice coding!, we get HackerRank is the best place to learn and practice coding!.
You will not pass this challenge if you attempt to assign the Sample Case values to your variables instead of following the instructions above and reading input from stdin.
*/
import java.util.Scanner;
public class Day1DataTypes {
public static void main(String[] args) {
int i = 4;
double d = 4.0;
String s = "HackerRank ";
Scanner scan = new Scanner(System.in);
/* Declare second integer, double, and String variables. */
int ii = scan.nextInt();
scan.nextLine();
double dd = scan.nextDouble();
scan.nextLine();
String ss = scan.nextLine();
System.out.println(i + ii);
System.out.println(d + dd);
System.out.println(s + ss);
/* Read and save an integer, double, and String to your variables. */
// Note: If you have trouble reading the entire String, please go back and
// review the Tutorial closely.
/* Print the sum of both integer variables on a new line. */
/* Print the sum of the double variables on a new line. */
/*
* Concatenate and print the String variables on a new line; the 's' variable
* above should be printed first.
*/
scan.close();
}
}
<|start_filename|>30 Days of Code (C)/D3-Intro-to-Conditional-Statements.c<|end_filename|>
/*
Objective
In this challenge, we're getting started with conditional statements. Check out the Tutorial tab for learning materials and an instructional video!
Task
Given an integer, n, perform the following conditional actions:
If n is odd, print Weird
If n is even and in the inclusive range of 2 to 5, print Not Weird
If n is even and in the inclusive range of 6 to 20, print Weird
If n is even and greater than 20, print Not Weird
Complete the stub code provided in your editor to print whether or not n is weird.
Input Format
A single line containing a positive integer, n.
Constraints
Output Format
Print Weird if the number is weird; otherwise, print Not Weird.
Sample Input 0
3
Sample Output 0
Weird
Sample Input 1
24
Sample Output 1
Not Weird
Explanation
Sample Case 0:
n = 3 is odd and odd numbers are weird, so we print Weird.
Sample Case 1:
n = 24 is greater than 20 and even, so it isn't weird. Thus, we print Not Weird.
*/
#include <assert.h>
#include <limits.h>
#include <math.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
// If n is odd, print Weird
// If n is even and in the inclusive range of 2 to 5, print Not Weird
// If n is even and in the inclusive range of 6 to 20, print Weird
// If n is even and greater than 20, print Not Weird
int main()
{
int n;
scanf("%d",&n);
if(n%2 == 1 || (n%2 == 0 && n >= 6 && n <= 20))
printf("Weird\n");
else if(n%2 == 0 && ( (n >= 2 && n <=5) || n > 20))
printf("Not Weird\n");
}
<|start_filename|>Java/Java_Anagrams/Anagram.java<|end_filename|>
/* Two strings, a and b, are called anagrams if they contain all the same characters in the same frequencies. For example, the anagrams of CAT are CAT, ACT, TAC, TCA, ATC, and CTA.
Complete the isAnagram function in the editor. If a and b are case-insensitive anagrams, print "Anagrams"; otherwise, print "Not Anagrams" instead.
Input Format
The first line contains a string denoting a.
The second line contains a string denoting b.
Constraints
Strings a and b consist of English alphabetic characters.
The comparison should NOT be case sensitive.
Output Format
Print "Anagrams" if a and b are case-insensitive anagrams of each other; otherwise, print "Not Anagrams" instead.
Sample Input 0
anagram
margana
Sample Output 0
Anagrams
*/
package Java_Anagrams;
import java.util.Scanner;
public class Anagram {
public static void main(String[] args) {
Scanner scan = new Scanner(System.in);
String a = scan.next();
String b = scan.next();
scan.close();
boolean ret = isAnagram(a, b);
System.out.println( (ret) ? "Anagrams" : "Not Anagrams" );
}
static boolean isAnagram(String a, String b) {
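        // Frequency-count approach: one pass increments a counter for each letter of a and
        // decrements it for the corresponding letter of b; the strings are anagrams iff
        // every counter ends at zero.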
// Declarations
int aLength = a.length();
int bLength = b.length();
        // One counter per lowercase English letter
        int[] anagram = new int[26];
// Check constraints
if (aLength < 1 || aLength > 50) return false;
if (bLength != aLength) return false;
// Convert strings to same case
a = a.toLowerCase();
b = b.toLowerCase();
        // Increment / decrement the counter for the respective letter
        for (int i = 0; i < aLength; i++) {
            anagram[a.charAt(i) - 'a']++;
            anagram[b.charAt(i) - 'a']--;
}
// Search for counter not equal to 0
for (int i = 0; i < anagram.length; i++) {
if (anagram[i] != 0) return false;
}
return true;
}
}
<|start_filename|>Problem Solving/Fair_Rations.cpp<|end_filename|>
/* problem-statement
You are the benevolent ruler of Rankhacker Castle, and today you're distributing bread. Your subjects are in a line, and some of them already have some loaves.
Times are hard and your castle's food stocks are dwindling, so you must distribute as few loaves as possible according to the following rules:
Every time you give a loaf of bread to some person, i, you must also give a loaf of bread to the person immediately in front of or behind them in the line (i.e., person (i+1) or person (i-1)).
After all the bread is distributed, each person must have an even number of loaves.
Given the number of loaves already held by each citizen, find and print the minimum number of loaves you must distribute to satisfy the two rules above. If this is not possible, print NO.
For example, the people in line have loaves, B=[4,5,6,7]. We can first give a loaf to i=3 and i=4 so B=[4,5,7,8] .
Next we give a loaf to i=2 and i=3 and have B=[4,6,8,8] which satisfies our conditions. We had to distribute 4 loaves.
Function Description
Complete the fairRations function in the editor below. It should return an integer that represents the minimum number of loaves required.
fairRations has the following parameter(s):
B: an array of integers that represents the number of loaves each person starts with.
Input Format
The first line contains an integer,N, the number of subjects in the bread line.
The second line contains N space-separated integers B[i] .
Output Format
Print a single integer that denotes the minimum number of loaves that must be distributed so that every person has an even number of loaves. If it's not possible to do this, print NO.
Sample Input 0
5
2 3 4 5 6
Sample Output 0
4
Explanation 0
The initial distribution is(2,3,4,5,6). The requirements can be met as follows:
Give 1 loaf of bread each to the second and third people so that the distribution becomes (2,4,5,5,6).
Give 1 loaf of bread each to the third and fourth people so that the distribution becomes (2,4,6,6,6)
Each of the N subjects has an even number of loaves after 4 loaves were distributed. */
#include <bits/stdc++.h>
using namespace std;
vector<string> split_string(string);
// Complete the fairRations function below.
int fairRations(vector<int> b) {
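    // Greedy sweep: if the number of people holding an odd count is odd, the task is
    // impossible (return -1; main prints NO). Otherwise, whenever b[i] is odd, give one
    // loaf to person i and one to person i+1; each fix hands out 2 loaves in total.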
int count=0;
for(int i=0;i<b.size();i++)
{
if(b[i]%2!=0)
count++;
}
if(count%2!=0)
{
return -1;
}
else
{ count=0;
for(int i=0;i<b.size();i++)
{
if(b[i]%2!=0)
{ count++;
b[i]=b[i]+1;
b[i+1]=b[i+1]+1;
}
}
return count*2;
}
}
//auto-generated code
int main()
{
ofstream fout(getenv("OUTPUT_PATH"));
int N;
cin >> N;
cin.ignore(numeric_limits<streamsize>::max(), '\n');
string B_temp_temp;
getline(cin, B_temp_temp);
vector<string> B_temp = split_string(B_temp_temp);
vector<int> B(N);
for (int i = 0; i < N; i++) {
int B_item = stoi(B_temp[i]);
B[i] = B_item;
}
int result = fairRations(B);
if(result==-1)
fout<<"NO"<<"\n";
else
fout<<result<<"\n";
fout.close();
return 0;
}
vector<string> split_string(string input_string) {
string::iterator new_end = unique(input_string.begin(), input_string.end(), [] (const char &x, const char &y) {
return x == y and x == ' ';
});
input_string.erase(new_end, input_string.end());
while (input_string[input_string.length() - 1] == ' ') {
input_string.pop_back();
}
vector<string> splits;
char delimiter = ' ';
size_t i = 0;
size_t pos = input_string.find(delimiter);
while (pos != string::npos) {
splits.push_back(input_string.substr(i, pos - i));
i = pos + 1;
pos = input_string.find(delimiter, i);
}
splits.push_back(input_string.substr(i, min(pos, input_string.length()) - i + 1));
return splits;
}
<|start_filename|>Mathematics/Find the Point.java<|end_filename|>
/*
link = https://www.hackerrank.com/challenges/find-point/problem
Find the Point
Consider two points, p = (px,py) and q = (qx,qy). We consider the inversion, or point reflection, r = (rx,ry),
of point p across point q to be a 180-degree rotation of point p around q.
Given n sets of points p and q , find r for each pair of points and print two space-separated integers
denoting the respective values of rx and ry on a new line.
Input Format
The first line contains an integer, n, denoting the number of sets of points.
Each of the n subsequent lines contains four space-separated integers describing the respective values of
px, py, qx, and qy defining points p and q.
Constraints
Output Format
For each pair of points p and q, print the corresponding respective values of rx and ry as two space-separated integers on a new line.
Sample Input
2
0 0 1 1
1 1 2 2
Sample Output
2 2
3 3
Explanation
The graphs in the original problem depict points p, q, and r for the points given as Sample Input:
1. Reflecting p = (0,0) across q = (1,1) gives r = (2*1-0, 2*1-0) = (2,2).
Thus, we print rx and ry as 2 2 on a new line.
2. Reflecting p = (1,1) across q = (2,2) gives r = (2*2-1, 2*2-1) = (3,3).
Thus, we print rx and ry as 3 3 on a new line.
*/
import java.io.*;
import java.math.*;
import java.text.*;
import java.util.*;
import java.util.regex.*;
public class Solution {
/*
* Complete the findPoint function below.
*/
static int[] findPoint(int px, int py, int qx, int qy) {
/*
* Write your code here.
*/
int[] arr = new int[2];
arr[0] = (2*qx - px);
arr[1] = (2*qy - py);
return arr;
}
private static final Scanner scanner = new Scanner(System.in);
public static void main(String[] args) throws IOException {
BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(System.getenv("OUTPUT_PATH")));
int n = Integer.parseInt(scanner.nextLine().trim());
for (int nItr = 0; nItr < n; nItr++) {
String[] pxPyQxQy = scanner.nextLine().split(" ");
int px = Integer.parseInt(pxPyQxQy[0].trim());
int py = Integer.parseInt(pxPyQxQy[1].trim());
int qx = Integer.parseInt(pxPyQxQy[2].trim());
int qy = Integer.parseInt(pxPyQxQy[3].trim());
int[] result = findPoint(px, py, qx, qy);
for (int resultItr = 0; resultItr < result.length; resultItr++) {
bufferedWriter.write(String.valueOf(result[resultItr]));
if (resultItr != result.length - 1) {
bufferedWriter.write(" ");
}
}
bufferedWriter.newLine();
}
bufferedWriter.close();
}
}
<|start_filename|>Java/Welcome to java.java<|end_filename|>
/* Welcome to the world of Java! In this challenge, we practice printing to stdout.
The code stubs in your editor declare a Solution class and a main method. Complete the main method by copying the two lines of code below and pasting them inside the body of your main method.
System.out.println("Hello, World.");
System.out.println("Hello, Java.");
Input Format
There is no input for this challenge.
Output Format
You must print two lines of output:
Print Hello, World. on the first line.
Print Hello, Java. on the second line.
Sample Output
Hello, World.
Hello, Java. */
public class Solution {
public static void main(String[] args) {
System.out.println("Hello, World.");
System.out.println("Hello, Java.");
}
}
<|start_filename|>30 Days of Code (Kotlin)/day0-HelloWorld.kt<|end_filename|>
/*
Question
In this challenge, we review some basic concepts that will get you started with this series. You will need to use the same (or similar) syntax to read input and write output in challenges throughout HackerRank. Check out the Tutorial tab for learning materials and an instructional video!
Task
To complete this challenge, you must save a line of input from stdin to a variable, print Hello, World. on a single line, and finally print the value of your variable on a second line.
You've got this!
Note: The instructions are Java-based, but we support submissions in many popular languages. You can switch languages using the drop-down menu above your editor, and the
inputString variable may be written differently depending on the best-practice conventions of your submission language.
Input Format
A single line of text denoting inputString
(the variable whose contents must be printed).
Output Format
Print Hello, World. on the first line, and the contents of
inputString on the second line.
Sample Input
Welcome to 30 Days of Code!
Sample Output
Hello, World.
Welcome to 30 Days of Code!
Explanation
On the first line, we print the string literal Hello, World.. On the second line, we print the contents of the
inputString variable which, for this sample case, happens to be Welcome to 30 Days of Code!. If you do not print the variable's contents to stdout, you will not pass the hidden test case.
*/
fun main() {
val inputString=readLine()!!;
println("Hello, World.");
println(inputString);
} | gayatripalkar/Hackerrank-Codes |
<|start_filename|>src/util/JWT.cpp<|end_filename|>
//
// Created by 74079 on 2022/2/12.
//
#include "JWT.h"
QString JWT::getSignature(const QString &cookie) {
    // Split the token
QList<QString> arr = cookie.split('.');
    // Parse the payload
JWTPayload payload;
QByteArray payloadStr = QByteArray::fromBase64(arr[1].toLatin1());
xpack::json::decode(payloadStr.toStdString(), payload);
QByteArray val = QByteArray::fromBase64(payload.val.toLatin1());
return QString::fromLatin1(val.mid(9, 16));
}
QString JWT::setMxid(const QString &cookie, const QString &mxid) {
    // Split the token
QList<QString> arr = cookie.split('.');
    // Parse the payload
JWTPayload payload;
QByteArray payloadStr = QByteArray::fromBase64(arr[1].toLatin1());
xpack::json::decode(payloadStr.toStdString(), payload);
    // Append mxid
QByteArray val = QByteArray::fromBase64(payload.val.toLatin1());
val.append(0x10);
val.append(mxid.toLatin1());
val.append(0x01);
payload.val = val.toBase64();
    // Re-encode the payload
payloadStr = QByteArray::fromStdString(xpack::json::encode(payload));
arr[1] = QString::fromLatin1(payloadStr.toBase64());
return arr.join('.');
}
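// Illustrative usage sketch (not part of the original project): the two helpers above are
// typically combined when patching a session cookie; how callers consume the signature is
// an assumption here.
static QString exampleRebuildCookie(const QString &cookie, const QString &mxid) {
    QString signature = JWT::getSignature(cookie); // 16 bytes taken from the payload's "val"
    Q_UNUSED(signature);
    return JWT::setMxid(cookie, mxid);             // same cookie with mxid appended to "val"
}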
<|start_filename|>src/lib/Hex.cpp<|end_filename|>
//
// Created by arc on 2022/1/20.
//
#include "Hex.h"
std::string Hex::stringify(const std::string &data) {
const std::string hex = "0123456789ABCDEF";
std::stringstream ss;
for (char i: data) {
ss << hex[(unsigned char) i >> 4] << hex[(unsigned char) i & 0xf];
}
return ss.str();
}
std::string Hex::parse(const std::string &str) {
std::string result;
for (size_t i = 0; i < str.length(); i += 2) {
std::string byte = str.substr(i, 2);
char chr = (char) (int) strtol(byte.c_str(), nullptr, 16);
result.push_back(chr);
}
return result;
}
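// Illustrative usage sketch (not part of the original project): stringify and parse are
// inverses, so hex-encoding and then decoding should return the original bytes.
static bool exampleHexRoundTrip() {
    std::string raw("\x01\x2A\x7F", 3);
    std::string hex = Hex::stringify(raw);   // "012A7F"
    return Hex::parse(hex) == raw;           // true
}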
<|start_filename|>src/model/DateDetail.h<|end_filename|>
//
// Created by 74079 on 2022/1/22.
//
#ifndef QT_ZMYY_DATEDETAIL_H
#define QT_ZMYY_DATEDETAIL_H
#include <QList>
#include <QString>
#include "../lib/xpack/json.h"
class DateDetail {
public:
// id
int customerid;
    // Name
    QString customer;
    // Start time
    QString StartTime;
    // End time
    QString EndTime;
    // Parameter required when submitting data
    QString mxid;
    // Remaining quantity
int qty;
XPACK(O(customerid, customer, StartTime, EndTime, mxid, qty));
};
#endif //QT_ZMYY_DATEDETAIL_H
<|start_filename|>src/util/Http.cpp<|end_filename|>
//
// Created by arc on 2022/1/24.
//
#include "Http.h"
static QString COOKIE;
enum class HttpMethod {
GET, POST, PUT, DELETE
};
// Member information cached at request time
class HttpCache {
public:
std::function<void(const QString &, int)> successHandler = nullptr;
std::function<void(const QString &, int)> failHandler = nullptr;
bool internal = true;
QString charset;
QNetworkAccessManager *manager = nullptr;
};
// HTTP request helper
class HttpPrivate {
friend class Http;
HttpPrivate(const QString &url);
~HttpPrivate();
    // Get the QNetworkAccessManager to use
    QNetworkAccessManager *getManager();
    // Build the request cache
    HttpCache cache();
    // Add common data: headers and cookie
    static void addCommon(HttpPrivate *d);
    // Create the request
    static QNetworkRequest createRequest(HttpPrivate *d, HttpMethod method);
    // Execute the request (asynchronously)
    static void exec(HttpPrivate *d, HttpMethod method);
    // Execute the request (synchronously)
    static HttpResponse execSync(HttpPrivate *d, HttpMethod method);
    // Read the response data
    static QString readReply(QNetworkReply *reply, const QString &charset = "UTF-8");
    // Handler run when the request finishes
static void
handleFinish(HttpCache cache, QNetworkReply *reply, const QString &successMessage, const QString &failMessage);
    // Request URL
    QString url;
    // JSON body
    QString json;
    // Request parameters (form format)
    QUrlQuery params;
    // Character set
    QString charset = "UTF-8";
    // Request headers
    QHash<QString, QString> headers;
    // QNetworkAccessManager object that executes the HTTP request
    QNetworkAccessManager *manager = nullptr;
    // If true, upload the body as JSON; otherwise use form encoding
    bool useJson = false;
    // Whether the automatically created manager is used
    bool internal = true;
    // Success callback; the parameter is the response string
    std::function<void(const QString &, int)> successHandler = nullptr;
    // Failure callback; the parameters are the failure reason and the HTTP status code
std::function<void(const QString &, int)> failHandler = nullptr;
};
HttpPrivate::HttpPrivate(const QString &url) : url(url) {}
HttpPrivate::~HttpPrivate() {
manager = nullptr;
successHandler = nullptr;
failHandler = nullptr;
}
QNetworkAccessManager *HttpPrivate::getManager() {
return internal ? new QNetworkAccessManager() : manager;
}
HttpCache HttpPrivate::cache() {
HttpCache cache;
cache.successHandler = successHandler;
cache.failHandler = failHandler;
cache.internal = internal;
cache.charset = charset;
cache.manager = getManager();
return cache;
}
void HttpPrivate::addCommon(HttpPrivate *d) {
QString time = QString("zfsw_%1").arg(QDateTime::currentSecsSinceEpoch());
time = time.mid(0, time.length() - 1);
QString zftsl = QCryptographicHash::hash(time.toLatin1(), QCryptographicHash::Md5).toHex();
d->headers["content-type"] = "application/json";
d->headers["User-Agent"] = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36 MicroMessenger/7.0.9.501 NetType/WIFI MiniProgramEnv/Windows WindowsWechat";
d->headers["zftsl"] = zftsl;
d->headers["Referer"] = "https://servicewechat.com/wx2c7f0f3c30d99445/92/page-frame.html";
d->headers["Accept-Encoding"] = "gzip, deflate, br";
d->headers["Cookie"] = COOKIE;
}
QNetworkRequest HttpPrivate::createRequest(HttpPrivate *d, HttpMethod method) {
bool get = method == HttpMethod::GET;
    // URL parameters
if (!d->params.isEmpty()) {
d->url += (d->url.contains("?") ? "&" : "?") + d->params.toString(QUrl::FullyEncoded);
}
    // Set Content-Type for non-GET requests
if (!get && !d->useJson) {
d->headers["Content-Type"] = "application/x-www-form-urlencoded";
} else if (!get && d->useJson) {
d->headers["Content-Type"] = "application/json; charset=utf-8";
}
    // Add the request headers to the request
QNetworkRequest request(QUrl(d->url));
HttpPrivate::addCommon(d);
for (auto i = d->headers.cbegin(); i != d->headers.cend(); ++i) {
request.setRawHeader(i.key().toUtf8(), i.value().toUtf8());
}
return request;
}
void HttpPrivate::exec(HttpPrivate *d, HttpMethod method) {
    // 1. Cache the variables we need; capture them by value (=) in the lambda (cannot capture by reference &, because d has already been destructed)
    HttpCache cache = d->cache();
    // 2. Create the variables needed for the request
    QNetworkRequest request = createRequest(d, method);
    QNetworkReply *reply = nullptr;
    // 3. Execute the request according to method
switch (method) {
case HttpMethod::GET:
reply = cache.manager->get(request);
break;
case HttpMethod::POST:
reply = cache.manager->post(request, d->useJson ? d->json.toUtf8() : d->params.toString(
QUrl::FullyEncoded).toUtf8());
break;
case HttpMethod::PUT:
reply = cache.manager->put(request, d->useJson ? d->json.toUtf8() : d->params.toString(
QUrl::FullyEncoded).toUtf8());
break;
case HttpMethod::DELETE:
reply = cache.manager->deleteResource(request);
break;
default:
break;
}
    // 4. When the request finishes, read the response data in one go and run the callbacks in handleFinish
    QObject::connect(reply, &QNetworkReply::finished, [=] {
        // The encryption key is obtained from the cookie; it is not re-saved here, to keep the key from changing
        // Save the returned cookie: it carries new content, but the encryption key is unchanged, so data encryption/decryption is not affected
if (reply->hasRawHeader("Set-Cookie")) {
COOKIE = reply->rawHeader("Set-Cookie");
}
QString successMessage = HttpPrivate::readReply(reply, cache.charset.toUtf8());
QString failMessage = reply->errorString();
HttpPrivate::handleFinish(cache, reply, successMessage, failMessage);
});
}
HttpResponse HttpPrivate::execSync(HttpPrivate *d, HttpMethod method) {
    // 1. Cache the variables we need; capture them by value (=) in the lambda (cannot capture by reference &, because d has already been destructed)
    HttpCache cache = d->cache();
    // 2. Create the variables needed for the request
    QNetworkRequest request = createRequest(d, method);
    QNetworkReply *reply = nullptr;
    // 3. Execute the request according to method
switch (method) {
case HttpMethod::GET:
reply = cache.manager->get(request);
break;
case HttpMethod::POST:
reply = cache.manager->post(request, d->useJson ? d->json.toUtf8() : d->params.toString(
QUrl::FullyEncoded).toUtf8());
break;
case HttpMethod::PUT:
reply = cache.manager->put(request, d->useJson ? d->json.toUtf8() : d->params.toString(
QUrl::FullyEncoded).toUtf8());
break;
case HttpMethod::DELETE:
reply = cache.manager->deleteResource(request);
break;
default:
break;
}
    // 4. Wait for the request to finish, then read the response data in one go
    QEventLoop eventLoop;
    QObject::connect(reply, &QNetworkReply::finished, &eventLoop, &QEventLoop::quit);
    eventLoop.exec(QEventLoop::ExcludeUserInputEvents);
    // The encryption key is obtained from the cookie; it is not re-saved here, to keep the key from changing
    // Save the returned cookie: it carries new content, but the encryption key is unchanged, so data encryption/decryption is not affected
if (reply->hasRawHeader("Set-Cookie")) {
COOKIE = reply->rawHeader("Set-Cookie");
}
    // Assemble the response data
HttpResponse response;
response.isSuccess = reply->error() == QNetworkReply::NoError;
response.status = reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt();
response.success = HttpPrivate::readReply(reply, cache.charset.toUtf8());
response.fail = reply->errorString();
    // 3. Release the reply and manager objects
reply->deleteLater();
if (cache.internal && cache.manager != nullptr) {
cache.manager->deleteLater();
}
return response;
}
QString HttpPrivate::readReply(QNetworkReply *reply, const QString &charset) {
QTextStream in(reply);
QString result;
in.setCodec(charset.toUtf8());
while (!in.atEnd()) {
result += in.readLine();
}
return result;
}
void HttpPrivate::handleFinish(HttpCache cache, QNetworkReply *reply, const QString &successMessage,
const QString &failMessage) {
if (reply->error() == QNetworkReply::NoError) {
        // 1. Run the success callback
if (nullptr != cache.successHandler) {
cache.successHandler(successMessage, reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt());
}
} else {
        // 2. Run the failure callback
if (nullptr != cache.failHandler) {
cache.failHandler(failMessage, reply->error());
}
}
    // 3. Release the reply and manager objects
reply->deleteLater();
if (cache.internal && cache.manager != nullptr) {
cache.manager->deleteLater();
}
}
/*--------------------------------------------------- Public implementation -------------------------------------------------------*/
// Note: in asynchronous requests, Http's HttpPrivate member d has already been destructed, so cache the relevant variables as stack objects first and access them by value with =
Http::Http(const QString &url) : d(new HttpPrivate(url)) {}
Http::~Http() {
delete d;
}
Http &Http::manager(QNetworkAccessManager *manager) {
d->manager = manager;
d->internal = (nullptr == manager);
return *this;
}
Http &Http::param(const QString &name, const QVariant &value) {
d->params.addQueryItem(name, value.toString());
return *this;
}
Http &Http::params(const QMap<QString, QVariant> &ps) {
for (auto iter = ps.cbegin(); iter != ps.cend(); ++iter) {
d->params.addQueryItem(iter.key(), iter.value().toString());
}
return *this;
}
Http &Http::json(const QString &json) {
d->useJson = true;
d->json = json;
return *this;
}
Http &Http::header(const QString &name, const QString &value) {
d->headers[name] = value;
return *this;
}
Http &Http::headers(const QMap<QString, QString> &nameValues) {
for (auto i = nameValues.cbegin(); i != nameValues.cend(); ++i) {
d->headers[i.key()] = i.value();
}
return *this;
}
Http &Http::charset(const QString &cs) {
d->charset = cs;
return *this;
}
Http &Http::success(std::function<void(const QString &, int)> successHandler) {
d->successHandler = successHandler;
return *this;
}
Http &Http::fail(std::function<void(const QString &, int)> failHandler) {
d->failHandler = failHandler;
return *this;
}
void Http::setCookie(const QString &cookie) {
if (!cookie.isEmpty()) {
COOKIE = QString("ASP.NET_SessionId=%1").arg(cookie);
}
}
void Http::get() {
HttpPrivate::exec(d, HttpMethod::GET);
}
void Http::post() {
HttpPrivate::exec(d, HttpMethod::POST);
}
void Http::put() {
HttpPrivate::exec(d, HttpMethod::PUT);
}
void Http::remove() {
HttpPrivate::exec(d, HttpMethod::DELETE);
}
HttpResponse Http::getSync() {
return HttpPrivate::execSync(d, HttpMethod::GET);
}
HttpResponse Http::postSync() {
return HttpPrivate::execSync(d, HttpMethod::POST);
}
HttpResponse Http::putSync() {
return HttpPrivate::execSync(d, HttpMethod::PUT);
}
HttpResponse Http::removeSync() {
return HttpPrivate::execSync(d, HttpMethod::DELETE);
}
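// Illustrative usage sketch (not part of the original project): the fluent interface above
// is meant to be chained; the URL and parameter name below are placeholders.
static void exampleAsyncGet() {
    Http("https://example.com/api")
            .param("id", 1)
            .success([](const QString &body, int status) { /* handle response */ })
            .fail([](const QString &reason, int code) { /* handle error */ })
            .get();
}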
<|start_filename|>src/model/OrderPost.h<|end_filename|>
//
// Created by 74079 on 2022/1/23.
//
#ifndef QT_ZMYY_ORDERPOST_H
#define QT_ZMYY_ORDERPOST_H
#include <QString>
#include "../lib/xpack/json.h"
class OrderPost {
public:
    // Date of birth
    QString birthday;
    // Phone number
    QString tel;
    // Sex. 1: male; 2: female
    int sex;
    // Name
    QString cname;
    // Document type. 1: ID card; 2: passport; 3: HK/Macau permit; 4: Taiwan compatriot permit
    int doctype;
    // Document number
    QString idcard;
    // mxid
    QString mxid;
    // Vaccination date
    QString date;
    // Product id
    QString pid;
    // Vaccination (dose) number
    int Ftime;
    // User id
QString guid;
XPACK(O(birthday, tel, sex, cname, doctype, idcard, mxid, date, pid, Ftime, guid));
};
#endif //QT_ZMYY_ORDERPOST_H
<|start_filename|>src/lib/xpack/json_data.h<|end_filename|>
/*
* Copyright (C) 2021 Duowan Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __X_PACK_JSON_DATA_H
#define __X_PACK_JSON_DATA_H
/*
It is more reasonable to rely on JsonData for encoder and decoder,
but in order to implement decode and String in JsonData for easy use,
so the other way around
*/
#include "json_encoder.h"
#include "json_decoder.h"
namespace xpack {
// wrapper for rapidjson::Value.
// If we use other json parser someday, users won’t have to modify the code.
// Most of the code is copied from rapidjson.
enum JsonType {
kNullType = 0, //!< null
kFalseType = 1, //!< false
kTrueType = 2, //!< true
kObjectType = 3, //!< object
kArrayType = 4, //!< array
kStringType = 5, //!< string
kNumberType = 6 //!< number
};
class JsonEncoder;
// Currently JsonData is read-only and may support modification in the future
class JsonData:private noncopyable {
class MemberIterator {
public:
MemberIterator(rapidjson::Value::ConstMemberIterator iter, JsonData* parent):_iter(iter),_parent(parent){}
bool operator != (const MemberIterator &that) const {
return _iter != that._iter;
}
MemberIterator& operator ++ () {
++_iter;
return *this;
}
const char *Key() const {
return _iter->name.GetString();
}
JsonData& Val() const {
return _parent->member(&(_iter->value), *(_parent->alloc()));
}
private:
rapidjson::Value::ConstMemberIterator _iter;
JsonData* _parent;
};
public:
typedef MemberIterator Iterator;
// check type
JsonType Type() const {
if (_val == NULL) {
return kNullType;
}
return static_cast<JsonType>(_val->GetType());
}
bool IsNull() const { return _val->IsNull(); }
bool IsBool() const { return _val->IsBool(); }
bool IsObject() const { return _val->IsObject(); }
bool IsArray() const { return _val->IsArray(); }
bool IsNumber() const { return _val->IsNumber(); }
bool IsDouble() const { return _val->IsDouble(); }
bool IsString() const { return _val->IsString(); }
std::string GetString() const {return _val->GetString(); }
bool GetBool() const {return _val->GetBool();}
int GetInt() const {return _val->GetInt();}
unsigned int GetUint() const {return _val->GetUint();}
int64_t GetInt64() const {return _val->GetInt64();}
uint64_t GetUint64() const {return _val->GetUint64();}
float GetFloat() const {return _val->GetFloat();}
double GetDouble() const {return _val->GetDouble();}
// check is valid JsonData
operator bool() const {
return NULL != _val;
}
// get array size
size_t Size() const {
if (_val->IsArray()) {
return (size_t)_val->Size();
} else {
return 0;
}
}
JsonData& operator[](size_t index) {
JsonData *d = alloc();
if (NULL != _val && _val->IsArray()) {
if (index < (size_t)_val->Size()) {
d->_val = &(*_val)[(rapidjson::SizeType)index];
} else {
// TODO decode_exception("Out of index", NULL);
}
} else {
// TODO decode_exception("not array", NULL);
}
return *d;
}
JsonData& operator[](const char*key) {
JsonData *d = alloc();
if (NULL != _val && _val->IsObject()) {
rapidjson::Value::ConstMemberIterator iter;
if (_val->MemberEnd() != (iter=_val->FindMember(key))) {
d->_val = &(iter->value);
}
} else {
// TODO decode_exception("not object", key);
}
return *d;
}
// iter
Iterator Begin() {
return Iterator(_val->MemberBegin(), this);
}
Iterator End() {
return Iterator(_val->MemberEnd(), this);
}
// JsonData is noncopyable, if need to pass it outside the function, use Swap
// DO NOT Swap child node. JsonData[0].Swap will crash
void Swap(JsonData& d) {
rapidjson::MemoryPoolAllocator<> *allocator = _allocator;
const rapidjson::Value *val = _val;
bool alloc = _alloc;
std::string raw_data = _raw_data;
_allocator = d._allocator;
_val = d._val;
_alloc = d._alloc;
_raw_data = d._raw_data;
d._allocator = allocator;
d._val = val;
d._alloc = alloc;
d._raw_data = raw_data;
}
template <class T>
bool decode(T &val) const {
JsonDecoder d(_val);
return d.decode(NULL, val, NULL);
}
std::string String() {
if (_raw_data.empty()) {
JsonEncoder e(-1);
xpack_encode(e, NULL, NULL);
_raw_data = e.String();
}
return _raw_data;
}
public:
//friend class JsonDecoder;
JsonData():_allocator(NULL),_val(NULL), _alloc(false) {
}
~JsonData() {
reset();
if (NULL != _allocator) {
delete _allocator;
_allocator = NULL;
}
}
bool xpack_decode(JsonDecoder &obj, const char*key, const Extend *ext) {
const rapidjson::Value *v = obj.get_val(key);
if (NULL == v) {
if (NULL!=key && Extend::Mandatory(ext)) {
obj.decode_exception("mandatory key not found", key);
}
return false;
} else {
copy(v);
return true;
}
}
bool xpack_encode(JsonEncoder &obj, const char*key, const Extend *ext) const {
switch (Type()){
case kNullType:
return false; // not support write null now
case kFalseType:
case kTrueType:
return obj.encode(key, GetBool(), ext);
case kStringType:
return obj.encode(key, GetString(), ext);
case kNumberType:
if (IsDouble()) {
return obj.encode(key, GetDouble(), ext);
} else {
return obj.encode(key, GetInt64(), ext);
}
case kObjectType:
obj.ObjectBegin(key, ext);
for (rapidjson::Value::ConstMemberIterator iter = _val->MemberBegin(); iter!=_val->MemberEnd(); ++iter){
JsonData d;
d._val = &iter->value;
d.xpack_encode(obj, iter->name.GetString(), ext);
}
obj.ObjectEnd(key, ext);
break;
case kArrayType:{
obj.ArrayBegin(key, ext);
size_t max = Size();
for (size_t i = 0; i<max; ++i) {
JsonData d;
d._val = &(*_val)[(rapidjson::SizeType)i];
d.xpack_encode(obj, NULL, ext);
}
obj.ArrayEnd(key, ext);
}
break;
}
return true;
}
private:
JsonData(const rapidjson::Value *v):_allocator(NULL), _val(v), _alloc(false) {
}
JsonData* alloc() {
JsonData *d = new JsonData();
_collector.push_back(d);
return d;
}
// after xpack::json::decode, JsonDecoder will destruct, so we need copy data
// to JsonData, and copy can only be called by decode
void copy(const rapidjson::Value *v) {
if (NULL == _allocator) {
_allocator = new rapidjson::MemoryPoolAllocator<>();
}
reset();
_val = new rapidjson::Value(*v, *_allocator, true);
_alloc = true;
}
void reset() {
if (_alloc && _val!=NULL) {
delete _val;
_val = NULL;
_alloc = false;
}
for (size_t i=0; i<_collector.size(); ++i) {
delete _collector[i];
}
_collector.clear();
}
JsonData& member(const rapidjson::Value *v, JsonData&d) const {
d._val = v;
return d;
}
rapidjson::MemoryPoolAllocator<> *_allocator;
const rapidjson::Value *_val;
bool _alloc;
std::vector<JsonData*> _collector;
std::string _raw_data;
};
template<>
struct is_xpack_xtype<JsonData> {static bool const value = true;};
inline bool xpack_xtype_decode(JsonDecoder &obj, const char*key, JsonData &val, const Extend *ext) {
return val.xpack_decode(obj, key, ext);
}
inline bool xpack_xtype_encode(JsonEncoder &obj, const char*key, const JsonData &val, const Extend *ext) {
return val.xpack_encode(obj, key, ext);
}
}
#endif
<|start_filename|>src/util/JWT.h<|end_filename|>
//
// Created by 74079 on 2022/2/12.
// In-house library
//
#ifndef ZMYY_JWT_H
#define ZMYY_JWT_H
#include <QString>
#include <QList>
#include <QByteArray>
#include "../lib/xpack/json.h"
// JWT payload
class JWTPayload {
public:
long double iat = 0;
long double exp = 0;
QString sub;
QString jti;
QString val;
XPACK(O(iat, exp, sub, jti, val));
};
class JWT {
public:
    // Get the signature
static QString getSignature(const QString &cookie);
    // Mix the mxid into the cookie
static QString setMxid(const QString &cookie, const QString &mxid);
};
#endif //ZMYY_JWT_H
<|start_filename|>src/util/WeChatUtil.h<|end_filename|>
//
// Created by arc on 2022/1/19.
//
#ifndef INJECTDLL_WECHATUTIL_H
#define INJECTDLL_WECHATUTIL_H
#include <QDebug>
#include <QString>
#include <QList>
#include <QDir>
#include <windows.h>
#include <tlhelp32.h>
class WeChatUtil {
public:
/**
     * Inject a DLL
     * @param pName - process name. Case sensitive
     * @param dllPath - full path of the DLL
* @return
*/
static bool InjectDLL(const QString& pName, QString dllPath);
/**
     * Eject a DLL
     * @param pName - process name. Case sensitive
     * @param dllPath - full path of the DLL
* @return
*/
static bool EjectDLL(const QString& pName, QString dllPath);
private:
/**
     * Find a process id by process name
     * @param pName - process name
* @return
*/
static int findPid(const QString& pName);
/**
     * Get the DLL name from its path
     * @param dllPath - full path of the DLL
* @return
*/
static QString getDLLName(const QString& dllPath);
/**
     * Elevate privileges
*/
static bool setPrivilege();
};
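// Illustrative usage sketch (not part of the original project); the process name and DLL
// path below are placeholders:
//   WeChatUtil::InjectDLL("WeChat.exe", "C:/tools/hook.dll");
//   WeChatUtil::EjectDLL("WeChat.exe", "C:/tools/hook.dll");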
#endif //INJECTDLL_WECHATUTIL_H
<|start_filename|>main.cpp<|end_filename|>
#include <QApplication>
#include <QStyleFactory>
#include "src/ui/MainWidget.h"
int main(int argc, char *argv[]) {
QApplication a(argc, argv);
    // Use a built-in system style; search "gallery" in the Qt help documentation for more theme styles
QApplication::setStyle(QStyleFactory::create("fusion"));
MainWidget widget;
widget.show();
return QApplication::exec();
}
| a976606645/qt-zmyy |
<|start_filename|>docs/styles/main.js<|end_filename|>
var currentYear= new Date().getFullYear();
document.getElementById("currentYear").innerHTML = currentYear; | nunit/docs |
<|start_filename|>src/main.lua<|end_filename|>
local overload = require 'autograd.overload'
local RuntimeDirect = require 'autograd.runtime.direct'
local RuntimeCodegen = require 'autograd.runtime.codegen'
local util = require 'autograd.util'
-- Support functions
include 'support.lua'
-- Standard overloaded functions with gradients
include 'gradfuns.lua'
local defaultOptimize = false
local function optimize(opt)
defaultOptimize = opt
end
local function optimizing()
return defaultOptimize
end
local defaultProtected = false
local function protected(prot)
defaultProtected = prot
end
local profile = {
SUMMARY = "summary",
DETAILED = "detailed",
OFF = "off"
}
local function grad(fn, gradOpt)
gradOpt = gradOpt or { }
local opt = util.shallowCopy(gradOpt)
opt.argnum = opt.gradArg or 1
opt.optimize = util.defaultBool(opt.optimize, defaultOptimize)
opt.protected = util.defaultBool(opt.protected, defaultProtected)
opt.reduceFootprint = util.defaultBool(opt.reduceFootprint, false)
opt.withForward = util.defaultBool(opt.withForward, true)
opt.withGradients = util.defaultBool(opt.withGradients, true)
opt.showCode = util.defaultBool(opt.showCode, false)
opt.dotFile = opt.dotFile or nil
opt.partialGrad = util.defaultBool(opt.partialGrad, false)
opt.profile = opt.profile or profile.OFF
opt.profileReportFrequency = opt.profileReportFrequency or 10
if opt.optimize then
if opt.profile == profile.DETAILED then
error("detailed profile not available in optimized mode")
end
return RuntimeCodegen.create(fn, opt)
else
if opt.stableGradients then
error("stable gradient tensors only available in optimized mode")
end
return RuntimeDirect.create(fn, opt)
end
end
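-- Illustrative usage sketch (not part of this file); the loss function and tensor shapes
-- below are placeholders:
--   local autograd = require 'autograd'
--   local f = function(params, x) return torch.sum(torch.cmul(params.W, x)) end
--   local df = autograd.grad(f, { optimize = true })
--   local grads, loss = df({ W = torch.ones(3) }, torch.ones(3))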
-- Main functions:
local autograd = {
grad = grad,
overload = overload,
optimize = optimize,
optimizing = optimizing,
protected = protected,
profile = {
SUMMARY = "summary",
DETAILED = "detailed",
OFF = "off"
}
}
-- Shortcut:
setmetatable(autograd, {
__call = function(self,...)
return grad(...)
end
})
-- Return package
return autograd
<|start_filename|>src/nnwrapper.lua<|end_filename|>
local util = require 'autograd.util'
local nodeApply
local function directApply(fun, gradFun, ...)
return fun.fn(...)
end
local function setApplyFn(fn)
nodeApply = fn or directApply
end
setApplyFn()
local function hasParams(nnObject)
local hasParamFn, params = pcall(nnObject.parameters, nnObject)
params = params or {}
if not hasParamFn or #params == 0 then
return false
else
return true
end
end
local function isCriterion(nnObject)
local isCriterion = false
local mt = getmetatable(nnObject)
if mt then
local mmt = getmetatable(mt)
if mmt then
if mmt.__typename == 'nn.Criterion' then
isCriterion = true
end
end
end
return isCriterion
end
local function isModule(nnObject)
local isModule = false
local mt = getmetatable(nnObject)
if mt then
local mmt = getmetatable(mt)
if mmt then
local t
local mmmt = getmetatable(mmt)
if mmmt then
t = mmmt.__typename
else
t = mmt.__typename
end
if t == "nn.Module" or t == "nn.Sequential" or t == "nn.Container" or t == "nn.Threshold" then
isModule = true
end
end
end
return isModule
end
local function getInputType(x)
local dataType = nil
if torch.isTensor(x) then
dataType = torch.type(x)
elseif type(x) == "table" then
if x[1] then
dataType = torch.type(x[1])
end
end
return dataType
end
local function updateType(nnObject, lastType, newType)
if not newType then
error("Input is neither a tensor or a table of tensors. Type is " .. type(newType))
end
if lastType ~= newType then
lastType = newType
nnObject:type(newType)
end
return nnObject, lastType
end
local function wrapCriterion(nnObject)
local lastType = ""
local mod = {}
local function forward(x, y)
nnObject, lastType = updateType(nnObject, lastType, getInputType(x))
return nnObject:forward(x, y)
end
local function backward(g, x, y)
nnObject, lastType = updateType(nnObject, lastType, getInputType(x))
return nnObject:backward(x, y)
end
local fn = function(x, y)
local backFnDesc = {
object = mod,
raw = nnObject,
method = "backward",
name = "criterion",
fn = backward,
capture = true,
}
local gradFn = {
function(g,ans,x,y)
return nodeApply(backFnDesc, nil, g, x, y)
end,
function(g,ans,x,y)
         -- NOTE: should we throw an error as unimplemented here?
return util.fillSameSizeAs(y, 0)
end,
}
local fnDesc = {
object = mod,
raw = nnObject,
method = "forward",
name = "criterion",
fn = forward,
capture = true,
}
return nodeApply(fnDesc, gradFn, x, y)
end
mod.entry = fn
mod.forward = forward
mod.backward = backward
mod.module = nnObject
-- Shortcut:
setmetatable(mod, {
__call = function(self, ...)
return self.entry(...)
end
})
return mod
end
local function wrapModuleWithoutParams(nnObject)
local lastType = ""
local mod = {}
local function forward(x)
nnObject, lastType = updateType(nnObject, lastType, getInputType(x))
return nnObject:forward(x)
end
local function backward(g,x)
-- NOTE: Is this necessary if it's done forward?
nnObject, lastType = updateType(nnObject, lastType, getInputType(x))
nnObject:zeroGradParameters()
local gradInput = nnObject:backward(x, g)
return gradInput
end
local fn = function(x)
local grads = nil
local backFnDesc = {
object = mod,
raw = nnObject,
method = "backward",
name = "model",
fn = backward,
capture = true,
}
local gradFn = {
function(g,ans,x)
return nodeApply(backFnDesc, nil, g, x)
end
}
local fnDesc = {
object = mod,
raw = nnObject,
method = "forward",
name = "model",
fn = forward,
capture = true,
}
return nodeApply(fnDesc, gradFn, x)
end
mod.entry = fn
mod.forward = forward
mod.backward = backward
mod.module = nnObject
-- Shortcut:
setmetatable(mod, {
__call = function(self, ...)
return self.entry(...)
end
})
return mod
end
local function wrapModuleWithParams(nnObject)
local lastType = ""
local mod = {}
local params = nnObject:parameters()
local function forward(params,x)
nnObject, lastType = updateType(nnObject, lastType, getInputType(x))
local modelParams, modelGradParams = nnObject:parameters()
for i,p in ipairs(modelParams) do
if p ~= params[i] then
-- NOTE: need a better error message
-- if there's a type mismatch
p:view(params[i], params[i]:size())
end
end
return nnObject:forward(x)
end
local function backward(g,params,x)
-- NOTE: Is this necessary if it's done forward?
nnObject, lastType = updateType(nnObject, lastType, getInputType(x))
local modelParams, modelGradParams = nnObject:parameters()
for i,p in ipairs(modelParams) do
if p ~= params[i] then
p:view(params[i], params[i]:size())
end
end
nnObject:zeroGradParameters()
local gradInput = nnObject:backward(x, g)
return {modelGradParams, gradInput}
end
local fn = function(params, x)
local grads = nil
local backFnDesc = {
object = mod,
raw = nnObject,
method = "backward",
name = "model",
fn = backward,
capture = true,
}
local gradFn = {
function(g,ans,params,x)
if grads == nil then
grads = nodeApply(backFnDesc, nil, g, params, x)
end
return grads[1]
end,
function(g,ans,params,x)
if grads == nil then
grads = nodeApply(backFnDesc, nil, g, params, x)
end
return grads[2]
end,
}
local fnDesc = {
object = mod,
raw = nnObject,
method = "forward",
name = "model",
fn = forward,
capture = true,
}
return nodeApply(fnDesc, gradFn, params, x)
end
mod.entry = fn
mod.forward = forward
mod.backward = backward
mod.module = nnObject
-- Shortcut:
setmetatable(mod, {
__call = function(self, ...)
return self.entry(...)
end
})
return mod, params
end
-- Take in an nn module and functionalize it
local functionalize, functionalizePackage
functionalize = function(nnObject)
if type(nnObject) == "string" then
return functionalizePackage(nnObject)
end
if isModule(nnObject) then
if hasParams(nnObject) then
return wrapModuleWithParams(nnObject)
else
return wrapModuleWithoutParams(nnObject)
end
elseif isCriterion(nnObject) then
return wrapCriterion(nnObject)
else
error("Input is not a package name or nn object")
end
end
functionalizePackage = function(packageName)
assert(type(packageName) == 'string')
local loaded, mod = pcall(require, packageName)
if not loaded then
error("Could not load package '" .. packageName .. "'")
else
-- Iterate through every module in the package,
-- and functionalize it
local map = {}
for modName, nnClass in pairs(mod) do
if isModule(nnClass) or isCriterion(nnClass) then
map[modName] = function(...)
local out = {functionalize(nnClass(...))}
return table.unpack(out)
end
end
end
return map
end
end
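-- Illustrative usage sketch (not part of this file); the layer sizes are placeholders:
--   local fnn = functionalize('nn')
--   local linear, params = fnn.Linear(10, 2)      -- params holds {weight, bias}
--   local out = linear(params, torch.randn(10))   -- forward pass through the wrapper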
return {
functionalize = functionalize,
functionalizePackage = functionalizePackage,
setApplyFn = setApplyFn
}
<|start_filename|>examples/train-mnist-autoencoder.lua<|end_filename|>
-- A comparison between autograd and nngraph
-- using an L2-regularized autoencoder with tied weights.
-- Libs
local grad = require 'autograd'
local lossFuns = require 'autograd.loss'
local util = require 'autograd.util'
local Value = require 'autograd.runtime.codegen.Value'
local gradcheck = require 'autograd.gradcheck'
local optim = require 'optim'
grad.optimize(true)
-- Load in MNIST
local trainData, testData, classes = require('./get-mnist.lua')()
trainData.x = trainData.x:view(trainData.x:size(1), -1):double()
local inputSize = trainData.x[1]:nElement()
-- What model to train:
local predict,f,params
-- Define our neural net
function predict(params, input)
-- Encoder
local h1 = util.sigmoid(input * params.W[1] + torch.expand(params.B[1], torch.size(input, 1), torch.size(params.B[1], 2)))
local h2 = util.sigmoid(h1 * params.W[2] + torch.expand(params.B[2], torch.size(input, 1), torch.size(params.B[2], 2)))
local h3 = util.sigmoid(h2 * params.W[3] + torch.expand(params.B[3], torch.size(input, 1), torch.size(params.B[3], 2)))
-- Decoder
local h4 = util.sigmoid(h3 * torch.t(params.W[3]) + torch.expand(params.B[4], torch.size(input, 1), torch.size(params.B[4], 2)))
local h5 = util.sigmoid(h4 * torch.t(params.W[2]) + torch.expand(params.B[5], torch.size(input, 1), torch.size(params.B[5], 2)))
local out = util.sigmoid(h5 * torch.t(params.W[1]) + torch.expand(params.B[6], torch.size(input, 1), torch.size(params.B[6], 2)))
return out
end
-- Define our training loss
function f(params, input, l2Lambda)
-- Reconstruction loss
local prediction = predict(params, input)
local loss = lossFuns.logBCELoss(prediction, input, 1e-6) / torch.size(input, 1)
-- L2 penalty on the weights
for i=1,Value.len(params.W) do
loss = loss + l2Lambda * torch.sum(torch.pow(params.W[i],2))
end
return loss, prediction
end
-- Get the gradients closure magically:
local df = grad(f, { optimize = true })
sizes = {}
sizes['input'] = inputSize
sizes['h1'] = 50
sizes['h2'] = 25
sizes['h3'] = 10
-- L2 penalty strength
l2Lambda = 0.0
-- Define our parameters
-- [-1/sqrt(#output), 1/sqrt(#output)]
torch.manualSeed(0)
local W1 = torch.DoubleTensor(sizes['input'],sizes['h1']):uniform(-1/math.sqrt(sizes['h1']),1/math.sqrt(sizes['h1']))
local W2 = torch.DoubleTensor(sizes['h1'],sizes['h2']):uniform(-1/math.sqrt(sizes['h2']),1/math.sqrt(sizes['h2']))
local W3 = torch.DoubleTensor(sizes['h2'],sizes['h3']):uniform(-1/math.sqrt(sizes['h3']),1/math.sqrt(sizes['h3']))
local B1 = torch.DoubleTensor(1, sizes['h1']):fill(0)
local B2 = torch.DoubleTensor(1, sizes['h2']):fill(0)
local B3 = torch.DoubleTensor(1, sizes['h3']):fill(0)
local B4 = torch.DoubleTensor(1, sizes['h2']):fill(0)
local B5 = torch.DoubleTensor(1, sizes['h1']):fill(0)
local B6 = torch.DoubleTensor(1, sizes['input']):fill(0)
-- Trainable parameters:
params = {
W = {W1, W2, W3},
B = {B1, B2, B3, B4, B5, B6},
}
-- Train a neural network
for epoch = 1,100 do
print('Training Epoch #'..epoch)
for i = 1,trainData.size / 1000 do
-- Next minibatch:
local x = trainData.x[{{(i-1) * 100 + 1, i * 100}, {}}]
-- Grads:
local grads, loss, prediction = df(params,x,l2Lambda)
-- Update weights and biases
for i=1,#params.W do
params.W[i] = params.W[i] - grads.W[i] * 0.01
end
for i=1,#params.B do
params.B[i] = params.B[i] - grads.B[i] * 0.01
end
end
-- Log performance:
print('Cross-entropy loss: '..f(params, trainData.x[{{1,10000}, {}}], l2Lambda))
end
<|start_filename|>src/runtime/direct/DirectNode.lua<|end_filename|>
local isTensor = torch.isTensor
local getOutgrad, newStartNode, node
local DirectNode = { }
function DirectNode:init(value, fun, gradFun, args, values, tape)
local o = {}
tape[tape.nextIndex] = o
tape.nextIndex = tape.nextIndex + 1
o.value = value
o.fun = fun
o.gradFun = gradFun
o.args = args
o.argValues = values
o.size = function(self, ...)
return self.value.size(self.value,...)
end
o.dim = function(self, ...)
return self.value.dim(self.value,...)
end
o.new = function(...)
return o.value.new(...)
end
o.view = function(...)
return torch.view(...)
end
o.viewAs = function(...)
return torch.viewAs(...)
end
o.expand = function(...)
return torch.expand(...)
end
o.expandAs = function(...)
return torch.expandAs(...)
end
setmetatable(o, self)
return o
end
function DirectNode.isNode(n)
return getmetatable(n) == DirectNode
end
function DirectNode.getValue(v)
if (getmetatable(v) == DirectNode) then
return v.value
else
return v
end
end
-- If we passed in just a tensor, return the outgrad.
-- If we passed in a table, return all the outgrads.
function DirectNode.getOutgrad(arg)
local val = DirectNode.getValue(arg)
-- If we have a tensor, we just have one out gradient
if isTensor(val) then
return arg.outgrad
-- If we have a table, then we can recurse the table and spit out the gradient
elseif type(val) == "table" and not (getmetatable(val) == DirectNode) then
local out = {}
for k,v in pairs(arg) do
out[k] = DirectNode.getOutgrad(v)
end
return out
elseif type(val) == "number" then
return arg.outgrad
end
end
-- local newStartNode
function DirectNode.newStartNode(val, tape)
-- If our argument is a tensor, just nodify it straight-up
if isTensor(val) then
return DirectNode:init(val, nil, nil, { }, { }, tape)
-- If our target argument is a table, we'll need to walk its members and node-ify them.
elseif type(val) == "table" then
local valCopy = { }
for k,v in pairs(val) do
valCopy[k] = DirectNode.newStartNode(v, tape)
end
return valCopy
elseif type(val) == "number" then
return DirectNode:init(val, nil, nil, {}, {}, tape)
end
end
function DirectNode.__internal_set(s, k, v)
s[k] = v
return s
end
function DirectNode.__internal_get(s, k)
return s[k]
end
function DirectNode:__index(i)
local value = rawget(self, "value")
if torch.isTensor(value) and value[i] ~= nil then
if type(i) ~= "string" then
return DirectNode.__internal_get(self, i)
else
return value[i]
end
end
return rawget(DirectNode, i)
end
function DirectNode:__newindex(k, v)
local value = rawget(self, "value")
if torch.isTensor(value) then
if type(k) ~= "string" then
return DirectNode.__internal_set(self, k, v)
end
end
return rawset(self, k, v)
end
-- These exist only to be overloaded and called with flattened tensor or number arguments
function DirectNode.__add(a, b)
return a + b
end
function DirectNode.__sub(a, b)
return a - b
end
function DirectNode.__mul(a, b)
return a * b
end
function DirectNode.__div(a, b)
return a / b
end
function DirectNode.__pow(a, b)
return a ^ b
end
function DirectNode.__unm(a)
return -a
end
return DirectNode
<|start_filename|>src/model/SpatialNetwork.lua<|end_filename|>
local sequence = require 'autograd.model.common'.sequence
local hasCudnn, cudnn = pcall(require, 'cudnn')
hasCudnn = hasCudnn and cudnn
local functionalize = require('autograd.nnwrapper').functionalize
local cast = require('autograd.util').cast
if hasCudnn then
cudnn = functionalize('cudnn')
end
local nn = functionalize('nn')
local function SpatialLayer(opt, params, layers, layer2params)
-- options:
opt = opt or {}
local kernelSize = opt.kernelSize or 5
local padding = opt.padding or math.ceil(kernelSize-1)/2
local inputFeatures = opt.inputFeatures or 3
local outputFeatures = opt.outputFeatures or 16
local batchNormalization = opt.batchNormalization or false
local dropoutProb = opt.dropoutProb or 0
local activations = opt.activations
local pooling = opt.pooling or 1
local inputStride = opt.inputStride or 1
local cuda = opt.cuda or false
-- Set up modules
local SpatialConvolution = nn.SpatialConvolutionMM
local SpatialMaxPooling = nn.SpatialMaxPooling
if cuda and hasCudnn then
SpatialConvolution = cudnn.SpatialConvolution
SpatialMaxPooling = cudnn.SpatialMaxPooling
end
-- container
layers = layers or {}
params = params or {}
layer2params = layer2params or {}
-- Dropout
--------------------------------------
if dropoutProb > 0 then
table.insert(layers, nn.SpatialDropout(dropoutProb) )
end
-- Convolution
--------------------------------------
local l,p = SpatialConvolution(inputFeatures, outputFeatures, kernelSize, kernelSize, inputStride, inputStride, padding, padding)
table.insert(layers, l)
table.insert(params, p)
layer2params[#layers] = #params
-- Batch normalization
--------------------------------------
if batchNormalization then
local l,p = nn.SpatialBatchNormalization(outputFeatures)
table.insert(layers, l)
table.insert(params, p)
layer2params[#layers] = #params
end
-- Activations
--------------------------------------
if opt.activations then
local activation
if hasCudnn and cuda then
activation = cudnn[activations]()
else
activation = nn[activations]()
end
table.insert(layers, activation)
end
-- Pooling
--------------------------------------
if pooling > 1 then
table.insert(layers, SpatialMaxPooling(pooling, pooling))
end
return sequence(layers, layer2params), params, layers
end
return function(opt, params, layers, layer2params)
-- options:
opt = opt or {}
local kernelSize = opt.kernelSize or 5
local padding = opt.padding
local inputFeatures = opt.inputFeatures or 3
local hiddenFeatures = opt.hiddenFeatures or {16,32,64}
local batchNormalization = opt.batchNormalization or false
local dropoutProb = opt.dropoutProb or 0
local dropoutProbs = opt.dropoutProbs or {}
local activations = opt.activations or 'ReLU'
local poolings = opt.poolings or {1,1,1}
local inputStride = opt.inputStride or 1
local cuda = opt.cuda or false
-- container
layers = layers or {}
params = params or {}
layer2params = layer2params or {}
-- add layers:
for i,hiddens in ipairs(hiddenFeatures) do
SpatialLayer({
inputStride = inputStride,
inputFeatures = inputFeatures,
outputFeatures = hiddens,
pooling = poolings[i],
dropoutProb = dropoutProbs[i] or dropoutProb,
activations = activations,
batchNormalization = batchNormalization,
kernelSize = kernelSize,
padding = padding,
cuda = cuda,
}, params, layers, layer2params)
inputFeatures = hiddens
inputStride = 1
end
-- Type cast, if CUDA
--------------------------------------
if cuda then
params = cast(params, "cuda")
end
-- layers
return sequence(layers, layer2params), params, layers
end
<|start_filename|>src/auto/AutoModule.lua<|end_filename|>
local auto = require 'autograd.auto'
local autograd = require 'autograd.main'
-- This generates a new autograd.nn.AutoModule.[moduleName]
-- that takes a suitable forward function executed in :updateOutput
-- it automatically deals with the updateGradInput and accGradParameters differentiation
return function(moduleName)
moduleName = moduleName or error('You must provide a name for your auto-differentiated module.')
if not auto.factory('autograd.nn.module.'..moduleName) then
local AutoModule, parent = torch.class('autograd.nn.module.'..moduleName, 'nn.Module', auto)
local module = auto[moduleName]
-- Constructor needs a function and params (if any)
-- The input function has the format (for Linear + ReLU):
-- function(input, weight, bias)
-- local y = params.weight * input + params.bias
-- local output = torch.mul( torch.abs( y ) + y, 0.5)
-- return output
-- end
function module:__init(fn, weight, bias)
parent.__init(self)
local mt = getmetatable(self)
self.fn = fn or error('An autograd function must be specified as input to AutoModule')
self.weight,self.gradWeight = weight and weight, weight and weight:clone()
self.bias,self.gradBias = bias and bias, bias and bias:clone()
self.fnWrapper = function(params)
return self.fn(params.input, params.weight, params.bias)
end
end
function forward(self, input)
self.output = self.f(input)
return self.output
end
function module:validate()
if not self.validated then
local mt = getmetatable(self)
mt.validated = true
mt.f = mt.f or autograd(self.fnWrapper, { withForward = true, withGradients = false })
mt.b = mt.b or autograd(self.fnWrapper, { withForward = false, withGradients = true, partialGrad = true })
end
end
function module:updateOutput(input)
self.grads = nil
self:validate()
self.output = self.f({input=input, weight=self.weight, bias=self.bias})
return self.output
end
function module:updateGradInput(input, gradOutput)
self.grads = self.b({input=input, weight=self.weight, bias=self.bias}, gradOutput)
self.gradInput = self.grads.input
return self.gradInput
end
function module:accGradParameters(input, gradOutput, scale)
if self.weight then
if not self.grads then
self.grads = self.b({input=input, weight=self.weight, bias=self.bias}, gradOutput)
end
self.gradWeight:add(scale, self.grads.weight)
self.gradBias:add(scale, self.grads.bias)
end
end
end
local module = auto[moduleName]
return module
end
<|start_filename|>src/util.lua<|end_filename|>
-- Utilities
local util = {}
local Value = require 'autograd.runtime.codegen.Value'
local cast
function cast(tableOfParams, typeName)
-- Some nice aliases
if typeName == "float" then typeName = "torch.FloatTensor" end
if typeName == "double" then typeName = "torch.DoubleTensor" end
if typeName == "cuda" then typeName = "torch.CudaTensor" end
-- If we passed in a tensor, just cast it
if torch.isTensor(tableOfParams) then
return tableOfParams:type(typeName)
end
-- Recursively cast
local out = {}
for key,value in pairs(tableOfParams) do
if torch.isTensor(value) then
out[key] = value:type(typeName)
elseif type(value) == "table" then
out[key] = cast(value,typeName)
else
out[key] = value
end
end
return out
end
util.cast = cast
-- Comparison functions
-- We need to define our own for comparing scalars
-- to Nodes, because Lua checks for type equality before using comparison metamethods e.g. __lt,
-- which fails in autograd because we may be comparing numbers and Nodes. Node type is table, not number,
-- and we cannot override this default behavior, so our metamethods will never be called.
-- This unfortunate state of things is a good argument for a DSL, to improve the user experience.
local Value = require 'autograd.runtime.codegen.Value'
local DirectNode = require 'autograd.runtime.direct.DirectNode'
local function getValue(v)
if Value.isValue(v) then
return v:get()
elseif DirectNode.isNode(v) then
return DirectNode.getValue(v)
else
return v
end
end
function util.lt(a, b)
return getValue(a) < getValue(b)
end
function util.le(a, b)
return getValue(a) <= getValue(b)
end
function util.gt(a, b)
return getValue(a) > getValue(b)
end
function util.ge(a, b)
return getValue(a) >= getValue(b)
end
function util.eq(a, b)
return getValue(a) == getValue(b)
end
function util.oneHot(labels, n)
--[[
Assume labels is a 1D tensor of contiguous class IDs, starting at 1.
Turn it into a 2D tensor of size labels:size(1) x nUniqueLabels
This is a pretty dumb function, assumes your labels are nice.
]]
local n = n or labels:max()
local nLabels = labels:size(1)
local out = labels.new(nLabels, n):fill(0)
for i=1,nLabels do
out[i][labels[i]] = 1.0
end
return out
end
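-- Example (illustrative): util.oneHot(torch.Tensor{3, 1}, 3) returns the 2x3 tensor
--   {{0, 0, 1},
--    {1, 0, 0}}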
-- Helpers:
function util.logSumExp(array)
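   -- Numerically stable: log(sum(exp(x))) = max(x) + log(sum(exp(x - max(x))))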
local max = torch.max(array)
return torch.log(torch.sum(torch.exp(array-max))) + max
end
function util.logSoftMax(array)
return array - util.logSumExp(array)
end
function util.sigmoid(array)
return torch.sigmoid(array)
end
function util.sigmoidInPlace(output, input)
output:resizeAs(input):copy(input)
output:mul(-1):exp():add(1):pow(-1)
return output
end
function util.lookup(tble, indexes)
local indexSize = torch.size(indexes):totable()
local rows = torch.index(tble, 1, torch.long(torch.view(indexes, -1)))
table.insert(indexSize, torch.size(rows, 2))
return torch.view(rows, table.unpack(indexSize))
end
function util.dropout(state, dropout)
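   -- Inverted dropout: each element is kept with probability (1 - dropout) and the kept
   -- values are scaled by 1/(1 - dropout), so the expected output matches the input.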
dropout = dropout or 0
local keep = 1 - dropout
local s = util.newTensorLike(state)
local keep = torch.mul(torch.bernoulli(s, keep), 1 / keep)
return torch.cmul(state, keep)
end
-- NOTE
-- Made inefficient for grads of grads
function util.setNotEqual(a, b, c, v)
local mask = torch.eq(a, b)
return torch.cmul(v,torch.typeAs(mask,v))
end
function util.setNotEqualInPlace(o, a, b, c, v)
local mask = torch.ne(a, b)
local copy = o:copy(v)
copy[mask] = 0
return copy
end
function util.newTensorLike(a)
return a.new(torch.size(a))
end
function util.newTensorLikeInPlace(o, a)
return o
end
function util.fillSameSizeAs(a, b)
return util.fill(a,b)
end
function util.fillSameSizeAsInPlace(o, a, b)
return o:fill(b)
end
function util.zerosLike(a, b)
b = b or a
return a.new(torch.size(b)):fill(0)
end
function util.zerosLikeInPlace(o, a, b)
return o:zero()
end
function util.selectSliceCopy(g, x, dim, index)
local out = g.new(x:size()):zero()
local slice = out:select(dim,index)
slice:copy(g)
return out
end
function util.selectSliceCopyInPlace(o, g, x, dim, index)
local out = o:zero()
local slice = out:select(dim,index)
slice:copy(g)
return out
end
function util.narrowSliceCopy(g, x, dim, index, size)
local out = g.new(x:size()):zero()
local slice = out:narrow(dim,index,size)
slice:copy(g)
return out
end
function util.narrowSliceCopyInPlace(o, g, x, dim, index, size)
local out = o:zero()
local slice = out:narrow(dim,index,size)
slice:copy(g)
return out
end
function util.indexAdd(g, x, dim, index)
local out = util.zerosLike(g, x)
for i=1,torch.size(index, 1) do
torch.narrow(out,dim,index[i],1):add(torch.narrow(g,dim,i,1))
end
return out
end
function util.indexAddInPlace(o, g, x, dim, index)
local out = o:zero()
for i=1,torch.size(index, 1) do
torch.narrow(out,dim,index[i],1):add(torch.narrow(g,dim,i,1))
end
return out
end
function util.catTableGradient(g, x, dim)
dim = dim or torch.nDimension(x[1])
local ln=Value.len(x)
local out = {}
local currentIndex = 1
for i=1,ln do
local thisSize = torch.size(x[i], dim)
out[i] = torch.narrow(g,dim,currentIndex,thisSize)
currentIndex = currentIndex + thisSize
end
return out
end
function util.catNumberGradient(g, x, dim)
local ln=Value.len(x)
local out = {}
local currentIndex = 1
for i=1,ln do
out[i] = torch.select(g,1,i)
end
return out
end
function util.cat(x, y, dim)
if torch.isTensor(x) then
dim = dim or torch.nDimension(x)
return torch.cat(x,y,dim)
   else -- x should be a table whose elements all share the same type
if torch.isTensor(x[1]) then
dim = y or torch.nDimension(x[1]) -- second arg becomes dimension
return x[1].cat(x,dim)
else
-- We're concatenating numbers, and we'll yield the default Tensor type
return torch.Tensor(x)
end
end
end
function util.defaultBool(b, db)
if b == nil then
return db
end
return b
end
function util.sortedFlatten(tbl, flat, noRecurse)
flat = flat or { }
if type(tbl) == "table" then
local keys = { }
for k, v in pairs(tbl) do
keys[#keys + 1] = k
end
local ok = pcall(function()
return table.sort(keys)
end)
if not ok then
table.sort(keys, function(a, b)
return tostring(a) < tostring(b)
end)
end
for i = 1, #keys do
local val = tbl[keys[i]]
if type(val) == "table" and not noRecurse then
util.sortedFlatten(val, flat)
else
flat[#flat + 1] = val
end
end
return flat
else
flat[#flat + 1] = tbl
end
return flat
end
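-- Example (illustrative): keys are visited in sorted order, so the flattening is
-- deterministic across runs regardless of table hashing.
--   util.sortedFlatten({ b = 3, a = { c = 1, d = 2 } })  --> { 1, 2, 3 }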
function util.sortedFlattenKeys(tbl, flat, flatKeys, parentKey, noRecurse)
flat = flat or { }
flatKeys = flatKeys or { }
parentKey = parentKey or { }
if type(tbl) == "table" then
local keys = { }
for k, v in pairs(tbl) do
keys[#keys + 1] = k
end
local ok = pcall(function()
return table.sort(keys)
end)
if not ok then
table.sort(keys, function(a, b)
return tostring(a) < tostring(b)
end)
end
for i = 1, #keys do
local val = tbl[keys[i]]
parentKey[#parentKey + 1] = keys[i]
if type(val) == "table" and not noRecurse then
util.sortedFlattenKeys(val, flat, flatKeys, parentKey)
else
flat[#flat + 1] = val
flatKeys[#flatKeys + 1] = util.shallowCopy(parentKey)
end
parentKey[#parentKey] = nil
end
else
flat[#flat + 1] = tbl
flatKeys[#flatKeys + 1] = util.shallowCopy(parentKey)
end
return flat, flatKeys
end
function util.nestedGet(tbl, nestedKey, startInd)
nestedKey = nestedKey or { }
startInd = startInd or 1
if startInd > #nestedKey then
return tbl
else
return util.nestedGet(tbl[nestedKey[startInd]], nestedKey, startInd+1)
end
end
function util.nestedSet(tbl, nestedKey, val, startInd)
local startInd = startInd or 1
if startInd == #nestedKey then
tbl[nestedKey[startInd]] = val
return nil
else
return util.nestedSet(tbl[nestedKey[startInd]], nestedKey, val, startInd+1)
end
end
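-- Example (illustrative): nestedGet/nestedSet address a value by a path of keys, such as
-- the key paths produced by sortedFlattenKeys above.
--   local params = { { W = torch.zeros(2, 2) } }
--   util.nestedGet(params, { 1, 'W' })                    -- returns params[1].W
--   util.nestedSet(params, { 1, 'W' }, torch.ones(2, 2))  -- replaces params[1].W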
function util.shallowCopy(tbl)
if type(tbl) == "table" then
local copy = { }
for k, v in pairs(tbl) do
copy[k] = v
end
return copy
else
return tbl
end
end
function util.deepCopy(tbl)
if type(tbl) == "table" then
local copy = { }
for k, v in pairs(tbl) do
if type(v) == "table" then
copy[k] = util.deepCopy(v)
else
copy[k] = v
end
end
return copy
else
return tbl
end
end
function util.fill(A,b)
return A.new(torch.size(A)):fill(b)
end
function util.fillInPlace(o,A,b)
return o:fill(b)
end
function util.cloneInPlace(o,A)
return o:copy(A)
end
function util.newInPlace(o,s)
return o
end
function util.typeAsIfNeeded(A, B)
if torch.type(A) ~= torch.type(B) then
return torch.typeAs(A, B)
end
return A
end
function util.typeAsInPlace(o, A, B)
o:copy(A)
return o
end
return util
<|start_filename|>src/model/RecurrentFWNetwork.lua<|end_filename|>
-- util
local util = require 'autograd.util'
local functionalize = require('autograd.nnwrapper').functionalize
local nn = functionalize('nn')
return function(opt, params)
-- options:
opt = opt or {}
local inputFeatures = opt.inputFeatures or 10
local hiddenFeatures = opt.hiddenFeatures or 100
local l = opt.lambda or 0.9
local e = opt.eta or 0.5
local S = opt.S or 1
   local LayerNorm = util.defaultBool(opt.LayerNorm, true)
   local eps = opt.eps or 1e-5
local outputType = opt.outputType or 'last' -- 'last' or 'all'
local relu = nn.ReLU()
local mm = nn.MM(false, false) -- A * B
local mmT = nn.MM(false, true) -- A * B'
-- container:
params = params or {}
-- parameters:
local p = {
W = torch.zeros(inputFeatures+hiddenFeatures, hiddenFeatures),
b = torch.zeros(1, hiddenFeatures),
}
table.insert(params, p)
-- function:
local f = function(params, x, prevState)
-- dims:
local p = params[1] or params
if torch.nDimension(x) == 2 then
x = torch.view(x, 1, torch.size(x, 1), torch.size(x, 2))
end
local batch = torch.size(x, 1)
local steps = torch.size(x, 2)
-- hiddens:
prevState = prevState or {}
-- prev h
local hp = prevState.h or torch.zero(x.new(batch, hiddenFeatures))
-- fast weights
local A = prevState.A or
torch.zero(x.new(batch, hiddenFeatures, hiddenFeatures))
local hs = {}
-- go over time:
for t = 1, steps do
-- xt
local xt = torch.select(x, 2, t)
-- prev h
hp = hs[t-1] or hp
-- vector to matrix
local hpMat = torch.view(hp, batch, -1, 1)
-- fast weights update
A = l * A + e * mmT{hpMat, hpMat}
-- pack all dot products:
local dot = torch.cat(xt, hp, 2) * p.W
+ torch.expand(p.b, batch, hiddenFeatures)
hs[t] = torch.zero(x.new(batch, hiddenFeatures))
for s = 0, S do
-- vector to matrix
local hstMat = torch.view(hs[t], batch, -1, 1)
-- next h:
hs[t] = dot + torch.view(mm{A, hstMat}, batch, -1)
if LayerNorm then
local h = hs[t]
if torch.nDimension(hs[t]) == 1 then
h = torch.view(hs[t], 1, torch.size(hs[t], 1))
end
local n = torch.size(h, 2)
h = h - torch.expand(torch.mean(h, 2), torch.size(h))
local std = torch.expand(
torch.sqrt(torch.sum(torch.cmul(h, h) / n, 2) + eps),
torch.size(h))
hs[t] = torch.view(torch.cdiv(h, std), torch.size(hs[t]))
end
-- apply non-linearity
hs[t] = relu(hs[t])
end
end
-- save state
local newState = {h=hs[#hs]}
-- output:
if outputType == 'last' then
-- return last hidden code:
return hs[#hs], newState
else
-- return all:
for i in ipairs(hs) do
hs[i] = torch.view(hs[i], batch,1,hiddenFeatures)
end
return x.cat(hs, 2), newState
end
end
-- layers
return f, params
end
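-- A hedged usage sketch (assuming this model is exposed as autograd.model.RecurrentFWNetwork,
-- like the other models in this directory):
--   local model = require 'autograd.model'
--   local f, params = model.RecurrentFWNetwork({inputFeatures = 10, hiddenFeatures = 100})
--   local x = torch.randn(4, 6, 10)      -- batch x steps x inputFeatures
--   local h, state = f(params, x)        -- h: 4 x 100 when outputType == 'last'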
<|start_filename|>src/runtime/codegen/Node.lua<|end_filename|>
local Value = require 'autograd.runtime.codegen.Value'
local Source = require 'autograd.runtime.codegen.Source'
local util = require 'autograd.util'
local Node = { }
Node.__index = Node
function Node.new(forwardFn, gradientFn, inputs, mutationFlow)
local v = { }
setmetatable(v, Node)
v:init(forwardFn, gradientFn, inputs, mutationFlow)
return v
end
function Node:init(forwardFn, gradientFn, inputs, mutationFlow)
self.forwardFn = forwardFn
self.gradientFn = gradientFn
self.inputs = { }
for i = 1, #inputs do
local input = inputs[i]
if not Value.isValue(input) then
if torch.isTensor(input) and torch.nDimension(input) > 1 then
error("constant tensor with more than one dimension. is this an upvalue that should be a function argument?")
end
end
self.inputs[i] = Value.from(input, Source.constant(input), false, mutationFlow)
end
self.outputs = { }
self.outputTargets = { }
end
function Node:differentiable(differentiableMap)
local outputSource = self.outputs[1].source
local isDiff = differentiableMap[outputSource]
if isDiff == nil then
if self.gradientFn or self.forwardFn.differentiable then
for i = 1, #self.inputs do
if self.inputs[i].source:differentiable(differentiableMap) then
differentiableMap[outputSource] = true
return true
end
end
end
differentiableMap[outputSource] = false
return false
else
return isDiff
end
end
function Node:evaluateForward(mutationFlow)
local evalArgs = { }
for i = 1, #self.inputs do
local input = self.inputs[i]
local source = input.source
if source.type == Source.COMPUTED then
source.node:linkOutputNode(source.index, self, i)
elseif source.type == Source.CONSTANT and input.type == Value.TABLE then
-- Constant table assembled by the user.
for k, v in pairs(input:get()) do
if Value.isValue(v) then
if v.source.type == Source.COMPUTED then
v.source.node:linkOutputNode(v.source.index, self, i)
end
end
end
end
evalArgs[i] = self.inputs[i]:flatten()
end
self.outputs = { }
self.outputTargets = { }
local outputs = {self.forwardFn.fn(table.unpack(evalArgs))}
if self.forwardFn.name == "Value.__internal_set" then
      -- This was a mutation in the form of x[k] = v
-- The output of the assignment is simply the input param x wrapped in a new Value pointing to this node, x'
-- All future user references to x will be remapped to x', to preserve the order of operations in the graph.
local valueAlias = Value.from(outputs[1], Source.computed(self, 1))
mutationFlow:alias(self.inputs[1], valueAlias)
self.outputs[1] = valueAlias
self.outputTargets[1] = { }
else
for i = 1, #outputs do
self.outputs[i] = Value.from(outputs[i], Source.computed(self, i))
self.outputTargets[i] = { }
end
end
return table.unpack(self.outputs)
end
function Node:evaluateBackward(mutationFlow, intermediateGrads, differentiableMap)
-- Only eval one gradient for now?
local numGrads = 1 --#self.outputs
for o = 1, numGrads do
local output = self.outputs[o]
for i = #self.inputs, 1, -1 do
local input = self.inputs[i]
local source = input.source
if source:differentiable(differentiableMap) then
if self.gradientFn ~= nil and self.gradientFn[i] ~= nil then
local outputGradient = intermediateGrads[output.source]
if outputGradient == nil then
if output.type == Value.TENSOR then
outputGradient = Value.from(util.zerosLike(output), Source.constant(0, torch.type(output), torch.size(output)))
elseif output.type == Value.NUMBER then
outputGradient = Value.from(0.0, Source.constant(0))
end
intermediateGrads[output.source] = outputGradient
end
if input.type == Value.TABLE then
local gradUpdates = (self.gradientFn[i])(outputGradient, output, table.unpack(self.inputs))
if gradUpdates then
for k, v in pairs(input:get()) do
local gradUpdate = mutationFlow:remap(gradUpdates[k])
if gradUpdate ~= nil then
local subArg = v
local source = subArg.source
local sourceGradient = intermediateGrads[source]
if sourceGradient == nil or sourceGradient == 0 then
intermediateGrads[source] = gradUpdate
else
intermediateGrads[source] = sourceGradient + gradUpdate
end
end
end
end
else
local gradUpdate = (self.gradientFn[i])(outputGradient, output, table.unpack(self.inputs))
if gradUpdate then
gradUpdate = mutationFlow:remap(gradUpdate)
local sourceGradient = intermediateGrads[source]
if sourceGradient == nil or sourceGradient == 0 then
intermediateGrads[source] = gradUpdate
else
intermediateGrads[source] = sourceGradient + gradUpdate
end
end
end
elseif self.forwardFn.differentiable then
error("missing gradient for argument " .. tostring(i) .. " in function " .. self.forwardFn.name)
end
end
end
end
end
local function removeFromTargetsArray(arr, node)
for i = #arr, 1, -1 do
if arr[i].node == node then
table.remove(arr, i)
end
end
end
function Node:unlinkInputs()
for i = 1, #self.inputs do
if self.inputs[i].source.type == Source.COMPUTED then
self.inputs[i].source.node:unlinkOutputNode(self)
end
end
self.inputs = { }
end
function Node:replaceInput(replaceInput, withInput)
for i = 1, #self.inputs do
local input = self.inputs[i]
if input == replaceInput then
if replaceInput.source.type == Source.COMPUTED then
replaceInput.source.node:unlinkOutputNode(self)
end
if withInput.source.type == Source.COMPUTED then
local inputIndex = withInput.source.node:outputParamIndex(withInput)
withInput.source.node:linkOutputNode(inputIndex, self, i)
end
self.inputs[i] = withInput
end
end
end
function Node:linkOutputNode(srcIndex, node, dstIndex)
local outputTargets = self.outputTargets[srcIndex]
outputTargets[#outputTargets + 1] = {
node = node,
index = dstIndex
}
end
function Node:unlinkOutputNode(node)
for k = 1, #self.outputTargets do
removeFromTargetsArray(self.outputTargets[k], node)
end
end
function Node:outputParamIndex(outputValue)
for k = 1, #self.outputs do
if self.outputs[k] == outputValue then
return k
end
end
return 0
end
function Node:changeTargetIndex(param, target, currentIdx, newIdx)
for i = 1, #self.outputTargets[param] do
local ot = self.outputTargets[param][i]
if ot.node == self and ot.index == currentIdx then
         ot.index = newIdx
end
end
end
return Node
<|start_filename|>src/runtime/codegen/MutationFlow.lua<|end_filename|>
local MutationFlow = { }
MutationFlow.__index = MutationFlow
function MutationFlow.new()
local v = {
history = { },
map = { },
reverseMap = { },
}
setmetatable(v, MutationFlow)
return v
end
function MutationFlow:alias(from, to)
self.history[#self.history + 1] = {
from = from,
to = to,
}
local reverse = self.reverseMap[from]
if reverse ~= nil then
self.map[reverse] = to
end
self.map[from] = to
self.reverseMap[to] = from
end
function MutationFlow:remap(a)
local alias = self.map[a]
if alias ~= nil then
return alias
else
return a
end
end
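-- Hedged sketch of the intended flow (x and xPrime are hypothetical Values): when a
-- mutation x[k] = v is recorded, the mutated value is aliased to a fresh Value x', and
-- later references to x are remapped to x':
--   local flow = MutationFlow.new()
--   flow:alias(x, xPrime)
--   flow:remap(x)      -- returns xPrime
--   flow:remap(other)  -- returns other unchanged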
function MutationFlow:clear()
self.history = { }
self.map = { }
end
return MutationFlow
<|start_filename|>src/auto/init.lua<|end_filename|>
local auto = {
}
return auto
<|start_filename|>src/runtime/codegen/StringBuilder.lua<|end_filename|>
local StringBuilder = { }
StringBuilder.__index = StringBuilder
function StringBuilder.new()
local v = { }
setmetatable(v, StringBuilder)
v.strings = { }
return v
end
function StringBuilder:write(...)
local arg = {...}
for i = 1, #arg do
self.strings[#self.strings + 1] = arg[i]
end
end
function StringBuilder:writeln(...)
self:write(...)
self:write("\n")
end
function StringBuilder:indent(n)
for i = 1, n do
self:write(" ")
end
end
function StringBuilder:finish()
return table.concat(self.strings, "")
end
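-- Example (illustrative):
--   local sb = StringBuilder.new()
--   sb:writeln("local x = 1")
--   sb:indent(1); sb:writeln("-- four leading spaces")
--   print(sb:finish())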
return StringBuilder
<|start_filename|>src/module/MaskedBatchNormalization.lua<|end_filename|>
local util = require 'autograd.util'
return function(opt, params)
local opt = opt or {}
local params = params or {}
local nOutputs = opt.nOutputs or 10
local momentum = opt.momentum or 0.1
local batchNormState = {momentum = momentum, train = 1,
running_mean = torch.zeros(1, nOutputs),
running_std = torch.ones(1, nOutputs)}
-- initializing gain to < 1 is recommended for LSTM batch norm.
local p = {gain = torch.zeros(1, nOutputs):fill(0.1),
bias = torch.zeros(1, nOutputs)}
table.insert(params, p)
local function masked_batch_norm(params, x, mask, state, eps)
--[[ Masked batch normalization for minibatches with variable length sequences.
Based on sequence batch norm from Batch Normalized Recurrent Neural Networks by Laurent et al.
(http://arxiv.org/abs/1510.01378)
Parameters:
* `params` - Gain and bias parameters to adjust normalized output.
* `x` - ([batch, [time,], nOutputs]) tensor to be normalized.
* `mask` - Tensor with the same size as x that is 1 where x is valid and 0 otherwise.
* `state` - Running mean and std estimates, momentum for estimates, and train flag.
* `eps` - Small constant to avoid divide by zero for small std.
Returns:
* `x_corrected` - ([batch, [time,], nOutputs]) batch normalized tensor.
--]]
local p = params[1] or params
local eps = eps or 1e-5
local train = state.train or 1
local momentum = (state.momentum or 0.1) * train -- kill state updates during evaluation
local x_in = x
local mask_in = mask
if torch.nDimension(x) == 3 then -- collapse batch and time dimensions
x_in = torch.view(x, -1, torch.size(x, 3))
mask_in = torch.view(mask, -1, torch.size(mask, 3))
elseif torch.nDimension(x) == 1 then -- expand batch dimension
x_in = torch.view(x, 1, torch.size(x, 1))
mask_in = torch.view(mask, 1, torch.size(mask, 1))
end
local n = torch.sum(mask)
mask_in = torch.expand(mask_in, torch.size(x_in))
local x_masked = torch.cmul(x_in, mask_in)
local mean = torch.sum(x_masked / n, 1)
state.running_mean = momentum * mean + (1 - momentum) * state.running_mean
      local x_centered = torch.cmul(x_masked - torch.expand(state.running_mean, torch.size(x_in)), mask_in)
local var = torch.sum(torch.cmul(x_centered, x_centered) / n, 1) + eps
local std = torch.sqrt(var)
state.running_std = momentum * std + (1 - momentum) * state.running_std
local x_normed = torch.cdiv(x_centered, torch.expand(state.running_std, torch.size(x_in)))
local gain = torch.expand(p.gain, torch.size(x_in))
local bias = torch.expand(p.bias, torch.size(x_in))
local x_corrected = torch.view(torch.cmul(x_normed, gain) + bias, torch.size(x))
return x_corrected
end
return masked_batch_norm, params, batchNormState
end
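-- A hedged usage sketch (module path assumed from the file layout, not verified here):
--   local mbnFactory = require 'autograd.module.MaskedBatchNormalization'
--   local mbn, params, state = mbnFactory({nOutputs = 16})
--   local x    = torch.randn(8, 16)
--   local mask = torch.ones(8, 16)   -- 1 where x is valid, 0 where padded
--   local y    = mbn(params, x, mask, state)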
<|start_filename|>src/runtime/direct/init.lua<|end_filename|>
local DirectNode = require 'autograd.runtime.direct.DirectNode'
local DirectTape = require 'autograd.runtime.direct.DirectTape'
local Profiler = require 'autograd.runtime.direct.Profiler'
local function create(fn, opt)
local pf = nil
if opt.profile ~= 'off' then
pf = Profiler.new()
end
return function(...)
if pf ~= nil and math.fmod(pf.times + 1, opt.profileReportFrequency) == 0 then
pf:printReport(opt.profile)
end
if opt.withForward and opt.withGradients then
return DirectTape.grad(fn, opt.argnum, nil, pf, ...)
elseif opt.withForward then
return fn(...)
elseif opt.withGradients then
local args = {...}
local partialGrad = table.remove(args, #args)
return DirectTape.grad(fn, opt.argnum, partialGrad, nil, table.unpack(args))
end
end
end
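-- Hedged sketch of the options this runtime consumes (normally assembled by autograd's
-- top-level wrapper rather than user code):
--   create(f, { argnum = 1, withForward = true, withGradients = true,
--               profile = 'off', profileReportFrequency = 10 })
-- The returned closure differentiates `f` with respect to argument number `argnum`.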
return {
create = create
}
<|start_filename|>src/runtime/codegen/Value.lua<|end_filename|>
local Source = require 'autograd.runtime.codegen.Source'
local Value = { }
Value.TABLE = "table"
Value.TENSOR = "tensor"
Value.NUMBER = "number"
Value.LONG_STORAGE = "long_storage"
Value.BOOLEAN = "boolean"
Value.STRING = "string"
function Value.create(type, val, source)
local v = {
type = type,
raw = val,
source = source
}
setmetatable(v, Value)
return v
end
function Value.from(v, source, skipWrapTables, mutationFlow)
if v == nil then
error("nil parameter value")
elseif Value.isValue(v) then
if mutationFlow ~= nil then
return mutationFlow:remap(v)
end
return v
elseif type(v) == "table" then
local vcopy = { }
for k,v in pairs(v) do
vcopy[k] = Value.from(v, Source.table(source, k), skipWrapTables, mutationFlow)
end
if skipWrapTables then
return vcopy
else
return Value.create(Value.TABLE, vcopy, source)
end
elseif torch.isTensor(v) then
return Value.create(Value.TENSOR, v, source)
elseif type(v) == "number" then
return Value.create(Value.NUMBER, v, source)
elseif type(v) == "boolean" then
return Value.create(Value.BOOLEAN, v, source)
elseif type(v) == "string" then
return Value.create(Value.STRING, v, source)
elseif v.totable then
return Value.create(Value.LONG_STORAGE, v, source)
else
error("unknown type " .. type(v) .. " for value '" .. tostring(v) .. "'")
end
end
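-- Hedged sketch of the wrapping behavior (Source.constant as used elsewhere in this runtime):
--   local raw = torch.ones(3)
--   local v = Value.from(raw, Source.constant(raw))
--   v.type == Value.TENSOR   -- true
--   v:get() == raw           -- true; raw tables become Value.TABLE with wrapped members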
function Value.isValue(v)
return getmetatable(v) == Value
end
function Value.len(v)
if Value.isValue(v) then
return #v.raw
else
return #v
end
end
function Value:get()
return self.raw
end
function Value.__internal_set(s, k, v)
s[k] = v
return s
end
function Value.__internal_get(s, k)
return s[k]
end
function Value:__index(i)
local rtype = rawget(self, "type")
if rtype == Value.TABLE then
local raw = rawget(self, "raw")
if raw[i] ~= nil then
return raw[i]
end
elseif rtype == Value.TENSOR then
local raw = rawget(self, "raw")
if type(i) ~= "string" then
return Value.__internal_get(self, i)
else
if raw[i] ~= nil then
return raw[i]
end
end
end
return rawget(Value, i)
end
function Value:__newindex(k, v)
local rtype = rawget(self, "type")
if rtype == Value.TABLE then
local raw = rawget(self, "raw")
return rawset(raw, k, v)
elseif rtype == Value.TENSOR then
local raw = rawget(self, "raw")
if type(k) ~= "string" then
return Value.__internal_set(self, k, v)
end
end
return rawset(self, k, v)
end
function Value:__len()
return #self.raw
end
function Value.flatten(v)
if not Value.isValue(v) then
if type(v) == "table" then
local rawTable = { }
for k,v in pairs(v) do
rawTable[k] = Value.flatten(v)
end
return rawTable
else
return v
end
elseif v.type == Value.TABLE then
return Value.flatten(v.raw)
else
return v.raw
end
end
function Value.flattenGrads(v, intermediateGrads)
if not Value.isValue(v) then
if type(v) == "table" then
local rawTable = { }
for k,v in pairs(v) do
rawTable[k] = Value.flattenGrads(v, intermediateGrads)
end
return rawTable
end
elseif v.type == Value.TABLE then
return Value.flattenGrads(v.raw, intermediateGrads)
else
local grad = intermediateGrads[v.source]
if grad ~= nil then
return intermediateGrads[v.source]:flatten()
end
end
end
function Value.collectGrads(v, intermediateGrads)
if not Value.isValue(v) then
if type(v) == "table" then
local rawTable = { }
for k,v in pairs(v) do
rawTable[k] = Value.collectGrads(v, intermediateGrads)
end
return rawTable
end
elseif v.type == Value.TABLE then
return Value.collectGrads(v.raw, intermediateGrads)
else
return intermediateGrads[v.source]
end
end
-- These exist only to be overloaded and called with flattened tensor or number arguments
function Value.__add(a, b)
return a + b
end
function Value.__sub(a, b)
return a - b
end
function Value.__mul(a, b)
return a * b
end
function Value.__div(a, b)
return a / b
end
function Value.__pow(a, b)
return a ^ b
end
function Value.__unm(a)
return -a
end
return Value
<|start_filename|>src/model/common.lua<|end_filename|>
return {
-- generic generator, from sequential list of layers:
sequence = function(layers, layer2params)
return function(params, input)
for i,layer in ipairs(layers) do
local paramsi = layer2params[i]
if paramsi then
input = layer(params[paramsi], input)
else
input = layer(input)
end
end
return input
end
end
}
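-- Hedged usage sketch: 'layers' is an ordered list of layer functions and 'layer2params'
-- maps a layer index to its slot in 'params' (nil/false for parameter-free layers):
--   local net = sequence(layers, layer2params)
--   local out = net(params, input)   -- threads 'input' through each layer in order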
<|start_filename|>src/runtime/codegen/Debugger.lua<|end_filename|>
local Source = require 'autograd.runtime.codegen.Source'
local Value = require 'autograd.runtime.codegen.Value'
local StringBuilder = require 'autograd.runtime.codegen.StringBuilder'
local stringx = require 'pl.stringx'
local function Debugger(opt)
opt = opt or { }
local debugHook = opt.debugHook
local newlineChar = opt.newlineChar or "\n"
local debugNodes = { }
local function debugNode(node)
if not node.debug then
table.insert(debugNodes, node)
node.debug = { index = #debugNodes }
end
end
local debugValues = { }
local function debugValue(value)
if not value.debug then
table.insert(debugValues, value)
value.debug = { index = #debugValues }
end
end
local function captureCallStack(node)
debugNode(node)
local tb = debug.traceback()
if not tb:match("'evaluateBackward'") then
node.debug.isForward = true
local lines = stringx.split(tb, "\n")
table.remove(lines, 1) -- Remove the header line
local infos = { }
for i,line in ipairs(lines) do
local info = debug.getinfo(i)
if info ~= nil then
if info.name == 'createGraph' then
for j = #infos,1,-1 do
if infos[j].what == 'tail' then
break
end
node.debug.callStack = node.debug.callStack or { }
table.insert(node.debug.callStack, infos[j])
end
break
else
table.insert(infos, info)
end
end
end
end
end
local function rcsvFindCallStack(node)
if node then
if node.debug and node.debug.callStack then
return node
end
if node.inputs and #node.inputs > 0 then
return rcsvFindCallStack(node.inputs[1].source.node)
end
end
end
local function isNanOrInf(x)
if x ~= x then
return true
else
local s = tostring(x)
if s == "inf" or s == "-inf" then
return true
end
end
end
local main
local function setMain(symbols, grads, answers)
main = {
symbols = symbols,
grads = grads,
answers = answers,
}
end
local function setCode(code)
main.code = code
end
local function walkGraph(value, node, parentNode, callback)
callback(value, node, parentNode)
if node then
for i,input in ipairs(node.inputs) do
walkGraph(input, input.source.node, node, callback)
end
end
end
local function valueKey(value)
return 'x' .. value.source:symbolPath(main.symbols):gsub("[^a-zA-Z0-9]+", "_")
end
local function valueName(value)
for _,grad in pairs(main.grads) do
if grad.grad == value then
return "grad(" .. grad.param.source:symbolPath(main.symbols) .. ")", "trapezium"
end
end
for i,answer in ipairs(main.answers) do
if answer == value then
return "answer[" .. i .. "]", "octagon"
end
end
local shape = "ellipse"
if value.source.type ~= Source.COMPUTED then
shape = "invtrapezium"
end
return value.source:symbolPath(main.symbols), shape
end
local function generateDotValue(out, value)
local label, shape = valueName(value)
local parts = { label }
if torch.isTensor(value.raw) then
table.insert(parts, torch.typename(value.raw):sub(7) .. "(" .. table.concat(value.raw:size():totable(), ", ") .. ")")
elseif torch.isStorage(value.raw) then
table.insert(parts, torch.typename(value.raw):sub(7) .. "(" .. value.raw:size() .. ")")
end
local color = "black"
if value.debug and value.debug.min ~= nil then
table.insert(parts, "[" .. value.debug.min .. ", " .. value.debug.max .. "]")
if isNanOrInf(value.debug.min) or isNanOrInf(value.debug.max) then
color = "red"
end
end
out:write('\t' .. valueKey(value) .. ' [label="<' .. table.concat(parts, newlineChar) .. '>" color="' .. color .. '" shape="' .. shape .. '"];\n')
end
local function generateDotNode(out, node)
debugNode(node)
local label = node.forwardFn.name
      local color = "black"
for _,output in ipairs(node.outputs) do
if output.debug and (isNanOrInf(output.debug.min) or isNanOrInf(output.debug.max)) then
color = "red"
local forwardNode = rcsvFindCallStack(node)
if forwardNode then
for i,info in ipairs(forwardNode.debug.callStack) do
label = label .. newlineChar .. i .. ": " .. tostring(info.name) .. info.source .. ":" .. info.currentline
end
end
end
end
out:write('\tnode' .. node.debug.index .. ' [label="<' .. label .. '>" color="'..color..'" shape="box"];\n')
end
local function generateEdge(out, node, value, reverse)
local color = (node.debug.isForward and 'green') or 'blue'
if reverse then
out:write('\t' .. valueKey(value) .. ' -> node' .. node.debug.index .. ' [color="'..color..'"];\n')
else
out:write('\tnode' .. node.debug.index .. ' -> ' .. valueKey(value) .. ' [color="'..color..'"];\n')
end
end
local function generateDot(fileName, value, node)
local out = StringBuilder.new()
out:write('digraph graphname {\n')
local seenNodes = { }
local seenEdges = { }
local function callback(value, node, parentNode)
if value and not seenNodes[value] then
seenNodes[value] = true
generateDotValue(out, value)
end
if node and not seenNodes[node] then
seenNodes[node] = true
generateDotNode(out, node)
end
if node and value then
local shouldDraw = false
if seenEdges[node] and not seenEdges[node][value] then
seenEdges[node][value] = true
shouldDraw = true
elseif not seenEdges[node] then
seenEdges[node] = {}
seenEdges[node][value] = true
shouldDraw = true
end
if shouldDraw then
generateEdge(out, node, value)
end
end
if parentNode and value then
local shouldDraw = false
if not seenEdges[parentNode] then
seenEdges[parentNode] = {}
end
if not seenEdges[parentNode][value] then
seenEdges[parentNode][value] = true
shouldDraw = true
end
if shouldDraw then
generateEdge(out, parentNode, value, true)
end
end
end
if value then
-- Walk from the provided root value and node
walkGraph(value, node, nil, callback)
else
-- Walk the entire graph
for _,grad in ipairs(main.grads) do
walkGraph(grad.grad, grad.grad.source.node, nil, callback)
end
for _,answer in ipairs(main.answers) do
walkGraph(answer, answer.source.node, nil, callback)
end
end
      out:write('}\n')
      local str = out:finish()
      -- When no fileName is given (as in generateJson below), just return the dot string.
      if fileName then
         local f = io.open(fileName, "w")
         f:write(str)
         f:close()
      end
      return str
   end
local function generateJson(fileName, value, node)
local dot = generateDot(nil, value, node)
local _,_,name,graph = dot:find('digraph%s*(%w*)%s*{(.*)}')
local elts = stringx.split(stringx.strip(graph),'\n')
local edges = {}
local nodes = {}
local function parseMeta(meta)
local rest = meta
local _,key,val
local elts = {}
while true do
_,_,key,val,rest = rest:find('(.-)%=%"(.-)%"%s*(.*)')
if not rest then break end
elts[key] = val
end
return elts
end
for i,elt in ipairs(elts) do
local elt = stringx.strip(elt)
local _,_,content,meta = elt:find('(.-)%[(.*)%];$')
meta = parseMeta(meta)
if content:find('%-') then
-- edge
local _,_,name1,edge,name2 = content:find('^(.-) (.-) (.*)$')
table.insert(edges, {
from = stringx.strip(name1),
to = stringx.strip(name2),
edge = edge,
meta = meta,
})
else
-- node
local name = stringx.strip(content)
nodes[name] = {
name = name,
meta = meta,
}
end
end
local graph = {
name = name,
nodes = nodes,
edges = edges,
}
local f = io.open(fileName, 'w')
f:write(require('cjson').encode(graph))
f:close()
end
local function showDot(value, node)
if sys.uname() ~= 'macos' then
print('showDot() only implemented on OSX')
return
end
local fileName = os.tmpname()
generateDot(fileName, value, node)
os.execute('dot -O -Tsvg ' .. fileName)
os.remove(fileName)
os.execute('open -a Safari ' .. fileName ..'.svg')
end
local function valueCheck(value, raw, min, max, node)
value.debug = value.debug or { }
value.debug.min = (value.debug.min ~= nil and math.min(value.debug.min, min)) or min
value.debug.max = (value.debug.max ~= nil and math.max(value.debug.max, max)) or max
if isNanOrInf(value.debug.min) or isNanOrInf(value.debug.max) then
local debugger = {
generateDot = function(fileName) generateDot(fileName, value, node) end,
generateJson = function(fileName) generateJson(fileName, value, node) end,
showDot = function() showDot(value, node) end,
}
local msg = "autograd debugger detected a nan or inf value for " .. valueName(value)
local forwardNode = rcsvFindCallStack(node)
if forwardNode then
for i,info in ipairs(forwardNode.debug.callStack) do
msg = msg .. "\n\t\t" .. i .. ": " .. tostring(info.name) .. info.source .. ":" .. info.currentline
end
end
local info = debug.getinfo(3)
debugHook(debugger, msg, {
source = info.source,
line = info.currentline - 1
})
end
end
local function outputCheckTensor(nodeIndex, outputIndex, raw)
local node = debugNodes[nodeIndex]
local value = node.outputs[outputIndex]
valueCheck(value, raw, raw:min(), raw:max(), node)
end
local function outputCheckNumber(nodeIndex, outputIndex, raw)
local node = debugNodes[nodeIndex]
local value = node.outputs[outputIndex]
valueCheck(value, raw, raw, raw, node)
end
local function generateOutputCheck(node, outputIndex, symbol, out)
debugNode(node)
if node.forwardFn.operator == nil then
local fnName = string.gsub(node.forwardFn.name, "%.", "_")
if fnName:sub(#fnName - 3, #fnName) == "_new" then
-- Don't check new memory as it contains random junk
return
end
end
local output = node.outputs[outputIndex]
if output.type == Value.TENSOR then
out:write(" debugger.outputCheckTensor(" .. table.concat({ node.debug.index, outputIndex, symbol }, ", ") .. ")\n")
elseif output.type == Value.NUMBER then
out:write(" debugger.outputCheckNumber(" .. table.concat({ node.debug.index, outputIndex, symbol }, ", ") .. ")\n")
end
end
local function inputCheckTensor(valueIndex, raw)
local value = debugValues[valueIndex]
valueCheck(value, raw, raw:min(), raw:max())
end
local function inputCheckNumber(valueIndex, raw)
local value = debugValues[valueIndex]
valueCheck(value, raw, raw, raw)
end
local function generateInputCheck(value, symbol, out)
debugValue(value)
if value.type == Value.TENSOR then
out:write(" debugger.inputCheckTensor(" .. table.concat({ value.debug.index, symbol }, ", ") .. ")\n")
elseif value.type == Value.NUMBER then
out:write(" debugger.inputCheckNumber(" .. table.concat({ value.debug.index, symbol }, ", ") .. ")\n")
elseif value.type == Value.TABLE then
for k,v in pairs(value.raw) do
generateInputCheck(v, symbol .. "." .. k, out)
end
end
end
return {
captureCallStack = captureCallStack,
setMain = setMain,
setCode = setCode,
generateDot = generateDot,
showDot = showDot,
outputCheckTensor = outputCheckTensor,
outputCheckNumber = outputCheckNumber,
generateOutputCheck = generateOutputCheck,
inputCheckTensor = inputCheckTensor,
inputCheckNumber = inputCheckNumber,
generateInputCheck = generateInputCheck,
}
end
return Debugger
<|start_filename|>src/model/AlexNet.lua<|end_filename|>
local sequence = require 'autograd.model.common'.sequence
local NeuralNetwork = require 'autograd.model.NeuralNetwork'
local SpatialNetwork = require 'autograd.model.SpatialNetwork'
-- The ImageNet architecture for AlexNet would be set with the following options:
-- http://arxiv.org/pdf/1404.5997v2.pdf
-- https://github.com/eladhoffer/ImageNet-Training/blob/2cd055056082c05f7a7f5392fb7897c706cdb38a/Models/AlexNet_BN.lua
-- convOpt = {
-- kernelSize = 3, -- NOTE: currently don't support per-layer kernel sizes. If we did, it'd be {11,5,3,3,3}
-- hiddenFeatures = {64,192,384,256,256},
-- batchNormalization = true,
-- padding = 2, -- NOTE: currently don't support per-layer paddings. If we did, it'd be {2,2,1,1,1}
-- dropoutProb = 0,
-- activations = 'ReLU',
-- inputStride = 1, -- NOTE: currently don't support per-layer inputStrides. If we did, it'd be {4,1,1,1,1}
-- poolings = {3,3,3,3,3} -- We don't set kW/H and dW/H separately.
-- }
local mlpOpt = {
hiddenFeatures = {4096,4096,1000},
batchNormalization = true,
dropoutProbs = {0.5,0.5,0},
classifier = true,
activations = "ReLU",
}
return function(imageDepth, imageHeight, imageWidth, convOpt, mlpOpt, params, layers, layer2params)
-- Convolution options
--------------------------------------------
convOpt = convOpt or {}
if convOpt.inputFeatures then
      print("The inputFeatures option will be overridden by the provided imageDepth value: " .. tostring(imageDepth))
end
convOpt.inputFeatures = imageDepth
convOpt.kernelSize = convOpt.kernelSize or 5
if convOpt.kernelSizes then
error("Currently, per-layer kernel sizes are not supported")
end
convOpt.padding = convOpt.padding
if convOpt.paddings then
error("Currently, per-layer paddings are not supported")
end
convOpt.hiddenFeatures = convOpt.hiddenFeatures or {16,32,64}
convOpt.batchNormalization = convOpt.batchNormalization or false
convOpt.dropoutProb = convOpt.dropoutProb or 0
convOpt.dropoutProbs = convOpt.dropoutProbs or {}
convOpt.activations = convOpt.activations or 'ReLU'
convOpt.poolings = convOpt.poolings or {1,1,1}
convOpt.inputStride = convOpt.inputStride or 1
convOpt.cuda = convOpt.cuda or false
-- MLP options
--------------------------------------------
mlpOpt = mlpOpt or {}
if mlpOpt.inputFeatures then
error("Input features on the fully-connected layers cannot be manually set, do not specify")
end
mlpOpt.hiddenFeatures = mlpOpt.hiddenFeatures or {100,2}
mlpOpt.batchNormalization = mlpOpt.batchNormalization or false
mlpOpt.dropoutProb = mlpOpt.dropoutProb or 0
mlpOpt.dropoutProbs = mlpOpt.dropoutProbs or {}
mlpOpt.activations = mlpOpt.activations or 'ReLU'
   if mlpOpt.classifier == nil then mlpOpt.classifier = true end -- classifier by default.
mlpOpt.cuda = mlpOpt.cuda or false
mlpOpt = mlpOpt or {}
if (mlpOpt.cuda and not convOpt.cuda) or (not mlpOpt.cuda and convOpt.cuda) then
print("")
print("CUDA set on one, but not both of spatial and fully-connected layers. Setting all to CUDA.")
mlpOpt.cuda = true
convOpt.cuda = true
end
-- container
layers = layers or {}
params = params or {}
layer2params = layer2params or {}
-- Build convnet layers
local sp,params,layers = SpatialNetwork(convOpt, params, layers, layer2params)
-- Figure out convolution net output size (dependent on image size)
local testInput = torch.randn(1, imageDepth, imageHeight, imageWidth):typeAs(params[1][1]):contiguous()
local res = sp(params, testInput)
mlpOpt.inputFeatures = res:size(2)*res:size(3)*res:size(4)
-- Set up fully-connected layers to accept convolutional layer output
local fn,params,layers = NeuralNetwork(mlpOpt, params, layers, layer2params)
return sequence(layers, layer2params), params, layers
end
<|start_filename|>test/test.lua<|end_filename|>
-- Tester:
local torch = require 'torch'
local autograd = require 'autograd'
local util = require 'autograd.util'
local gradcheck = require 'autograd.gradcheck' {randomizeInput = true}
local gradcheckConstant = require 'autograd.gradcheck' {randomizeInput = false}
local tester = torch.Tester()
local stringx = require 'pl.stringx'
autograd.protected(true)
-- List of tests:
local tests = {
AutoModule = function()
local linear = function(input, weight, bias)
local y = weight * input + bias
return y
end
local linearReLU = function(input, weight, bias)
local y = weight * input + bias
local output = torch.mul( torch.abs( y ) + y, 0.5)
return output
end
local mse = function(input, target)
local buffer = input-target
return torch.sum( torch.cmul(buffer, buffer) ) / (torch.nDimension(input) == 2 and torch.size(input, 1) * torch.size(input, 2) or torch.size(input, 1))
end
local inputSize, outputSize = torch.random(10,100), torch.random(100,1000)
local inputSize = 24
local outputSize = 848
local model = nn.Sequential()
local linear1 = nn.Linear(inputSize, outputSize):reset()
local linear2 = nn.Linear(outputSize, inputSize):reset()
model:add( linear1 )
model:add( nn.ReLU() )
model:add( linear2 )
local mseCriterion = nn.MSECriterion()
local autoModel = nn.Sequential()
local autoLinear1ReLU = autograd.nn.AutoModule('AutoLinearReLU')(linearReLU, linear1.weight:clone(), linear1.bias:clone())
local autoLinear2 = autograd.nn.AutoModule('AutoLinear')(linear, linear2.weight:clone(), linear2.bias:clone())
autoModel:add( autoLinear1ReLU )
autoModel:add( autoLinear2 )
local autoMseCriterion = autograd.nn.AutoCriterion('AutoMSE')(mse)
-- Test
local n = 1000
local lr = 0.001
local autoParams,autoGradParams = autoModel:parameters()
local params,gradParams = model:parameters()
tester:asserteq(#params == #autoParams and #autoParams == #autoGradParams and #autoGradParams == #gradParams, true, 'Wrong number of parameters/gradients parameters')
for i=1,n do
model:zeroGradParameters()
autoModel:zeroGradParameters()
local input = torch.Tensor(inputSize):uniform(-5,5)
local target = input:clone():exp()
-- Forward
local output1 = model:forward(input)
local output2 = autoModel:forward(input)
local mseOut1 = mseCriterion:forward(output1, target)
local mseOut2 = autoMseCriterion:forward(output2, target)
-- Backward
local gradOutput1 = mseCriterion:backward(output1, target)
local gradOutput2 = autoMseCriterion:backward(output2, target)
local gradInput1 = model:backward(input, gradOutput1)
local gradInput2 = autoModel:backward(input, gradOutput2)
model:accGradParameters(input, gradOutput1)
autoModel:accGradParameters(input, gradOutput2)
for i=1,#autoParams do
autoParams[i]:add(-lr, autoGradParams[i])
end
for i=1,#params do
params[i]:add(-lr, gradParams[i])
end
end
tester:asserteq((model.modules[1].weight - autoModel.modules[1].weight):abs():max() < 1e-6 , true, "gradient accumulation must be the same.")
tester:asserteq((model.modules[1].bias - autoModel.modules[1].bias):abs():max() < 1e-6, true, "gradient accumulation must be the same.")
tester:asserteq((model.modules[3].weight - autoModel.modules[2].weight):abs():max() < 1e-6, true, "gradient accumulation must be the same.")
tester:asserteq((model.modules[3].bias - autoModel.modules[2].bias):abs():max() < 1e-6, true, "gradient accumulation must be the same.")
end,
AutoModuleLoaded = function()
local inputSize = 24
local outputSize = 848
      local version = (jit and 'JIT') or (_VERSION:find('5%.1') and '51') or (_VERSION:find('5%.2') and '52') or (_VERSION:find('5%.3') and '53') or error('version of Lua not supported: ' .. _VERSION)
local mseCriterion = torch.load(sys.fpath()..'/data/criterion.th.'..version)
local model = torch.load(sys.fpath()..'/data/model.th.'..version)
local autoModel = torch.load(sys.fpath()..'/data/autoModel.th.'..version)
local autoMseCriterion = torch.load(sys.fpath()..'/data/autoCriterion.th.'..version)
-- Test
local n = 1000
local lr = 0.001
local autoParams,autoGradParams = autoModel:parameters()
local params,gradParams = model:parameters()
tester:asserteq(#params == #autoParams and #autoParams == #autoGradParams and #autoGradParams == #gradParams, true, 'Wrong number of parameters/gradients parameters')
for i=1,n do
model:zeroGradParameters()
autoModel:zeroGradParameters()
local input = torch.Tensor(inputSize):uniform(-5,5)
local target = input:clone():exp()
-- Forward
local output1 = model:forward(input)
local output2 = autoModel:forward(input)
local mseOut1 = mseCriterion:forward(output1, target)
local mseOut2 = autoMseCriterion:forward(output2, target)
-- Backward
local gradOutput1 = mseCriterion:backward(output1, target)
local gradOutput2 = autoMseCriterion:backward(output2, target)
local gradInput1 = model:backward(input, gradOutput1)
local gradInput2 = autoModel:backward(input, gradOutput2)
model:accGradParameters(input, gradOutput1)
autoModel:accGradParameters(input, gradOutput2)
for i=1,#autoParams do
autoParams[i]:add(-lr, autoGradParams[i])
end
for i=1,#params do
params[i]:add(-lr, gradParams[i])
end
end
tester:asserteq((model.modules[1].weight - autoModel.modules[1].weight):abs():max() < 1e-6 , true, "gradient accumulation must be the same.")
tester:asserteq((model.modules[1].bias - autoModel.modules[1].bias):abs():max() < 1e-6, true, "gradient accumulation must be the same.")
tester:asserteq((model.modules[3].weight - autoModel.modules[2].weight):abs():max() < 1e-6, true, "gradient accumulation must be the same.")
tester:asserteq((model.modules[3].bias - autoModel.modules[2].bias):abs():max() < 1e-6, true, "gradient accumulation must be the same.")
end,
NNWrapperTableInput = function()
local A = torch.eye(10)
local B = torch.eye(10):mul(3)
local mmFn = autograd.nn.MM()
local fn = function(inputs)
return torch.sum(mmFn({inputs.A,inputs.B}))
end
tester:assert(gradcheck(fn,{A=A,B=B}), "Incorrect gradient")
end,
Select = function()
local W = torch.Tensor(5,25):normal()
local x = torch.Tensor(1,25):normal()
-- Function:
local selectFn = function(inputs)
return torch.sum(torch.select(inputs.W,1,1) + inputs.x)
end
local selectFn2 = function(inputs)
local a = torch.select(torch.viewAs(torch.select(inputs.W,1,1), inputs.x), 2, 1)
local b = torch.select(inputs.x, 2, 1)
return torch.sum(a + b)
end
-- Check grads:
tester:assert(gradcheck(selectFn, {W=W,x=x}), "Incorrect gradient")
tester:assert(gradcheck(selectFn2, {W=W,x=x}), "Incorrect gradient")
end,
Index = function()
local W = torch.Tensor(5,25):normal()
local x = torch.Tensor(100,25):normal()
-- Test with index bigger than the index param + very likely collision of indexes.
-- i.e. worst case scenario
local idx = torch.LongTensor(100)
for i = 1,idx:size(1) do
idx[i] = torch.random(1,5)
end
-- Function:
local selectFn = function(inputs)
return torch.sum(torch.index(inputs.W,1,idx) + inputs.x)
end
-- Check grads:
tester:assert(gradcheck(selectFn, {W=W,x=x}), "Incorrect gradient")
end,
Narrow = function()
local W = torch.Tensor(5,25):normal()
local x1 = torch.Tensor(1,25):normal()
local x2 = torch.Tensor(3,25):normal()
-- Function:
local NarrowFn1D = function(inputs)
return torch.sum(torch.narrow(inputs.W,1,1,1) + inputs.x)
end
local NarrowFn2D = function(inputs)
return torch.sum(torch.narrow(inputs.W,1,1,3) + inputs.x)
end
-- Check grads:
tester:assert(gradcheck(NarrowFn1D, {W=W,x=x1}), "Incorrect gradient")
tester:assert(gradcheck(NarrowFn2D, {W=W,x=x2}), "Incorrect gradient")
end,
Reshape = function()
local function f(params)
return torch.sum(torch.reshape(params.x,1,9)*3)
end
tester:assert(gradcheck(f, {x=torch.randn(3,3)}), "Incorrect gradient")
end,
Clamp = function()
local W = torch.Tensor(5,25):normal()
local clampFn = function(inputs)
return torch.sum(torch.clamp(inputs.W,0,math.huge))
end
tester:assert(clampFn({W=W})>0, "Basic sanity check failed")
tester:assert(gradcheck(clampFn,{W=W}), "Incorrect gradient")
end,
Clone = function()
local x = torch.Tensor(2,25):normal()
-- Function:
local f = function(inputs)
local res = torch.clone(torch.select(inputs.x, 1, 1) * 10 )
return torch.sum(res)
end
-- Check grads:
tester:assert(gradcheck(f, {x=x}), "Incorrect gradient")
end,
NarrowCopy = function()
local x = torch.Tensor(2,25):normal()
-- Function:
local f = function(inputs)
local res = inputs.x.new(torch.size(inputs.x))
local out1 = torch.copy( torch.select(res, 1,1), torch.select(inputs.x, 1, 1) * 10 )
local out2 = torch.copy( torch.select(res, 1,2), torch.select(inputs.x, 1, 2) * 3 )
return torch.sum(out1) + torch.sum(out2)
end
-- Check grads:
tester:assert(gradcheck(f, {x=x}), "Incorrect gradient")
end,
SelfView = function()
local W = torch.Tensor(5,5):normal()
local x = torch.Tensor(1,25):normal()
-- Function:
local viewFn = function(inputs)
return torch.sum(inputs.x:view(5,5) + inputs.W)
end
local viewAsFn = function(inputs)
return torch.sum(inputs.x:viewAs(inputs.W) + inputs.W)
end
-- Check grads:
tester:assert(gradcheck(viewFn, {W=W,x=x}), "Incorrect gradient")
tester:assert(gradcheck(viewAsFn, {W=W,x=x}), "Incorrect gradient")
-- Check floating point
gd = autograd(viewFn)({W=W,x=x})
gf = autograd(viewFn)({W=W:float(),x=x:float()})
tester:assertTensorEq(gd.W, gf.W:double(), 1e-8, "Incorrect floating point gradient")
tester:assertTensorEq(gd.x, gf.x:double(), 1e-8, "Incorrect floating point gradient")
gd = autograd(viewAsFn)({W=W,x=x})
gf = autograd(viewAsFn)({W=W:float(),x=x:float()})
tester:assertTensorEq(gd.W, gf.W:double(), 1e-8, "Incorrect floating point gradient")
tester:assertTensorEq(gd.x, gf.x:double(), 1e-8, "Incorrect floating point gradient")
end,
View = function()
local W = torch.Tensor(5,5):normal()
local x = torch.Tensor(1,25):normal()
-- Function:
local viewFn = function(inputs)
return torch.sum(torch.view(inputs.x,5,5) + inputs.W)
end
local viewAsFn = function(inputs)
return torch.sum(torch.viewAs(inputs.x, inputs.W) + inputs.W)
end
-- Check grads:
tester:assert(gradcheck(viewFn, {W=W,x=x}), "Incorrect gradient")
tester:assert(gradcheck(viewAsFn, {W=W,x=x}), "Incorrect gradient")
-- Check floating point
gd = autograd(viewFn)({W=W,x=x})
gf = autograd(viewFn)({W=W:float(),x=x:float()})
tester:assertTensorEq(gd.W, gf.W:double(), 1e-8, "Incorrect floating point gradient")
tester:assertTensorEq(gd.x, gf.x:double(), 1e-8, "Incorrect floating point gradient")
end,
SelfExpand = function()
local W = torch.Tensor(32,100):normal()
local x1 = torch.Tensor(1,100):normal()
local x2 = torch.Tensor(32,1):normal()
local x3 = torch.Tensor(1,1):normal()
-- Function:
local expandFn = function(inputs)
return torch.sum(torch.sum(inputs.x:expand(32,100) + inputs.W, 2))
end
local expandAsFn = function(inputs)
return torch.sum(torch.sum(inputs.x:expandAs(inputs.W) + inputs.W, 2))
end
-- Check grads:
for ix,x in pairs({x1,x2,x3}) do
tester:assert(gradcheck(expandFn, {W=W, x=x}), "Incorrect gradient")
tester:assert(gradcheck(expandAsFn, {W=W, x=x}), "Incorrect gradient")
end
-- Check floating point
for ix,x in pairs({x1,x2,x3}) do
gd = autograd(expandFn)({W=W,x=x})
gf = autograd(expandFn)({W=W:float(),x=x:float()})
tester:assertTensorEq(gd.W, gf.W:double(), 1e-8, "Incorrect floating point gradient")
tester:assertTensorEq(gd.x, gf.x:double(), 1e-8, "Incorrect floating point gradient")
end
end,
Expand = function()
local W = torch.Tensor(32,100):normal()
local x1 = torch.Tensor(1,100):normal()
local x2 = torch.Tensor(32,1):normal()
local x3 = torch.Tensor(1,1):normal()
-- Function:
local expandFn = function(inputs)
return torch.sum(torch.sum(torch.expand(inputs.x, 32, 100) + inputs.W, 2))
end
local expandAsFn = function(inputs)
return torch.sum(torch.sum(torch.expandAs(inputs.x, inputs.W) + inputs.W, 2))
end
-- Check grads:
for ix,x in pairs({x1,x2,x3}) do
tester:assert(gradcheck(expandFn, {W=W, x=x}), "Incorrect gradient")
tester:assert(gradcheck(expandAsFn, {W=W, x=x}), "Incorrect gradient")
end
-- Check floating point
for ix,x in pairs({x1,x2,x3}) do
gd = autograd(expandFn)({W=W,x=x})
gf = autograd(expandFn)({W=W:float(),x=x:float()})
tester:assertTensorEq(gd.W, gf.W:double(), 1e-8, "Incorrect floating point gradient")
tester:assertTensorEq(gd.x, gf.x:double(), 1e-8, "Incorrect floating point gradient")
end
end,
Transpose = function()
local fn = function(inputs)
return torch.sum(torch.t(inputs.x))
end
-- Check grads:
local x = torch.Tensor(10,5):normal()
tester:assert(gradcheck(fn, {x=x}), "Incorrect gradient")
end,
Cat = function()
-- Concat along 1st dim:
local x1 = torch.Tensor(3,5):normal()
local x2 = torch.Tensor(7,5):normal()
-- Function:
local fn = function(inputs)
return torch.sum(torch.cat(inputs.x1, inputs.x2, 1))
end
-- Check grads:
tester:assert(gradcheck(fn, {x1=x1, x2=x2}), "Incorrect gradient")
-- Transpose, and cat along the last dim
local x1 = x1:t():contiguous()
local x2 = x2:t():contiguous()
-- Function:
local fn = function(inputs)
return torch.sum(torch.cat(inputs.x1, inputs.x2))
end
-- Check grads:
tester:assert(gradcheck(fn, {x1=x1, x2=x2}), "Incorrect gradient")
-- Tables of tensors
local xs = {torch.Tensor(10):normal(), torch.Tensor(10):normal(), torch.Tensor(10):normal()}
-- Function:
local fn = function(inputs)
return torch.sum(torch.cat(inputs,1))
end
-- Check grads:
tester:assert(gradcheck(fn, xs), "Incorrect gradient")
end,
Dot = function()
-- Parameters:
local W = torch.Tensor(32,100):fill(.5)
local x = torch.Tensor(100):fill(.5)
-- Function:
local func = function(inputs)
return torch.sum(inputs.W * inputs.x)
end
-- Grads:
local dFunc = autograd(func)
-- Compute func and grads:
local pred = func({W=W, x=x})
local grads = dFunc({W=W, x=x})
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, 800, 'incorrect prediction')
tester:asserteq(grads.W:dim(), 2, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(1), 32, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(2), 100, 'incorrect dims for gradients')
tester:asserteq(grads.x:dim(), 1, 'incorrect dims for gradients')
tester:asserteq(grads.x:size(1),100, 'incorrect dims for gradients')
end,
GradCheck_Ger = function()
local A = torch.Tensor(10):normal()
local B = torch.Tensor(10):normal()
local func = function(inputs)
return torch.sum(torch.ger(inputs.A, inputs.B))
end
tester:assert(gradcheck(func, {A=A,B=B}), "incorrect gradients")
end,
GradCheck_Dot = function()
-- Parameters:
local matrices = {
{torch.Tensor(10,20):normal(), torch.Tensor(20):normal()}, -- 2D x 1D
{torch.Tensor(10,20):normal(), torch.Tensor(20,1):normal()}, -- 2D x 1D, with second dim
{torch.Tensor(10,20):normal(), torch.Tensor(20,20):normal()}, -- 2D x 2D
{torch.Tensor(20,1):normal(), torch.Tensor(1,20):normal()}, -- 1D x 1D
}
-- Function:
local func = function(inputs)
return torch.sum(inputs.A * inputs.B)
end
-- Check grads:
for i,M in pairs(matrices) do
local A = M[1]
local B = M[2]
tester:assert(gradcheck(func, {A=A,B=B}), 'incorrect gradients')
end
end,
Inverse = function()
-- Parameters:
local x = torch.Tensor(20):fill(.5)
local K = torch.eye(20) + torch.ger(x,x)
-- Function:
local func = function(inputs)
return torch.sum(torch.inverse(inputs.K))
end
-- Grads:
local dFunc = autograd(func)
-- Compute func and grads:
local pred = func({K=K})
local grads = dFunc({K=K})
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, torch.sum(torch.inverse(K)), 'incorrect prediction')
tester:asserteq(grads.K:dim(), 2, 'incorrect dims for gradients')
end,
GradCheck_Inverse = function()
-- Parameters:
local x = torch.Tensor(10):normal()
local K = torch.eye(10) + torch.ger(x,x)
-- Function:
local func = function(inputs)
return torch.sum(torch.inverse(inputs.K))
end
-- Check grads:
tester:assert(gradcheckConstant(func, {K=K}), 'incorrect gradients')
end,
Scale = function()
-- Parameters:
local W = torch.Tensor(32,100):fill(.5)
local x = torch.Tensor(100):fill(.5)
-- Function:
local func = function(inputs)
return torch.sum(inputs.W * inputs.x * 3.0 + 1.0)
end
-- Grads:
local dFunc = autograd(func)
-- Compute func and grads:
local pred = func({W=W, x=x})
local grads = dFunc({W=W, x=x})
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, 2432, 'incorrect prediction')
tester:asserteq(grads.W:dim(), 2, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(1), 32, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(2), 100, 'incorrect dims for gradients')
tester:asserteq(grads.x:dim(), 1, 'incorrect dims for gradients')
tester:asserteq(grads.x:size(1),100, 'incorrect dims for gradients')
end,
GradientTensorSize = function()
local f = function(beta)
-- beta: 2x2
local maxed = torch.max(beta)
-- beta: 2x2, maxed: number
local diff = beta - maxed
-- diff: 2x2
local summed = torch.sum(diff, 2)
-- summed: 2x1, maxed: number
local out = summed + maxed -- if you comment out maxed, this works
-- out: 2x1
return torch.sum(out)
end
local beta = torch.eye(2,2)
local pred = f(beta)
local g = autograd(f)
local grad = g(beta)
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(grad:dim(), 2, 'incorrect dims for grad')
end,
MinMax = function()
local fns = {"min", "max"}
local preds = {{1,5},{2,10}}
for i=1,2 do
local W = torch.ones(5,5):fill(2)
W[1] = 1
local fn = fns[i]
local func1 = function(inputs)
return torch[fn](inputs.W)
end
local func2 = function(inputs)
local minVal = torch[fn](inputs.W, 1)
return torch.sum(minVal)
end
-- Grads:
local dFunc1 = autograd(func1)
local dFunc2 = autograd(func2)
-- Compute func and grads:
local grads, pred = dFunc1({W=W})
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, preds[i][1], 'incorrect prediction')
tester:asserteq(grads.W:dim(), 2, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(1), 5, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(2), 5, 'incorrect dims for gradients')
tester:assert(gradcheck(func1, {W=W}), 'incorrect gradients')
-- Compute func and grads:
local W = torch.ones(5,5):fill(2)
W[1] = 1
local grads, pred = dFunc2({W=W})
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, preds[i][2], 'incorrect prediction')
tester:asserteq(grads.W:dim(), 2, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(1), 5, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(2), 5, 'incorrect dims for gradients')
tester:assert(gradcheck(func1, {W=W}), 'incorrect gradients')
end
end,
CMinCMax = function()
local fns = {"cmin", "cmax"}
local preds = {{1,5},{2,10}}
for i=1,2 do
local A = torch.eye(5)
local B = torch.eye(5)*2
local fn = fns[i]
local fn = function(inputs)
return torch.sum(torch[fn](inputs.A,inputs.B))
end
tester:assert(gradcheck(fn, {A=A,B=B}), 'incorrect gradients')
end
end,
SinCosTan = function()
local fns = {'sin', 'cos', 'tan'}
for _,f in pairs(fns) do
local x = torch.Tensor(5):normal()
local fn = function(params)
return 2 * torch.sum(torch[f](params.x))
end
tester:assert(gradcheck(fn, {x=x}), string.format('incorrect gradient for %s', f))
end
end,
SinhCoshTanh = function()
local fns = {'sinh', 'cosh', 'tanh'}
for _,f in pairs(fns) do
local x = torch.Tensor(5):normal()
local fn = function(params)
return 2 * torch.sum(torch[f](params.x))
end
tester:assert(gradcheck(fn, {x=x}), string.format('incorrect gradient for %s', f))
end
end,
GradCheck_Scale = function()
-- Parameters:
local W = torch.Tensor(32,100):normal()
local x = torch.Tensor(100):normal()
-- Function:
local func = function(inputs)
return torch.sum(inputs.W * inputs.x * 3.0 + 1.0)
end
-- Check grads:
tester:assert(gradcheck(func, {W=W, x=x}), 'incorrect gradients')
end,
Unary = function()
-- Parameters:
local x = torch.Tensor(100):fill(.5)
-- Function:
local func = function(inputs)
return torch.sum(- inputs.x)
end
-- Grads:
local dFunc = autograd(func)
-- Compute func and grads:
local pred = func({x=x})
local grads = dFunc({x=x})
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, -50, 'incorrect prediction')
tester:asserteq(grads.x:dim(), 1, 'incorrect dims for gradients')
tester:asserteq(grads.x:size(1),100, 'incorrect dims for gradients')
end,
DotNonLinear = function()
-- Parameters:
local W = torch.Tensor(32,100):fill(.5)
local x = torch.Tensor(100):fill(.5)
-- Function:
local func = function(inputs)
return torch.sum(torch.tanh(inputs.W * inputs.x))
end
-- Grads:
local dFunc = autograd(func)
-- Compute func and grads:
local pred = func({W=W, x=x})
local grads = dFunc({W=W, x=x})
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, 32, 'incorrect prediction')
tester:asserteq(grads.W:dim(), 2, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(1), 32, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(2), 100, 'incorrect dims for gradients')
tester:asserteq(grads.x:dim(), 1, 'incorrect dims for gradients')
tester:asserteq(grads.x:size(1),100, 'incorrect dims for gradients')
end,
GradCheck_DotNonLinear = function()
-- Parameters:
local W = torch.Tensor(32,100):normal()
local x = torch.Tensor(100):normal()
-- Function:
local func = function(inputs)
return torch.sum(torch.tanh(inputs.W * inputs.x))
end
-- Check grads:
tester:assert(gradcheck(func, {W=W, x=x}), 'incorrect gradients')
end,
FloatType = function()
-- Parameters:
local W = torch.FloatTensor(32,100):fill(.5)
local x = torch.FloatTensor(100):fill(.5)
-- Function:
local func = function(inputs)
return torch.sum(inputs.W * inputs.x)
end
-- Grads:
local dFunc = autograd(func)
-- Compute func and grads:
local pred = func({W=W, x=x})
local grads = dFunc({W=W, x=x})
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, 800, 'incorrect prediction')
tester:asserteq(grads.W:dim(), 2, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(1), 32, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(2), 100, 'incorrect dims for gradients')
tester:asserteq(grads.x:dim(), 1, 'incorrect dims for gradients')
tester:asserteq(grads.x:size(1),100, 'incorrect dims for gradients')
end,
CudaType = function()
-- Cuda only:
if not cutorch then
return
end
-- Parameters:
local W = torch.CudaTensor(32,100):fill(.5)
local x = torch.CudaTensor(100):fill(.5)
-- Function:
local func = function(inputs)
return torch.sum(inputs.W * inputs.x)
end
-- Grads:
local dFunc = autograd(func)
-- Compute func and grads:
local pred = func({W=W, x=x})
local grads = dFunc({W=W, x=x})
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, 800, 'incorrect prediction')
tester:asserteq(grads.W:dim(), 2, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(1), 32, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(2), 100, 'incorrect dims for gradients')
tester:asserteq(grads.x:dim(), 1, 'incorrect dims for gradients')
tester:asserteq(grads.x:size(1),100, 'incorrect dims for gradients')
end,
NCalls = function()
-- Parameters:
local W = torch.Tensor(32,100):fill(.5)
local x = torch.Tensor(100):fill(.5)
-- Function:
local func = function(inputs)
return torch.sum(inputs.W * inputs.x)
end
-- Grads:
local dFunc = autograd(func)
-- Compute func and grads:
local pred = func({W=W, x=x})
local grads = dFunc({W=W, x=x})
for i = 1,5 do
pred = func({W=W, x=x})
grads = dFunc({W=W, x=x})
end
-- Tests:
tester:asserteq(type(pred), 'number', 'incorrect prediction')
tester:asserteq(pred, 800, 'incorrect prediction')
tester:asserteq(grads.W:dim(), 2, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(1), 32, 'incorrect dims for gradients')
tester:asserteq(grads.W:size(2), 100, 'incorrect dims for gradients')
tester:asserteq(grads.x:dim(), 1, 'incorrect dims for gradients')
tester:asserteq(grads.x:size(1),100, 'incorrect dims for gradients')
end,
GradCheck_MLP = function()
local inputSize = 1024
local classes = {0,1,2,3,4,5,6,7,8,9}
-- What model to train:
local predict,f,params
-- Define our neural net
function predict(params, input, target)
local h1 = torch.tanh(input * params.W[1] + params.B[1])
local h2 = torch.tanh(h1 * params.W[2] + params.B[2])
local h3 = h2 * params.W[3] + params.B[3]
local out = autograd.util.logSoftMax(h3)
return out
end
-- Define our training loss
function f(params, input, target)
local prediction = predict(params, input, target)
local loss = autograd.loss.logMultinomialLoss(prediction, target)
return loss, prediction
end
-- Define our parameters
-- [-1/sqrt(#output), 1/sqrt(#output)]
torch.manualSeed(0)
local W1 = torch.Tensor(inputSize,50):uniform(-1/math.sqrt(50),1/math.sqrt(50))
local B1 = torch.Tensor(50):fill(0)
local W2 = torch.Tensor(50,50):uniform(-1/math.sqrt(50),1/math.sqrt(50))
local B2 = torch.Tensor(50):fill(0)
local W3 = torch.Tensor(50,#classes):uniform(-1/math.sqrt(#classes),1/math.sqrt(#classes))
local B3 = torch.Tensor(#classes):fill(0)
-- Trainable parameters:
params = {
W = {W1, W2, W3},
B = {B1, B2, B3},
}
input = torch.randn(1,1024)
target = torch.zeros(1,10)
target[1][3] = 1
p = {W=params.W,B=params.B, input=input, target=target}
function testme(params)
return f({W=params.W,B=params.B}, params.input, params.target)
end
tester:assert(gradcheck(testme, p), "Incorrect Gradient")
end,
NNFunc_Basic = function()
-- Tested params:
local inputSize = 100
local outputSize = 50
local x = torch.Tensor(inputSize):normal()
-- nn modules:
local linear1, linearParams = autograd.nn.Linear(inputSize, outputSize)
params = {linearParams=linearParams, x=x}
-- nn version:
local function f_nn(params)
local funcout = linear1(params.linearParams, params.x)
return torch.sum(funcout)
end
-- autograd version:
local function f_autograd(params)
return torch.sum(params.linearParams[1] * params.x + params.linearParams[2])
end
-- Get the NN predictions
local pred_nn = f_nn(params)
local g_nn = autograd(f_nn)
local grad_nn = g_nn(params)
-- Get the autograd predictions
local pred_autograd = f_autograd(params)
local g_autograd = autograd(f_autograd)
local grad_autograd = g_autograd(params)
-- Check
tester:asserteq((grad_nn.linearParams[1]-grad_autograd.linearParams[1]):abs():max(), 0, "Incorrect gradients")
tester:asserteq((grad_nn.x-grad_autograd.x):abs():max(), 0, "Incorrect gradients")
-- Run a 2nd time - gradients should get recomputed:
params.linearParams[1]:normal()
params.linearParams[2]:normal()
params.x:normal()
-- Get the NN predictions
local pred_nn = f_nn(params)
local g_nn = autograd(f_nn)
local grad_nn = g_nn(params)
-- Get the autograd predictions
local pred_autograd = f_autograd(params)
local g_autograd = autograd(f_autograd)
local grad_autograd = g_autograd(params)
-- Check
tester:asserteq((grad_nn.linearParams[1]-grad_autograd.linearParams[1]):abs():max(), 0, "Incorrect gradients")
tester:asserteq((grad_nn.x-grad_autograd.x):abs():max(), 0, "Incorrect gradients")
end,
NNFunc_MLP = function()
-- More complex model:
local inputSize = 100
local hiddenSize = 50
local outputSize = 10
-- nn modules and their parameters:
local params = {}
local linear1, linear2, acts1, acts2
linear1, params.linear1 = autograd.nn.Linear(inputSize, hiddenSize)
acts1 = autograd.nn.Tanh()
linear2,params.linear2 = autograd.nn.Linear(hiddenSize, outputSize)
acts2 = autograd.nn.Tanh()
-- input data
params.x = torch.Tensor(inputSize):normal()
-- nn version:
local function mlp(params)
local h1 = acts1(linear1(params.linear1, params.x))
local h2 = acts2(linear2(params.linear2, h1))
local o = torch.sum(h2)
return o
end
-- Eval:
local pred = mlp(params)
local grads = autograd(mlp)(params)
-- Check grads:
tester:assert(gradcheck(mlp, params), 'incorrect gradients')
end,
NNFunc_CNN = function()
-- Start params with input data:
local params = {x=torch.Tensor(3, 8, 8):normal()}
-- nn modules:
local conv1, acts1, flatten, linear2, acts2
conv1,params.conv1 = autograd.nn.SpatialConvolutionMM(3, 16, 3, 3, 1, 1, 1, 1)
acts1 = autograd.nn.Tanh()
flatten = autograd.nn.Reshape(16*8*8)
linear2, params.linear2 = autograd.nn.Linear(16*8*8, 10)
acts2 = autograd.nn.Tanh()
-- nn version:
local function cnn(params)
local h1 = acts1(conv1(params.conv1, params.x))
local h2 = acts2(linear2(params.linear2, flatten(h1)))
local o = torch.sum(h2)
return o
end
-- Eval:
local pred = cnn(params)
local grads = autograd(cnn)(params)
-- Check grads:
tester:assert(gradcheck(cnn, params), 'incorrect gradients')
end,
NNFunc_Float = function()
-- More complex model:
local inputSize = 100
local hiddenSize = 50
local outputSize = 10
-- Input data
local x = torch.FloatTensor(inputSize):normal()
-- nn modules:
local linear1, pLinear1 = autograd.nn.Linear(inputSize, hiddenSize)
local acts1 = autograd.nn.Tanh()
local linear2, pLinear2 = autograd.nn.Linear(hiddenSize, outputSize)
local acts2 = autograd.nn.Tanh()
params = autograd.util.cast({
linear1 = pLinear1,
linear2 = pLinear2,
x = x}, "float")
-- nn version:
local function mlp(params)
local h1 = acts1(linear1(params.linear1, params.x))
local h2 = acts2(linear2(params.linear2,h1))
local o = torch.sum(h2)
return o
end
-- Eval:
local grads = autograd(mlp)(params)
local pred = mlp(params)
-- Check grads:
tester:asserteq(torch.typename(grads.linear1[1]), 'torch.FloatTensor', 'incorrect type')
tester:asserteq(torch.typename(grads.linear1[2]), 'torch.FloatTensor', 'incorrect type')
tester:asserteq(torch.typename(grads.linear2[1]), 'torch.FloatTensor', 'incorrect type')
tester:asserteq(torch.typename(grads.linear2[2]), 'torch.FloatTensor', 'incorrect type')
end,
NNFunc_DynamicWrap = function()
-- Define regular nn model:
local model = nn.Sequential()
model:add(nn.SpatialConvolutionMM(3, 16, 3, 3, 1, 1, 1, 1))
model:add(nn.Tanh())
model:add(nn.Reshape(16*8*8))
model:add(nn.Linear(16*8*8, 10))
model:add(nn.Tanh())
-- Functionalize!
local modelf, params = autograd.functionalize(model)
-- Loss
-- local loss = autograd.nn.MSECriterion()
local loss = autograd.functionalize(nn.MSECriterion())
-- Input
local x = torch.Tensor(3, 8, 8):normal()
local y = torch.Tensor(10):normal()
-- Force to double:
params = autograd.util.cast(params, "double")
-- nn version:
local function cnn(params, y)
local h2 = modelf(params, params.x)
return loss(h2, y)
end
-- Eval:
params.x = x
local pred = cnn(params, y)
local grads = autograd(cnn)(params, y)
-- Clone model to compare to built-in nn grad eval:
local model2 = model:clone()
model2:zeroGradParameters()
local yhat = model2:forward(x)
local gx = model2:backward( x, nn.MSECriterion():backward(yhat,y) )
local _,grads2 = model2:parameters()
-- Check errs:
for i in ipairs(grads) do
local err = (grads[i] - grads2[i]):abs():max()
tester:asserteq(err, 0, 'incorrect grad wrapper')
end
local err = (gx - grads.x):abs():max()
tester:asserteq(err, 0, 'incorrect grad wrapper')
end,
Models_NeuralNetwork = function()
-- Define model:
local f,params = autograd.model.NeuralNetwork({
inputFeatures = 100,
hiddenFeatures = {50,2},
classifier = true,
})
-- Loss:
local loss = function(params, input, target)
local pred = f(params, input)
local loss = autograd.loss.crossEntropy(pred,target)
return loss,pred
end
params[1][1]:normal(0,0.01)
params[2][1]:normal(0,0.01)
local i = torch.randn(100)
local t = torch.Tensor({1,0})
local l,pred = loss(params, i, t)
local grads = autograd(loss)(params, i, t)
tester:asserteq(type(l), 'number', 'loss should be a scalar')
tester:asserteq(grads[1][1]:dim(), 2, 'weights for layer 2 have incorrect dims')
tester:asserteq(grads[1][2]:dim(), 1, 'biases for layer 2 have incorrect dims')
tester:asserteq(grads[2][1]:dim(), 2, 'weights for layer 4 have incorrect dims')
tester:asserteq(grads[2][2]:dim(), 1, 'biases for layer 4 have incorrect dims')
-- Gradcheck
tester:assert(gradcheck(loss, params, i, t), 'incorrect gradients')
end,
Models_SpatialNetwork = function()
-- Define conv layers:
local f1,params1 = autograd.model.SpatialNetwork({
inputFeatures = 3,
hiddenFeatures = {16, 16},
poolings = {4, 2},
kernelSize = 3,
activations = 'Tanh',
})
-- Define upper regular layers:
local f2,params2 = autograd.model.NeuralNetwork({
inputFeatures = 16,
hiddenFeatures = {32,2},
classifier = true,
})
-- Loss == full conv-net with least-squares loss:
local loss = function(params, input, target)
local conv = f1(params[1], input)
local pred = f2(params[2], conv)
local loss = autograd.loss.leastSquares(pred,target)
return loss,pred
end
local params = {params1, params2}
params[1][1][1]:normal(0,0.01)
params[1][2][1]:normal(0,0.01)
params[2][1][1]:normal(0,0.01)
params[2][2][1]:normal(0,0.01)
local i = torch.randn(3,8,8)
local t = torch.randn(2)
local l,pred = loss(params, i, t)
local grads = autograd(loss)(params, i, t)
tester:asserteq(type(l), 'number', 'loss should be a scalar')
-- Gradcheck:
tester:assert(gradcheck(loss, params, i, t), 'incorrect gradients')
end,
Models_RecurrentNetwork = function()
-- Define RNN:
local f,params = autograd.model.RecurrentNetwork({
inputFeatures = 10,
hiddenFeatures = 10,
outputType = 'last',
})
-- Params:
params[1].W:normal(0,0.01)
params[1].b:normal(0,0.01)
-- Loss
local loss = function(params, input)
local v = f(params, input)
return torch.sum(v)
end
-- Test on sequence data:
local i = torch.randn(13, 10)
local o = loss(params, i)
local g = autograd(loss)(params, i)
-- Checks
tester:asserteq(type(g), 'table', 'gradients could not be computed')
-- Gradcheck:
tester:assert(gradcheck(loss, params, i), 'incorrect gradients')
end,
Models_RecurrentLSTMNetwork = function()
-- Define RNN:
local f,params = autograd.model.RecurrentLSTMNetwork({
inputFeatures = 10,
hiddenFeatures = 10,
outputType = 'last',
})
-- Params:
params[1].W:normal(0,0.01)
params[1].b:normal(0,0.01)
-- Loss
local loss = function(params, input)
local v = f(params, input)
return torch.sum(v)
end
-- Test on sequence data:
local i = torch.randn(13, 10)
local o = loss(params, i)
local g = autograd(loss)(params, i)
-- Checks
tester:asserteq(type(g), 'table', 'gradients could not be computed')
-- Gradcheck:
tester:assert(gradcheck(loss, params, i), 'incorrect gradients')
-- Define RNN with all states exposed:
local f,params = autograd.model.RecurrentLSTMNetwork({
inputFeatures = 10,
hiddenFeatures = 10,
outputType = 'all',
})
-- Loss
local loss = function(params, input)
local v = f(params, input)
return torch.sum(v)
end
-- Test on sequence data:
local o = loss(params, i)
local g = autograd(loss)(params, i)
-- Checks
tester:asserteq(type(g), 'table', 'gradients could not be computed')
tester:assert(gradcheck(loss, params, i), 'incorrect gradients')
end,
Models_RecurrentGRUNetwork = function()
-- Define RNN:
local f,params = autograd.model.RecurrentGRUNetwork({
inputFeatures = 10,
hiddenFeatures = 10,
outputType = 'last',
})
-- Params:
params[1].W:normal(0,0.01)
params[1].b:normal(0,0.01)
params[1].V:normal(0,0.01)
params[1].c:normal(0,0.01)
-- Loss
local loss = function(params, input)
local v = f(params, input)
return torch.sum(v)
end
-- Test on sequence data:
local i = torch.randn(13, 10)
local o = loss(params, i)
local g = autograd(loss)(params, i)
-- Checks
tester:asserteq(type(g), 'table', 'gradients could not be computed')
-- Gradcheck:
tester:assert(gradcheck(loss, params, i), 'incorrect gradients')
-- Define RNN with all states exposed:
local f,params = autograd.model.RecurrentGRUNetwork({
inputFeatures = 10,
hiddenFeatures = 10,
outputType = 'all',
})
-- Loss
local loss = function(params, input)
local v = f(params, input)
return torch.sum(v)
end
-- Test on sequence data:
local o = loss(params, i)
local g = autograd(loss)(params, i)
-- Checks
tester:asserteq(type(g), 'table', 'gradients could not be computed')
tester:assert(gradcheck(loss, params, i), 'incorrect gradients')
end,
Modules_LayerNormalization = function()
local f,params = autograd.module.LayerNormalization({nOutputs = 100})
-- Loss:
local loss = function(params, input)
local normed = f(params, input)
local l = torch.sum(normed)
return l, normed
end
dloss = autograd(loss)
params[1].gain:fill(1)
params[1].bias:fill(0.1)
local i = torch.randn(100) -- test 1D input
local l, i_normed = loss(params, i)
local grads = dloss(params, i)
tester:asserteq(type(l), 'number', 'loss should be a scalar')
tester:asserteq(grads[1].gain:dim(), 2, 'gain has incorrect dim')
tester:asserteq(grads[1].bias:dim(), 2, 'bias has incorrect dim')
tester:asserteq(i_normed:dim(), i:dim(), 'normed input has incorrect dim')
i = torch.randn(5,100) -- batch x nOutputs
l, i_normed = loss(params, i)
grads = dloss(params, i)
tester:asserteq(type(l), 'number', 'loss should be a scalar')
tester:asserteq(grads[1].gain:dim(), 2, 'gain has incorrect dim')
tester:asserteq(grads[1].bias:dim(), 2, 'bias has incorrect dim')
tester:asserteq(i_normed:dim(), i:dim(), 'normed input has incorrect dim')
-- Gradcheck
tester:assert(gradcheck(loss, params, i), 'incorrect gradients')
end,
Modules_SoftAttention = function()
local f,params = autograd.module.SoftAttention({
hiddenFeatures = 50,
subjectFeatures = 100,
subjectChoices = 16
})
-- Loss:
local loss = function(params, input, hidden)
local at, ft = f(params, input, hidden)
local l = torch.sum(at)
return l, at, ft
end
local dloss = autograd(loss)
params[1].W_att_subject:normal(0, 0.01)
params[1].W_att_h:normal(0, 0.01)
params[1].b_att:zero()
local x = torch.randn(100, 16)
local h = torch.randn(50)
local l, a, f = loss(params, x, h)
local grads = dloss(params, x, h)
tester:asserteq(type(l), 'number', 'loss should be a scalar')
tester:asserteq(grads[1].W_att_subject:dim(), 3, 'W_att_subject grad has incorrect dim')
tester:asserteq(grads[1].W_att_h:dim(), 2, 'W_att_h grad has incorrect dim')
tester:asserteq(grads[1].b_att:dim(), 2, 'b_att grad has incorrect dim')
tester:asserteq(torch.size(a, 1), torch.size(x,1), 'attention has incorrect dim')
tester:asserteq(torch.size(f, 1), torch.size(x,2), 'focus has incorrect dim')
-- Gradcheck
tester:assert(gradcheck(loss, params, x, h), 'incorrect gradients')
x = torch.randn(10, 100, 16)
h = torch.randn(10, 50)
local l, a, f = loss(params, x, h)
local grads = dloss(params, x, h)
tester:asserteq(type(l), 'number', 'loss should be a scalar')
tester:asserteq(grads[1].W_att_subject:dim(), 3, 'W_att_subject grad has incorrect dim')
tester:asserteq(grads[1].W_att_h:dim(), 2, 'W_att_h grad has incorrect dim')
tester:asserteq(grads[1].b_att:dim(), 2, 'b_att grad has incorrect dim')
tester:asserteq(torch.size(a, 2), torch.size(x,2), 'attention has incorrect dim')
tester:asserteq(torch.size(f, 2), torch.size(x,3), 'focus has incorrect dim')
tester:asserteq(torch.size(a, 1), torch.size(x,1), 'attention has incorrect batch size')
tester:asserteq(torch.size(f, 1), torch.size(x,1), 'focus has incorrect batch size')
-- Gradcheck
tester:assert(gradcheck(loss, params, x, h), 'incorrect gradients')
end,
Modules_MaskedBatchNormalization = function()
local f, params, state = autograd.module.MaskedBatchNormalization({nOutputs = 100})
local threshold = 1e-5
local eval_state = {momentum = state.momentum, train = 0,
running_mean = state.running_mean,
running_std = state.running_std}
-- Loss:
local loss = function(params, input, mask, state)
local normed = f(params, input, mask, state)
local l = torch.sum(normed)
return l, normed
end
local dloss = autograd(loss)
params[1].gain:fill(0.1)
params[1].bias:fill(0.1)
local i = torch.randn(100) -- test 1D input
local mask = torch.bernoulli(i.new(i:size()))
local pre_mean = state.running_mean:clone()
local pre_std = state.running_std:clone()
local l, i_normed = loss(params, i, mask, state)
local grads = dloss(params, i, mask, state)
tester:asserteq(type(l), 'number', 'loss should be a scalar')
tester:asserteq(grads[1].gain:dim(), 2, 'gain has incorrect dim')
tester:asserteq(grads[1].bias:dim(), 2, 'bias has incorrect dim')
tester:asserteq(i_normed:dim(), i:dim(), 'normed input has incorrect dim')
tester:assert(not pre_mean:equal(state.running_mean), 'running mean did not change with train = 1')
tester:assert(not pre_std:equal(state.running_std), 'running std did not change with train = 1')
-- Gradcheck
tester:assert(gradcheck(loss, params, i, mask, eval_state), 'incorrect gradients')
i = torch.randn(5,100) -- batch x nOutputs
mask = torch.bernoulli(i.new(i:size()))
pre_mean = eval_state.running_mean:clone()
pre_std = eval_state.running_std:clone()
l, i_normed = loss(params, i, mask, eval_state)
grads = dloss(params, i, mask, eval_state)
tester:asserteq(type(l), 'number', 'loss should be a scalar')
tester:asserteq(grads[1].gain:dim(), 2, 'gain has incorrect dim')
tester:asserteq(grads[1].bias:dim(), 2, 'bias has incorrect dim')
tester:asserteq(i_normed:dim(), i:dim(), 'normed input has incorrect dim')
tester:assert(pre_mean:equal(eval_state.running_mean), 'running mean changed with train = 0')
tester:assert(pre_std:equal(eval_state.running_std), 'running std changed with train = 0')
-- Gradcheck
tester:assert(gradcheck(loss, params, i, mask, eval_state), 'incorrect gradients')
i = torch.randn(5,10,100) -- batch x time x nOutputs
mask = torch.bernoulli(i.new(i:size()))
pre_mean = state.running_mean:clone()
pre_std = state.running_std:clone()
l, i_normed = loss(params, i, mask, state)
grads = dloss(params, i, mask, state)
tester:asserteq(type(l), 'number', 'loss should be a scalar')
tester:asserteq(grads[1].gain:dim(), 2, 'gain has incorrect dim')
tester:asserteq(grads[1].bias:dim(), 2, 'bias has incorrect dim')
tester:asserteq(i_normed:dim(), i:dim(), 'normed input has incorrect dim')
tester:assert(not pre_mean:equal(state.running_mean), 'running mean did not change with train = 1')
tester:assert(not pre_std:equal(state.running_std), 'running std did not change with train = 1')
-- Gradcheck
tester:assert(gradcheck(loss, params, i, mask, eval_state), 'incorrect gradients')
end,
DebuggerDivZero = function()
-- Parameters:
local W = torch.Tensor(32,100):fill(.5)
local x = torch.Tensor(100):fill(.5)
-- Function:
local func = function(inputs)
return torch.sum(torch.div(inputs.W * inputs.x, 0))
end
-- Grads:
local sawHook = 0
local badline
local dFunc = autograd(func, {
debugHook = function(debugger, msg, gen)
if sawHook == 0 then
badline = stringx.split(gen.source, "\n")[gen.line]
--debugger.showDot()
end
sawHook = sawHook + 1
end
})
-- Compute func and grads:
local pred = func({W=W, x=x})
local grads = dFunc({W=W, x=x})
-- Tests:
tester:asserteq(sawHook, 5, 'debugHook should have tripped')
tester:asserteq(badline, " torch_div(rlocals[2], rlocals[1], 0)", 'debugHook should have showed the bad line')
end,
ParamLen = function()
local params = {torch.Tensor(100):fill(1), torch.Tensor(100):fill(1)}
-- Function:
local func = function(params)
return torch.sum(params[1] + params[2] * #params)
end
local df = autograd(func)
local grads = df(params)
-- Tests:
tester:assert(gradcheck(func, params, 1), 'incorrect gradients')
end,
-- MissingGradient = function()
-- -- Function:
-- local func = function(W)
-- return torch.sum(torch.reshape(W,5,5,1))
-- end
-- local test = function()
-- return autograd(func)(torch.FloatTensor(5, 5))
-- end
-- --test()
-- local _, msg = pcall(test)
-- tester:assert(string.find(msg, "missing gradient"), "missing gradient not reported")
-- end,
Optim = function()
local f = function(p, x, y)
local h1 = torch.tanh(x * p.W[1] + p.b[1])
return torch.sqrt(torch.sum(torch.pow(y - h1, 2)))
end
local df = autograd(f)
local nData = 5000
local xs = torch.randn(nData, 10)
local ys = torch.Tensor(nData, 1)
for i=1, nData do ys[i][1] = math.tanh(xs[i]:sum()) end
local learningRate = 1e-3
local params = {
W = { torch.randn(10, 1) },
b = { torch.randn(1) }
}
local params3 = {
W = { params.W[1]:clone() },
b = { params.b[1]:clone() }
}
local loss1
for e=1, 5 do
loss1 = 0
for i=1,nData do
local grads, l = df(params, xs:narrow(1, i, 1), ys:narrow(1, i, 1))
loss1 = loss1 + l / nData
params.W[1]:add(-learningRate, grads.W[1])
params.b[1]:add(-learningRate, grads.b[1])
end
end
local state = { learningRate = learningRate }
local loss3
for e=1, 5 do
local optimfn, states = autograd.optim.sgd(df, state, params3)
loss3 = 0
for i=1,nData do
local grads, loss = optimfn(xs:narrow(1, i, 1), ys:narrow(1, i, 1))
loss3 = loss3 + loss / nData
end
end
tester:assert(math.abs(loss1 - loss3) < 1e-6, 'sgd wrapper should produce same loss')
end,
OptimNN = function()
local nn = require 'nn'
local optim = require 'optim'
torch.manualSeed(0)
-- Set up the localizer network
---------------------------------
local locnet = nn.Sequential()
locnet:add(nn.SpatialMaxPooling(2,2,2,2))
locnet:add(nn.SpatialConvolution(1,20,5,5))
locnet:add(nn.ReLU(true))
locnet:add(nn.SpatialMaxPooling(2,2,2,2))
locnet:add(nn.SpatialConvolution(20,20,5,5))
locnet:add(nn.ReLU(true))
locnet:add(nn.View(20*2*2))
locnet:add(nn.Linear(20*2*2,20))
locnet:add(nn.ReLU(true))
locnet:add(nn.Linear(20,6))
-- Functionalize networks
---------------------------------
local agLocnet, locParams = autograd.functionalize(locnet)
-- Set up parameters
---------------------------------
params = {
locParams = locParams,
}
-- Define our loss function
---------------------------------
local function f(inputs, bhwdImages, labels)
local warpPrediction = agLocnet(inputs.locParams, bhwdImages)
return torch.sum(warpPrediction)
end
local g = autograd(f, {optimize = true})
-- FAILS FOR OTHER OPTIMIZERS AS WELL
local optimfn, states = autograd.optim.sgd(g, {learningRate=1e-2}, params)
for i=1,3 do
-- Get images in BHWD format, labels in one-hot format:
local data = torch.randn(256,1,32,32)
local target = torch.zeros(256):random(0,9)
-- Calculate gradients:
local grads, loss = optimfn(data, target)
end
end,
NNFunc_WrapWithoutParams = function()
-- Tests that we can wrap NN modules that do not take parameters
local tanh = autograd.functionalize(nn.Tanh())
local a = torch.eye(3)
tester:assertTensorEq(torch.tanh(a), autograd.nn.Tanh()(a), 1e-8)
tester:assertTensorEq(torch.tanh(a), tanh(a), 1e-8)
local loss = autograd.functionalize(nn.MSECriterion())
end,
FunctionalizeCriterionModule = function()
-- Tests the use of table-valued inputs in criterions
local input = {torch.rand(2,10), torch.randn(2,10)}
local target = {torch.IntTensor{1,8}, torch.randn(2,10)}
local nll = nn.ClassNLLCriterion()
local mse = nn.MSECriterion()
local pc = nn.ParallelCriterion():add(nll, 0.5):add(mse)
local output1 = pc:forward(input, target)
local pcf = autograd.functionalize(pc)
local mt = getmetatable(pc)
local output2 = pcf(input, target)
tester:asserteq(output1, output2, 'loss not equal')
local f = function(x, y)
return pcf(x, y)
end
tester:assert(gradcheck(f, input, target), 'incorrect gradients')
end,
ScalarMul = function()
-- Tests that functions that use scalar multiply do not cause an error
local f = function(params)
return torch.sum(params.W) * 0.4
end
local df = autograd(f)
local params = { W = torch.randn(5,5)}
-- this line should not raise an error
local grads, loss = df(params)
tester:assert(gradcheck(f, {W=params.W}), 'incorrect gradients')
end,
StableGradients = function()
-- Parameters:
local W = torch.Tensor(32,100):fill(.5)
local x = torch.Tensor(100):fill(.5)
-- Function:
local func = function(inputs, zf)
local dims = torch.size(zf, 1)
local w1 = inputs.W
local x1 = inputs.x
for i = 1, dims do
w1 = w1 * 1.1
x1 = x1 * 1.1
end
return torch.sum(w1 * x1 * 3.0 + 1.0)
end
local df = autograd(func, { stableGradients = true })
local g = df({W=W, x=x}, torch.Tensor(1))
for i = 1, 10 do
local ng = df({W=W, x=x}, torch.Tensor(i))
tester:assert(g.W == ng.W, 'gradient tensors should match')
tester:assert(g.x == ng.x, 'gradient tensors should match')
local ng = df({W=W, x=x}, torch.Tensor(i))
tester:assert(g.W == ng.W, 'gradient tensors should match')
tester:assert(g.x == ng.x, 'gradient tensors should match')
end
end,
LessThan = function()
local f = function(params, x)
local s = torch.sum(params.a)
if autograd.util.lt(s, 3) then
return s*3
else
return s
end
end
tester:assert(gradcheck(f,{a = torch.eye(3)*3}), "Incorrect gradient")
tester:assert(gradcheck(f,{a = torch.eye(3)*0.3}), "Incorrect gradient")
end,
CatNumber = function()
local function f(params)
local tbl = {}
tbl[#tbl+1] = params.a
tbl[#tbl+1] = params.b
tbl[#tbl+1] = params.c
local a = autograd.util.cat(tbl)
return -torch.sum(a)
end
local df = autograd(f)
local params = {a=1,b=2,c=3}
local grads, loss = df(params)
-- It just needs to run, gradcheck doesn't support numbers right now
end,
FunctionalFill = function()
local function f(params)
local o = util.fill(params.a, torch.sum(params.a))
return torch.sum(o)
end
tester:assert(gradcheck(f,{a = torch.randn(5,5)}), "Incorrect gradient")
end,
Padding = function()
local function adjointSelect(params)
local padded = autograd.util.selectSliceCopy(params.x, torch.zeros(3,3), 1, 1)
return torch.sum(padded*3)
end
tester:assert(gradcheck(adjointSelect, {x=torch.randn(3)}), "Incorrect gradient")
local function adjointNarrow(params)
local padded = autograd.util.narrowSliceCopy(params.x, torch.zeros(3,3), 1, 1, 2)
return torch.sum(padded*3)
end
tester:assert(gradcheck(adjointNarrow, {x=torch.randn(3,2)}), "Incorrect gradient")
local function adjointIndex(params)
local padded = autograd.util.indexAdd(params.x, torch.zeros(3,3), 1, torch.LongTensor{3,1})
return torch.sum(padded*3)
end
tester:assert(gradcheck(adjointIndex, {x=torch.randn(2,3)}), "Incorrect gradient")
end,
RepeatTensor = function()
local function f2to2(params)
local y = torch.repeatTensor(params.x, 2, 2)*3
return torch.sum(y)
end
tester:assert(gradcheck(f2to2, {x=torch.randn(3,3)}), "Incorrect gradient")
x = torch.randn(3,3)
local o_double = autograd(f2to2)({x=x}).x
local o_float = autograd(f2to2)({x=x:float()}).x
tester:assertTensorEq(o_double, o_float:double(), 1e-10, "Incorrect floating point gradient")
local function f3to3(params)
local y = torch.repeatTensor(params.x, 2, 2, 2)*3
return torch.sum(y)
end
tester:assert(gradcheck(f3to3, {x=torch.randn(3,3,3)}), "Incorrect gradient")
local function f2to3(params)
local y = torch.repeatTensor(params.x, 2, 2, 2)*3
return torch.sum(y)
end
tester:assert(gradcheck(f2to3, {x=torch.randn(3,3)}), "Incorrect gradient")
local function f3to4(params)
local y = torch.repeatTensor(params.x, 2, 2, 2, 2)*3
return torch.sum(y)
end
tester:assert(gradcheck(f3to4, {x=torch.randn(3,3,3)}), "Incorrect gradient")
end,
ZeroGrad = function()
--the output of this function does not depend on params, so its grad should be uniformly zero
local innerFn = function(params, x, y)
return torch.sum(torch.add(x,y))
end
local dneuralNet = autograd(innerFn)
local numFeatures = 5
local testParams = torch.randn(numFeatures)
local x = torch.randn(numFeatures)
local y = torch.randn(1)[1]
local analyticalGrad = testParams:clone():zero()
local numericalGrad = dneuralNet(testParams,x,y)
tester:assertTensorEq(analyticalGrad,numericalGrad,1e-8,'analytical and numerical solution do not match')
end,
SimpleGradGrad = function()
local innerFn = function(params, x, y)
local yHat = params*x
-- note: this is the plain residual (not a squared loss), so its gradient w.r.t. params is simply -x
local residual = (y - yHat)
return residual
end
--autodiff
local dneuralNet = autograd(innerFn)
--the outer function computes the sum of the gradient of the neural network. Therefore, differentiating yields the sum of each column of the Hessian
local outerFn = function(params_2,x,y)
local grad = dneuralNet(params_2,x,y)
return torch.sum(grad)
end
--autodiff solution for sum of each column of Hessian
local ddf = autograd(outerFn)
local numFeatures = 1
local testParams = torch.randn(numFeatures)
local x = torch.randn(numFeatures)
local y = torch.randn(1)[1]
local analyticalGrad = x:clone():mul(-1)
local numericalGrad = dneuralNet(testParams,x,y)
tester:assertTensorEq(analyticalGrad,numericalGrad,1e-8,'analytical and numerical solution do not match')
local analyticalGradGrad = x:clone():zero()
local numericalGradGrad = ddf(testParams,x,y)
tester:assertTensorEq(analyticalGradGrad,numericalGradGrad,1e-8,'analytical and numerical solution do not match')
end,
GradGrad = function()
local numFeatures = 5
local params = torch.randn(numFeatures)
--synthetic data
local x = torch.randn(numFeatures)
local y = torch.randn(1)[1]
local innerFn = function(params, x, y)
local yHat = params*x
local squaredLoss = torch.pow(y - yHat,2)
return squaredLoss
end
--autodiff
local dneuralNet = autograd(innerFn)
local numericalGrad = dneuralNet(params,x,y)
--analytical expression
local residual = y - params*x
analyticalGrad = x:clone():mul(-2*residual)
tester:assertTensorEq(analyticalGrad,numericalGrad,1e-8,'analytical and numerical solution do not match')
--the outer function computes the sum of the gradient of the neural network. Therefore, differentiating yields the sum of each column of the Hessian
local outerFn = function(params,x,y)
local grad = dneuralNet(params,x,y)
return torch.sum(grad)
end
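-- outerFn(params) = sum_i dL/dparams_i, so d(outerFn)/dparams_j = sum_i H_ij,
-- i.e. the j-th column sum of the Hessian; here H = 2*x*x^T is symmetric, so this
-- equals the row sums computed analytically below via torch.sum(hessian, 2).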
--autodiff solution for sum of each column of Hessian
local ddf = autograd(outerFn)
local numericalGradGrad = ddf(params,x,y)
--analytical expression
hessian = torch.ger(x,x):mul(2)
analyticalGradGrad = torch.sum(hessian,2):squeeze()
tester:assertTensorEq(analyticalGradGrad,numericalGradGrad,1e-8,'analytical and numerical solution do not match')
end,
Assignment = function()
local f1 = function(params)
local xc = torch.clone(params.x)
xc[1] = torch.sum(params.y)*2.0
return torch.sum(xc)
end
tester:assert(gradcheck(f1,{x=torch.randn(10),y=torch.randn(3)}), "Incorrect gradient")
local f2 = function(params)
local xc = torch.clone(params.x)
xc[1] = torch.sum(params.y)*2.0
xc[2] = torch.sum(params.y)*3.0
return torch.sum(xc)
end
tester:assert(gradcheck(f2,{x=torch.randn(10),y=torch.randn(3)}), "Incorrect gradient")
local f3 = function(params)
local xc = torch.clone(params.x)
xc[{1,1}] = torch.sum(params.y)*2.0
return torch.sum(xc)
end
tester:assert(gradcheck(f3,{x=torch.randn(10,10),y=torch.randn(3)}), "Incorrect gradient")
local f4 = function(params)
local xc = torch.clone(params.x)
xc[torch.LongStorage{2,2}] = torch.sum(params.y)
return torch.sum(xc)
end
tester:assert(gradcheck(f4,{x=torch.randn(10,10),y=torch.randn(3)}), "Incorrect gradient")
end,
ScalarSigmoid = function()
local params = {w = 1}
local f = function(params, x)
return torch.sigmoid(params.w * x)
end
local df = autograd(f)
local dparams, loss = df(params, 2)
end,
Contiguous = function()
-- Parameters:
local W = torch.Tensor(32,100):fill(.5)
local x = torch.Tensor(100):fill(.5)
-- Function:
local f1 = function(inputs)
return torch.sum(torch.contiguous(torch.contiguous(inputs.W)) * torch.contiguous(torch.contiguous(inputs.x)))
end
-- Tests:
tester:assert(gradcheck(f1,{W=torch.Tensor(32,100):fill(.5),x=torch.Tensor(100):fill(.5)}), "Incorrect gradient")
end,
Bmm = function()
local X = torch.randn(5, 4, 3)
local Y = torch.randn(5, 3, 2)
local bmmFn = function(inputs)
return torch.sum(torch.bmm(inputs.X, inputs.Y))
end
tester:assert(gradcheck(bmmFn, {X=X, Y=Y}), "Incorrect gradient")
end,
Gather = function()
local X = torch.randn(5,5)
local index = torch.LongTensor{{1, 2, 3, 4, 5}, {2, 3, 4, 5, 1}}
local gather = function(inputs, index)
return torch.sum(torch.gather(inputs.X, 1, index))
end
tester:assert(gradcheck(gather, {X = X}, index), "Incorrect gradient")
end,
Scatter = function()
local X = torch.rand(2, 5)
local index = torch.LongTensor{{1, 2, 3, 1, 1}, {3, 1, 1, 2, 3}}
local Z = torch.zeros(3, 5)
local scatter = function(inputs, index, Z)
return torch.sum(torch.scatter(Z, 1, index, inputs.X))
end
tester:assert(gradcheck(scatter, {X = X}, index, Z), "Incorrect gradient")
end,
Baddbmm = function()
local v1 = torch.randn(1)[1]
local v2 = torch.randn(1)[1]
local M = torch.randn(5, 4, 2)
local X = torch.randn(5, 4, 3)
local Y = torch.randn(5, 3, 2)
local baddbmm3argFn = function(inputs)
return torch.sum(torch.baddbmm(inputs.M, inputs.X, inputs.Y))
end
tester:assert(gradcheck(baddbmm3argFn, {M=M, X=X, Y=Y}), "Incorrect gradient")
local baddbmm4argFn = function(inputs)
return torch.sum(torch.baddbmm(inputs.v1, inputs.M, inputs.X, inputs.Y))
end
tester:assert(gradcheck(baddbmm4argFn, {v1=v1, M=M, X=X, Y=Y}), "Incorrect gradient")
local baddbmm5argFn = function(inputs)
return torch.sum(torch.baddbmm(inputs.v1, inputs.M, inputs.v2, inputs.X, inputs.Y))
end
tester:assert(gradcheck(baddbmm5argFn, {v1=v1, v2=v2, M=M, X=X, Y=Y}), "Incorrect gradient")
end
}
local function prefixTests(pf, t, skip)
local nt = { }
if type(t) == "table" then
for k, v in pairs(t) do
if not skip[k] then
nt[pf .. k] = v
end
end
elseif type(t) == "string" then
nt = pf .. t
elseif type(t) == "nil" then
nt = nil
end
return nt
end
-- Run tests:
print(prefixTests("Optimized_", tests, { }))
autograd.optimize(true)
tester:add(prefixTests("Optimized_", tests, { })):run(prefixTests("Optimized_", arg[1]))
autograd.optimize(false)
tester = torch.Tester()
tester:add(prefixTests("Direct_", tests, { GradGrad = true, AutoModule = true, DebuggerDivZero = true, StableGradients = true, ZeroGrad = true, SimpleGradGrad = true })):run(arg[1])
<|start_filename|>src/support.lua<|end_filename|>
-- Various torch additions, should move to torch itself
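-- (presumably these wrappers exist so that method-style calls such as A:select(dim, i)
-- have free-function equivalents like torch.select(A, dim, i) that autograd's operator
-- overloading machinery can intercept and differentiate through)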
torch.select = function (A, dim, index)
return A:select(dim, index)
end
torch.index = function (A, dim, index)
return A:index(dim, index)
end
torch.narrow = function(A, dim, index, size)
return A:narrow(dim, index, size)
end
torch.clone = function(A)
local B = A.new(A:size())
return B:copy(A)
end
torch.contiguous = function(A)
return A:contiguous()
end
torch.copy = function(A,B)
local o = A:copy(B)
return o
end
torch.size = function(A, dim)
return A:size(dim)
end
torch.nDimension = function(A)
return A:nDimension()
end
torch.nElement = function(A)
return A:nElement()
end
torch.isSameSizeAs = function(A, B)
return A:isSameSizeAs(B)
end
torch.transpose = function(A, d1, d2)
return A:transpose(d1,d2)
end
torch.t = function(A)
return A:t()
end
torch.long = function(A)
return A:long()
end
torch.typeAs = function(A, B)
return A:type(B:type())
end
local numberMetatable = {
__add = function(a,b)
if type(a) == "number" then
return b + a
else
error("attempt to perform arithmetic on a " .. type(a) .. " value", 2)
end
end,
__sub = function(a,b)
if type(a) == "number" then
return -b + a
else
error("attempt to perform arithmetic on a " .. type(a) .. " value", 2)
end
end,
__mul = function(a,b)
if type(a) == "number" then
return b * a
else
error("attempt to perform arithmetic on a " .. type(a) .. " value", 2)
end
end
}
debug.setmetatable(1.0, numberMetatable)
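-- With this metatable installed, expressions with a plain Lua number on the left,
-- e.g. `2 - x` where x is a tensor or an autograd node, are flipped into `-x + 2`
-- (and `2 * x` into `x * 2`), so the right-hand operand's own operator overloads
-- handle the arithmetic.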
<|start_filename|>src/model/init.lua<|end_filename|>
-- standard models
local model = {
NeuralNetwork = require 'autograd.model.NeuralNetwork',
SpatialNetwork = require 'autograd.model.SpatialNetwork',
RecurrentNetwork = require 'autograd.model.RecurrentNetwork',
RecurrentLSTMNetwork = require 'autograd.model.RecurrentLSTMNetwork',
RecurrentGRUNetwork = require 'autograd.model.RecurrentGRUNetwork',
RecurrentFWNetwork = require 'autograd.model.RecurrentFWNetwork',
AlexNet = require 'autograd.model.AlexNet',
}
return model
<|start_filename|>src/model/RecurrentGRUNetwork.lua<|end_filename|>
-- util
local util = require 'autograd.util'
return function(opt, params)
-- options:
opt = opt or {}
local inputFeatures = opt.inputFeatures or 10
local hiddenFeatures = opt.hiddenFeatures or 100
local outputType = opt.outputType or 'last' -- 'last' or 'all'
-- container:
params = params or {}
-- parameters:
local p = {
W = torch.zeros(inputFeatures+hiddenFeatures, 2 * hiddenFeatures),
b = torch.zeros(1, 2 * hiddenFeatures),
V = torch.zeros(inputFeatures+hiddenFeatures, hiddenFeatures),
c = torch.zeros(1, hiddenFeatures),
}
table.insert(params, p)
-- function:
local f = function(params, x, prevState)
-- dims:
local p = params[1] or params
if torch.nDimension(x) == 2 then
x = torch.view(x, 1, torch.size(x, 1), torch.size(x, 2))
end
local batch = torch.size(x, 1)
local steps = torch.size(x, 2)
-- hiddens:
prevState = prevState or {}
local hs = {}
-- go over time:
for t = 1,steps do
-- xt
local xt = torch.select(x,2,t)
-- prev h
local hp = hs[t-1] or prevState.h or torch.zero(x.new(batch, hiddenFeatures))
-- pack all dot products:
local dots = torch.cat(xt,hp,2) * p.W + torch.expand(p.b, batch, 2*hiddenFeatures)
-- view as 2 groups:
dots = torch.view(dots, batch, 2, hiddenFeatures)
-- batch compute gates:
local sigmoids = util.sigmoid( dots )
local inputGate = torch.select(sigmoids, 2,1)
local forgetGate = torch.select(sigmoids, 2,2)
-- write inputs
local inputValue = torch.tanh( torch.cat(xt,torch.cmul(hp,inputGate),2) * p.V + torch.expand(p.c, batch, hiddenFeatures) )
-- next h:
hs[t] = torch.cmul(1-forgetGate, hp) + torch.cmul(forgetGate, inputValue)
end
-- save state
local newState = {h=hs[#hs]}
-- output:
if outputType == 'last' then
-- return last hidden code:
return hs[#hs], newState
else
-- return all:
for i in ipairs(hs) do
hs[i] = torch.view(hs[i], batch,1,hiddenFeatures)
end
return x.cat(hs, 2), newState
end
end
-- layers
return f, params
end
<|start_filename|>examples/print-dotfile.lua<|end_filename|>
print("This example will only fully work on Mac OS X, because we use Safari to view the resulting SVG file of the compute graph")
require 'torch'
autograd = require 'autograd'
-- Just a standard MLP, for demonstration purposes
----------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------
local inputSize = 1024
local classes = {0,1,2,3,4,5,6,7,8,9}
-- What model to train:
local predict,f,params
-- Define our neural net
function predict(params, input, target)
local h1 = torch.tanh(input * params.W[1] + params.B[1])
local h2 = torch.tanh(h1 * params.W[2] + params.B[2])
local h3 = h2 * params.W[3] + params.B[3]
local out = autograd.util.logSoftMax(h3)
return out
end
-- Define our training loss
function f(params, input, target)
local prediction = predict(params, input, target)
local loss = autograd.loss.logMultinomialLoss(prediction, target)
return loss, prediction
end
-- Define our parameters
-- [-1/sqrt(#output), 1/sqrt(#output)]
torch.manualSeed(0)
local W1 = torch.Tensor(inputSize,50):uniform(-1/math.sqrt(50),1/math.sqrt(50))
local B1 = torch.Tensor(50):fill(0)
local W2 = torch.Tensor(50,50):uniform(-1/math.sqrt(50),1/math.sqrt(50))
local B2 = torch.Tensor(50):fill(0)
local W3 = torch.Tensor(50,#classes):uniform(-1/math.sqrt(#classes),1/math.sqrt(#classes))
local B3 = torch.Tensor(#classes):fill(0)
-- Trainable parameters:
params = {
W = {W1, W2, W3},
B = {B1, B2, B3},
}
input = torch.randn(1,inputSize)
target = torch.zeros(1,#classes)
target[1][3] = 1
----------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------
-- Get our function, just the forward pass, along with the DOT file showing the graph
fileName = "/tmp/graph.dot"
g = autograd(f,{dotFile=fileName,optimize=true,withGradients=false,withForward=true})
-- Run the function to produce the dotfile
print(g(params,input,target))
-- Show the dotfile
os.execute('dot -O -Tsvg ' .. fileName)
os.remove(fileName)
os.execute('open -a Safari ' .. fileName ..'.svg')
<|start_filename|>src/runtime/codegen/Source.lua<|end_filename|>
local Source = { }
Source.__index = Source
Source.COMPUTED = "computed"
Source.PARAM = "param"
Source.CONSTANT = "constant"
Source.TABLE = "table"
function Source.new(type)
local v = { }
setmetatable(v, Source)
v:init(type)
return v
end
function Source:init(type)
self.type = type
end
function Source:symbolPath(rootSymbols)
if self.type == Source.TABLE then
if type(self.key) == 'number' then
return self.parent:symbolPath(rootSymbols) .. "[" .. self.key .. "]"
else
return self.parent:symbolPath(rootSymbols) .. "." .. self.key
end
elseif self.type == Source.CONSTANT then
if type(self.val) == "userdata" and self.val.totable ~= nil then
if rootSymbols[self] then
return rootSymbols[self]
end
if torch.isTensor(self.val) then
local tt = self.val:totable()
return self.val:type() .. "({" .. table.concat(tt, ", ") .. "})"
else
local tt = self.val:totable()
return torch.type(self.val) .. "({" .. table.concat(tt, ", ") .. "})"
end
elseif type(self.val) == "table" then
local Value = require 'autograd.runtime.codegen.Value'
local elements = { }
for k, v in pairs(self.val) do
if Value.isValue(v) then
elements[#elements + 1] = v.source:symbolPath(rootSymbols)
else
elements[#elements + 1] = tostring(v)
end
end
return "{" .. table.concat(elements, ", ") .. "}"
elseif type(self.val) == "string" then
return '"' .. self.val .. '"'
else
if self.val == math.huge then
return "math.huge"
end
return tostring(self.val)
end
else
if rootSymbols[self] == nil then
error("unknown symbol for node")
end
return rootSymbols[self]
end
end
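-- For illustration: a TABLE source with key "W" whose PARAM root is named (say) p1 in
-- rootSymbols renders as "p1.W", and a numeric key 1 renders as "p1[1]"; CONSTANT
-- tensors are emitted as literal constructors such as torch.DoubleTensor({...}).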
function Source:differentiable(differentiableMap)
local isDiff = differentiableMap[self]
if isDiff ~= nil then
return isDiff
else
if self.type == Source.TABLE then
isDiff = self.parent:differentiable(differentiableMap)
elseif self.type == Source.COMPUTED then
isDiff = self.node:differentiable(differentiableMap)
elseif self.type == Source.CONSTANT then
if type(self.val) == "table" then
local Value = require 'autograd.runtime.codegen.Value'
for k, v in pairs(self.val) do
if Value.isValue(v) then
if v.source:differentiable(differentiableMap) then
isDiff = true
end
end
end
end
end
differentiableMap[self] = isDiff
end
return isDiff
end
function Source:getRoot()
if self.type == Source.TABLE then
return self.parent:getRoot()
else
return self
end
end
function Source:changeRoot(newRoot)
if self.type == Source.TABLE then
if self.parent.type ~= Source.TABLE then
self.parent = newRoot
else
return self.parent:changeRoot(newRoot)
end
else
return newRoot
end
end
function Source:getParentsArray(arr)
arr = arr or { }
if self.type == Source.TABLE then
self.parent:getParentsArray(arr)
end
arr[#arr + 1] = self
return arr
end
function Source:changeNodeTargetIndex(target, currentIdx, newIdx)
if self.type == Source.COMPUTED then
self.node:changeTargetIndex(self.index, target, currentIdx, newIdx)
end
end
function Source.computed(node, index)
local s = Source.new(Source.COMPUTED)
s.node = node
s.index = index
return s
end
function Source.param(name)
local s = Source.new(Source.PARAM)
s.name = name
return s
end
function Source.constant(value)
local s = Source.new(Source.CONSTANT)
s.val = value
return s
end
function Source.table(parent, key)
local s = Source.new(Source.TABLE)
s.parent = parent
s.key = key
return s
end
return Source
<|start_filename|>examples/train-mnist-cnn.lua<|end_filename|>
-- Libs
local grad = require 'autograd'
local util = require 'autograd.util'
local lossFuns = require 'autograd.loss'
local optim = require 'optim'
grad.optimize(true)
-- Load in MNIST
local trainData, testData, classes = require('./get-mnist.lua')()
local inputSize = trainData.x[1]:nElement()
local confusionMatrix = optim.ConfusionMatrix(classes)
-- What model to train:
local predict,f,params
-- for CNNs, we rely on efficient nn-provided primitives:
local reshape = grad.nn.Reshape(1,32,32)
local conv1, acts1, pool1, conv2, acts2, pool2, flatten, linear
local params = {}
conv1, params.conv1 = grad.nn.SpatialConvolutionMM(1, 16, 5, 5)
acts1 = grad.nn.Tanh()
pool1 = grad.nn.SpatialMaxPooling(2, 2, 2, 2)
conv2, params.conv2 = grad.nn.SpatialConvolutionMM(16, 16, 5, 5)
acts2 = grad.nn.Tanh()
pool2, params.pool2 = grad.nn.SpatialMaxPooling(2, 2, 2, 2)
flatten = grad.nn.Reshape(16*5*5)
linear,params.linear = grad.nn.Linear(16*5*5, 10)
-- Cast the parameters
params = grad.util.cast(params, 'float')
-- Define our network
function predict(params, input, target)
local h1 = pool1(acts1(conv1(params.conv1, reshape(input))))
local h2 = pool2(acts2(conv2(params.conv2, h1)))
local h3 = linear(params.linear, flatten(h2))
local out = util.logSoftMax(h3)
return out
end
-- Define our loss function
function f(params, input, target)
local prediction = predict(params, input, target)
local loss = lossFuns.logMultinomialLoss(prediction, target)
return loss, prediction
end
-- Define our parameters
torch.manualSeed(0)
-- Get the gradients closure magically:
local df = grad(f, {optimize=true})
-- Train a neural network
for epoch = 1,100 do
print('Training Epoch #'..epoch)
for i = 1,trainData.size do
-- Next sample:
local x = trainData.x[i]:view(1,inputSize)
local y = torch.view(trainData.y[i], 1, 10)
-- Grads:
local grads, loss, prediction = df(params,x,y)
-- Update weights and biases
for iparam=1,2 do
params.conv1[iparam] = params.conv1[iparam] - grads.conv1[iparam] * 0.01
params.conv2[iparam] = params.conv2[iparam] - grads.conv2[iparam] * 0.01
params.linear[iparam] = params.linear[iparam] - grads.linear[iparam] * 0.01
end
-- Log performance:
confusionMatrix:add(prediction[1], y[1])
if i % 1000 == 0 then
print(confusionMatrix)
confusionMatrix:zero()
end
end
end
<|start_filename|>src/gradfuns.lua<|end_filename|>
local Value = require 'autograd.runtime.codegen.Value'
local DirectNode = require 'autograd.runtime.direct.DirectNode'
local util = require 'autograd.util'
local overload = require 'autograd.overload'
local function getValue(v)
if Value.isValue(v) then
return v:get()
elseif DirectNode.isNode(v) then
return DirectNode.getValue(v)
else
return v
end
end
-- Utility for defining gradients that are zero
local function zeroGradient(nArgs)
nArgs = nArgs or 2
local zeroGrads = {}
for i=1,nArgs do
zeroGrads[i] = function(...) return nil end
end
return zeroGrads
end
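-- zeroGradient(n) yields n gradient functions that all return nil; presumably intended
-- for registering operations whose inputs should receive no gradient (comparison-style
-- ops and the like).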
-- Helps with resizing gradients
-- Could also be called sumToMatchShape
local function unbroadcast(g,ans,x)
if torch.isTensor(x) then
if torch.isSameSizeAs(x, g) then
return g
end
if torch.nElement(g) == torch.nElement(x) then
return torch.viewAs(g,x)
end
local size = torch.totable(torch.size(x))
local ndim = torch.nDimension(x)
local grad = g
while torch.nDimension(grad) > ndim do
-- collapse the leading (broadcast) dimension: sum over it, then drop it from the shape
local collapsedSize = torch.totable(torch.size(grad))
table.remove(collapsedSize, 1)
grad = torch.view(torch.sum(grad, 1), table.unpack(collapsedSize))
end
-- If we're now the same size, then awesome
if torch.nElement(grad) == torch.nElement(x) then
return torch.viewAs(grad,x)
-- Otherwise, we might have to sum across
-- dimensions that are singleton for x,
-- but not yet for the gradient
else
for i=1,#size do
if size[i] == 1 then
local thisSize = torch.totable(torch.size(grad))
thisSize[i] = 1
grad = torch.view(torch.sum(grad,i),table.unpack(thisSize))
end
end
return grad
end
elseif torch.isTensor(ans) then
return torch.sum(g)
else
return g
end
end
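-- For intuition: if x is a 1x5 bias that was broadcast against a 4x5 activation, the
-- incoming gradient g is 4x5; unbroadcast sums over the broadcast (singleton) dimensions
-- so the returned gradient is 1x5 again, matching x's shape.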
local function elemwiseMul(a,b)
if torch.isTensor(a) and torch.isTensor(b) then
return torch.cmul(a,b)
else
return a*b
end
end
local function elemwiseDiv(a,b)
if torch.isTensor(a) and torch.isTensor(b) then
return torch.cdiv(a,b)
else
return a/b
end
end
local function _sum(x)
if torch.isTensor(x) then
return torch.sum(x)
else
return x
end
end
local function repeatToMatchShape(x,axis)
-- Special sum function to deal with numbers or tensors
if not torch.isTensor(x) then
return function(x) return x end, 1
end
if not axis then
return function(g) return util.fillSameSizeAs(x, _sum(g)) end, torch.nElement(x)
else
axis = getValue(axis)
local size = torch.size(x):fill(1)
size[axis] = torch.size(x, axis)
return function(g) return torch.repeatTensor(g, size) end, size[axis]
end
end
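-- For intuition: the returned closure maps the gradient of a reduction back to the shape
-- of x. For torch.sum(x) the scalar upstream gradient is filled out to x's full size, and
-- for torch.sum(x, axis) it is repeated along that axis; the second return value is the
-- number of repeats (used by the mean gradient to rescale).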
-- Shared functions
local functions = { }
functions.catGradient = {
function(g, ans, x, y,dim)
if torch.isTensor(x) then
dim = dim or torch.nDimension(x)
return torch.narrow(g, dim, 1, torch.size(x, dim))
else
if torch.isTensor(x[1]) then
-- Second argument is dimension if table is passed in
return util.catTableGradient(g, x, y)
else
return util.catNumberGradient(g, x, y)
end
end
end,
function(g,ans,x,y,dim)
if torch.isTensor(y) then
dim = dim or torch.nDimension(x)
return torch.narrow(g, dim, torch.size(x, dim) + 1, torch.size(y, dim))
else
return nil
end
end
}
functions.get = {
function(g, ans, x, k)
local out = util.zerosLike(x)
out[k] = g
return out
end,
function(g, ans, x, k) return nil end,
}
functions.set = {
function(g, ans, x, k, v)
g[k] = 0
return g
end,
function(g, ans, x, k, v)
return nil
end,
function(g, ans, x, k, v)
local gk = getValue(g[k])
if type(gk) == 'number' then
return gk
else
return torch.clone(gk)
end
end,
}
-- Shared operators
local operators = { }
operators.add = {
function(g, ans, x, y) return unbroadcast(g,ans,x) end,
function(g, ans, x, y) return unbroadcast(g,ans,y) end,
}
operators.mul = {
function(g, ans, A, B)
local isTensorA = torch.isTensor(A)
local isTensorB = torch.isTensor(B)
if not isTensorA and isTensorB then
return torch.sum(elemwiseMul(g, B))
elseif isTensorB and torch.nDimension(B) == 2 then
return g * torch.t(B)
elseif isTensorA and torch.nDimension(A) == 2 then
if not isTensorB then
return elemwiseMul(g, B)
else
return torch.ger(g,B)
end
else
return B * g
end
end,
function(g, ans, A, B)
local isTensorA = torch.isTensor(A)
local isTensorB = torch.isTensor(B)
if not isTensorB and isTensorA then
return torch.sum(elemwiseMul(g, A))
elseif isTensorA and torch.nDimension(A) == 2 then
return torch.t(A) * g
elseif isTensorB and torch.nDimension(B) == 2 then
if not isTensorA then
return elemwiseMul(g, A)
else
return torch.ger(A, g)
end
else
return A * g
end
end,
}
operators.unm = {
function(g, ans, x) return -g end
}
operators.div = {
function(g, ans, x, y) return unbroadcast(elemwiseDiv(g,y),ans,x) end,
function(g, ans, x, y) return unbroadcast(elemwiseMul(-g,elemwiseDiv(x,torch.pow(y,2))),ans,y) end,
}
operators.sub = {
function(g, ans, x, y) return unbroadcast(g,ans,x) end,
function(g, ans, x, y) return unbroadcast(-g,ans,y) end,
}
operators.pow = {
function(g, ans, x, y)
local newg = elemwiseMul(elemwiseMul(g,y),torch.pow(x,y-1))
return unbroadcast(newg, ans, x)
end,
function(g, ans, x, y)
local newg = elemwiseMul(g,elemwiseMul(torch.log(x),torch.pow(x,y)))
return unbroadcast(newg, ans, y)
end
}
-- Define some gradients that will be shared by the class and the torch module
-- e.g. torch.view(x,3,3) and x:view(3,3)
local viewGradients = {
function(g, ans, x,sizes)
return torch.view(torch.contiguous(g), torch.size(x))
end
}
local viewAsGradients = {
function(g, ans, x,template)
return torch.clone(torch.viewAs(g,x))
end,
function(g, ans, x,template)
return nil
end
}
local expandGradients = {
function(g, ans, x,...)
local xSizes = torch.size(x):totable()
local out = g
for dim,size in pairs(xSizes) do
if size == 1 then
out = torch.sum(out,dim)
end
end
return out
end
}
local expandAsGradients = {
function(g, ans, x, template)
local sizes = torch.size(x):totable()
local out = g
for dim,size in pairs(sizes) do
if size == 1 then
out = torch.sum(out, dim)
end
end
return out
end,
function(g, ans, x,template)
return nil
end
}
overload.module("torch", torch, function(module)
local tensorTypes = {"FloatTensor", "DoubleTensor", "CudaTensor"}
for i = 1, #tensorTypes do
local tt = tensorTypes[i]
if torch[tt] ~= nil then
module.class(tt, function(class)
for k, v in pairs(operators) do
class.operator(k, v)
end
class.gradient("cat", functions.catGradient)
class.initializer("new")
class.static("dim", "size", "nDimension", "nElement")
class.gradient("view", viewGradients)
class.gradient("viewAs", viewAsGradients)
class.gradient("expand", expandGradients)
class.gradient("expandAs", expandAsGradients)
class.defaultUnsupported()
end)
end
end
module.gradient("add", {
function(g, ans, x, y) return unbroadcast(g,ans,x) end,
function(g, ans, x, y) return unbroadcast(g,ans,y) end
})
module.gradient("cmul", {
function(g, ans, x, y) return unbroadcast(elemwiseMul(y, g), ans, x) end,
function(g, ans, x, y) return unbroadcast(elemwiseMul(x, g), ans, y) end,
})
module.gradient("mul", {
function(g, ans, x, y) return unbroadcast(elemwiseMul(y,g),ans,x) end,
function(g, ans, x, y) return unbroadcast(elemwiseMul(x,g),ans,y) end,
})
module.gradient("div", {
function(g, ans, x, y) return unbroadcast(elemwiseDiv(g, y),ans,x) end,
function(g, ans, x, y) return unbroadcast(elemwiseMul(-g, elemwiseDiv(x, torch.pow(y, 2))), ans, y) end,
})
module.gradient("cdiv", {
function(g, ans, x, y) return unbroadcast(elemwiseDiv(g, y), ans, x) end,
function(g, ans, x, y) return unbroadcast(elemwiseMul(-g,elemwiseDiv(x, torch.pow(y, 2))), ans, y) end,
})
module.gradient("pow", {
function(g, ans, x, y)
local newg = elemwiseMul(elemwiseMul(g,y),torch.pow(x,y-1))
return unbroadcast(newg, ans, x)
end,
function(g, ans, x, y)
local newg = elemwiseMul(g,elemwiseMul(torch.log(x),torch.pow(x,y)))
return unbroadcast(newg, ans, y)
end
})
module.gradient("ger", {
-- Only takes 1D vectors as input
function(g, ans, x, y) return g * y end,
function(g, ans, x, y) return torch.t(g) * x end
})
module.gradient("inverse", {
function(g, ans, x) return -((torch.t(ans) * g) * torch.t(ans)) end,
})
module.gradient("exp", {
function(g, ans, x) return elemwiseMul(ans, g) end,
})
module.gradient("tanh", {
function(g, ans, x)
local mzz = 1 - elemwiseMul(ans,ans)
return elemwiseMul(g, mzz)
end
})
module.gradient("sinh", {
function(g, ans, x)
return elemwiseMul(g, torch.cosh(x))
end
})
module.gradient("cosh", {
function(g, ans, x)
return elemwiseMul(g, torch.sinh(x))
end
})
module.gradient("abs", {
function(g, ans, x)
if torch.isTensor(x) then
return elemwiseMul(g,torch.sign(x))
else
-- scalar case: d|x|/dx is the sign of x (taken as 0 at x == 0)
local sign = x > 0 and 1 or (x < 0 and -1 or 0)
return elemwiseMul(g, sign)
end
end
})
module.gradient("clamp", {
function(g, ans, x, minVal, maxVal)
-- NOTE: could do a casting and then multiply for 2nd order divs. This is more efficient for now.
local mask = torch.typeAs(torch.eq(torch.ne(ans,minVal),torch.ne(ans,maxVal)), g)
return elemwiseMul(g, mask)
end,
})
module.gradient("contiguous", {
function(g,ans,x)
return g
end
})
module.gradient("cat", functions.catGradient)
module.gradient("expand", expandGradients)
module.gradient("expandAs", expandAsGradients)
module.gradient("view", viewGradients)
module.gradient("viewAs", viewAsGradients)
module.gradient("clone", {
function(g, ans, x)
return g
end,
})
module.gradient("copy", {
function(g, ans, x, y)
return g
end,
function(g, ans, x, y)
return g
end,
})
module.gradient("select", {
function(g, ans, x,dim,index)
return util.selectSliceCopy(g, x, dim, index)
end
})
module.gradient("index", {
function(g, ans, x,dim,index)
return util.indexAdd(g, x, dim, index)
end
})
module.gradient("narrow", {
function(g, ans, x,dim,index,size)
return util.narrowSliceCopy(g, x, dim, index, size)
end
})
module.gradient("reshape", {
function(g, ans, x, ...)
return torch.viewAs(g, x)
end,
function(g, ans, x, ...) return nil end,
function(g, ans, x, ...) return nil end,
function(g, ans, x, ...) return nil end,
function(g, ans, x, ...) return nil end, -- 4D is enough; beyond that you have to pass a LongTensor for the shape
})
module.gradient("sum", {
function(g, ans, x,axis)
local repeater = repeatToMatchShape(x, axis)
return repeater(g)
end
})
module.gradient("mean", {
function(g,ans,x,axis)
local repeater,nrepeats = repeatToMatchShape(x,axis)
return repeater(g)/nrepeats
end
})
module.gradient("sqrt", {
function(g, ans, x) return elemwiseMul(elemwiseMul(g,0.5), torch.pow(x,-0.5)) end
})
module.gradient("sin", {
function(g, ans, x) return elemwiseMul(g, torch.cos(x)) end
})
module.gradient("cos", {
function(g, ans, x) return elemwiseMul(g, -torch.sin(x)) end
})
module.gradient("tan", {
function(g, ans, x) return elemwiseDiv(g, torch.pow(torch.cos(x), 2.0)) end
})
module.gradient("log", {
function(g, ans, x) return elemwiseDiv(g,x) end
})
module.gradient("log1p", {
function(g, ans, x) return elemwiseDiv(g,x + 1) end
})
module.gradient("min", {
function(g, ans, x,axis)
local repeater = repeatToMatchShape(x,axis)
local out = util.setNotEqual(x, repeater(ans), 0, repeater(g))
return out
end
})
module.gradient("max", {
function(g, ans, x,axis)
local repeater = repeatToMatchShape(x,axis)
local out = util.setNotEqual(x, repeater(ans), 0, repeater(g))
return out
end
})
module.gradient("cmin", {
function(g, ans, x, y)
return util.setNotEqual(x, ans, 0, g)
end,
function(g, ans, x, y)
return util.setNotEqual(y, ans, 0, g)
end
})
module.gradient("cmax", {
function(g, ans, x, y)
return util.setNotEqual(x, ans, 0, g)
end,
function(g, ans, x, y)
return util.setNotEqual(y, ans, 0, g)
end
})
module.gradient("transpose", {
function(g, ans, x, d1, d2)
return torch.transpose(g, d1, d2)
end,
function(g, ans, x, d1, d2)
return nil
end,
function(g, ans, x, d1, d2)
return nil
end
})
module.gradient("t", {
function(g, ans, x)
return torch.t(g)
end,
})
module.gradient("long", {
function(g, ans, x)
return torch.typeAs(g, x)
end
})
module.gradient("typeAs", {
function(g, ans, x, y)
return torch.typeAs(g, x)
end,
function(g, ans, x, y)
return nil
end
})
module.gradient("repeatTensor", {
function(g, ans, x, ...)
local Dg = torch.nDimension(g)
local Dx = torch.nDimension(x)
for i=Dx,1,-1 do
local D = torch.nDimension(g)
local c = util.cat(torch.split(g,torch.size(x,i), Dg-Dx+i), D+1)
g = torch.squeeze(torch.sum(c,D+1))
end
for i=1,Dg-Dx do
g = torch.squeeze(torch.sum(g,1))
end
return g
end,
function(g, ans, ...) return nil end,
function(g, ans, ...) return nil end,
function(g, ans, ...) return nil end,
function(g, ans, ...) return nil end,
function(g, ans, ...) return nil end, -- five dimensions should be enough
})
module.gradient("squeeze", {
function(g, ans, x)
return torch.viewAs(g, x)
end
})
module.gradient("sigmoid", {
function(g, ans, x)
local p = elemwiseMul(1-ans,ans)
return elemwiseMul(g, p)
end
})
-- module.gradient("split", {
-- function(g, ans, x, size, dim)
-- -- TODO: untested, not sure if we support table output
-- return torch.cat(g, dim)
-- end,
-- function(g, ans, x, size, dim) return nil end,
-- function(g, ans, x, size, dim) return nil end,
-- })
module.gradient("gather", {
function(g, ans, x, dim, index) return torch.scatter(util.zerosLike(x), dim, index, g) end,
})
module.gradient("scatter", {
function(g, ans, x, dim, index, val) return nil end,
function(g, ans, x, dim, index, val) return nil end,
function(g, ans, x, dim, index, val) return nil end,
function(g, ans, x, dim, index, val) return torch.gather(g, dim, index) end,
})
module.gradient("bmm", {
function(g, ans, x, y) return torch.bmm(g, torch.transpose(y, 3, 2)) end,
function(g, ans, x, y) return torch.bmm(torch.transpose(x, 3, 2), g) end,
})
module.gradient("baddbmm", {
-- baddbmm has three possible patterns:
-- 1.) M X Y
-- 2.) v1 M X Y
-- 3.) v1 M v2 X Y
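-- Rough correspondence to the torch calls (argument names here are illustrative only):
--   pattern 1: torch.baddbmm(M, X, Y)          -- ans_i = M_i + X_i * Y_i
--   pattern 2: torch.baddbmm(v1, M, X, Y)      -- ans_i = v1 * M_i + X_i * Y_i
--   pattern 3: torch.baddbmm(v1, M, v2, X, Y)  -- ans_i = v1 * M_i + v2 * X_i * Y_i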
function(g, ans, a1, a2, a3, a4, a5)
-- grad wrt a1
if torch.isTensor(a1) then
-- pattern 1
return g
else
-- patterns 2 and 3
return torch.sum(elemwiseMul(g, a2))
end
end,
function(g, ans, a1, a2, a3, a4, a5)
-- grad wrt a2
if torch.isTensor(a1) then
-- pattern 1
return torch.bmm(g, torch.transpose(a3, 3, 2))
else
-- patterns 2 and 3
return elemwiseMul(g, a1)
end
end,
function(g, ans, a1, a2, a3, a4, a5)
-- grad wrt a3
if torch.isTensor(a1) then
-- pattern 1
return torch.bmm(torch.transpose(a2, 3, 2), g)
elseif torch.isTensor(a3) then
-- pattern 2
return torch.bmm(g, torch.transpose(a4, 3, 2))
else
-- pattern 3
return torch.sum(elemwiseMul(g, torch.bmm(a4, a5)))
end
end,
function(g, ans, a1, a2, a3, a4, a5)
-- grad wrt a4
if torch.isTensor(a3) then
-- pattern 2
return torch.bmm(torch.transpose(a3, 3, 2), g)
else
-- pattern 3
return elemwiseMul(torch.bmm(g, torch.transpose(a5, 3, 2)), a3)
end
end,
function(g, ans, a1, a2, a3, a4, a5)
-- grad wrt a5
-- pattern 3
return elemwiseMul(torch.bmm(torch.transpose(a4, 3, 2), g), a3)
end,
})
-- Zero gradients
module.gradient("lt", zeroGradient())
module.gradient("le", zeroGradient())
module.gradient("gt", zeroGradient())
module.gradient("ge", zeroGradient())
module.gradient("eq", zeroGradient())
module.gradient("ne", zeroGradient())
module.gradient("all", zeroGradient())
module.gradient("any", zeroGradient())
module.gradient("floor", zeroGradient())
module.gradient("ceil", zeroGradient())
module.gradient("round", zeroGradient())
module.gradient("sign", zeroGradient())
module.initializer("new", "bernoulli", "uniform", "normal", "random", "zeros", "zero", "eye", "ones", "rand", "multinomial")
module.static("size", "isTensor", "nDimension", "nElement", "isSameSizeAs", "setmetatable", "getmetatable", "type")
module.ignore("typename")
module.dynamic("split")
module.defaultUnsupported()
end)
overload.module("Value", Value, function(module)
for k, v in pairs(operators) do
module.operator(k, v)
end
module.gradient("__internal_get", functions.get)
module.gradient("__internal_set", functions.set)
end)
overload.module("DirectNode", DirectNode, function(module)
for k, v in pairs(operators) do
module.operator(k, v)
end
module.gradient("__internal_get", functions.get)
module.gradient("__internal_set", functions.set)
end)
overload.module("util", util, function(module)
module.gradient("fill", {
function(g, ans, template, x)
return nil
end,
function(g, ans, template, x)
return torch.sum(g)
end,
})
module.gradient("selectSliceCopy", {
function(g, ans, x, template, dim, index)
return torch.select(g, dim, index)
end,
function(g, ans, x, template, dim, index) return nil end,
function(g, ans, x, template, dim, index) return nil end,
function(g, ans, x, template, dim, index) return nil end,
})
module.gradient("narrowSliceCopy", {
function(g, ans, x, template, dim, index, size)
return torch.narrow(g, dim, index, size)
end,
function(g, ans, x, template, dim, index, size) return nil end,
function(g, ans, x, template, dim, index, size) return nil end,
function(g, ans, x, template, dim, index, size) return nil end,
function(g, ans, x, template, dim, index, size) return nil end,
})
module.gradient("indexAdd", {
function(g, ans, x, template, dim, index)
return torch.index(g, dim, index)
end,
function(g, ans, x, template, dim, index) return nil end,
function(g, ans, x, template, dim, index) return nil end,
function(g, ans, x, template, dim, index) return nil end,
})
module.gradient("cat", functions.catGradient)
module.gradient("lt", zeroGradient(2))
module.gradient("le", zeroGradient(2))
module.gradient("gt", zeroGradient(2))
module.gradient("ge", zeroGradient(2))
module.gradient("eq", zeroGradient(2))
module.initializer("newTensorLike", "zerosLike")
end)
<|start_filename|>src/init.lua<|end_filename|>
-- Base package
local autograd = require 'autograd.main'
-- Meta info
autograd.VERSION = '0.1'
autograd.LICENSE = 'Apache 2.0'
-- Sub packages:
autograd.nnwrapper = require 'autograd.nnwrapper'
autograd.functionalize = autograd.nnwrapper.functionalize
autograd.nn = autograd.functionalize('nn')
autograd.nn.AutoModule = require 'autograd.auto.AutoModule'
autograd.nn.AutoCriterion = require 'autograd.auto.AutoCriterion'
autograd.auto = require 'autograd.auto'
autograd.auto.factory = autograd.auto.factory or torch.factory
torch.factory = function(className)
if className:find('autograd%.nn%.module%.') and autograd.auto.factory then
autograd.nn.AutoModule(className:gsub('autograd%.nn%.module%.',''))
end
if className:find('autograd%.nn%.criterion%.') and autograd.auto.factory then
autograd.nn.AutoCriterion(className:gsub('autograd%.nn%.criterion%.',''))
end
return autograd.auto.factory(className)
end
autograd.model = require 'autograd.model'
autograd.module = require 'autograd.module'
autograd.loss = require 'autograd.loss'
autograd.util = require 'autograd.util'
autograd.optim = require 'autograd.optim'
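-- Minimal usage sketch (mirrors examples/print-generatedcode.lua in this repo;
-- `f` and `params` are assumed to be defined by the caller, with f(params, ...)
-- returning a scalar loss as its first value):
--   local autograd = require 'autograd'
--   local df = autograd(f)                       -- or autograd(f, {optimize = true})
--   local grads, loss = df(params, input, target)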
-- Return package
return autograd
<|start_filename|>src/runtime/direct/Profiler.lua<|end_filename|>
local util = require 'autograd.util'
local Profiler = { }
Profiler.__index = Profiler
function Profiler.new()
local p = { }
p.lineMap = { }
p.entries = { }
p.times = 0
setmetatable(p, Profiler)
return p
end
function Profiler:mark(fun, level)
local name = fun.name
if fun.raw then
name = fun.raw.__typename
if name == nil or name == "" then
name = "(nn object)"
end
end
local di = debug.getinfo(level + 1)
local line = di.short_src .. ":" .. di.currentline
local fnMap = self.lineMap[line]
if fnMap == nil then
fnMap = { }
self.lineMap[line] = fnMap
end
local entryIndex = fnMap[name]
if entryIndex == nil then
entryIndex = #self.entries + 1
self.entries[entryIndex] = {
debuginfo = di,
name = name,
line = line,
forwardTime = 0,
backwardTime = 0
}
fnMap[name] = entryIndex
end
return entryIndex
end
function Profiler:markCycle()
self.times = self.times + 1
end
function Profiler:measureForward(id, time)
self.entries[id].forwardTime = self.entries[id].forwardTime + time
end
function Profiler:measureBackward(id, time)
self.entries[id].backwardTime = self.entries[id].backwardTime + time
end
local function pctStr(n, tot)
return tostring(math.floor((n / tot) * 100.0)) .. "%"
end
local function padMin(s, min)
if #s < min then
return s .. string.rep(" ", min - #s)
end
return s
end
function Profiler:printReport(type)
local totalForward = 0
local totalBackward = 0
for i = 1, #self.entries do
local t = self.entries[i]
totalForward = totalForward + t.forwardTime
totalBackward = totalBackward + t.backwardTime
end
local timeSorted = util.shallowCopy(self.entries)
table.sort(timeSorted, function(a, b)
return (a.forwardTime + a.backwardTime) > (b.forwardTime + b.backwardTime)
end)
print("")
print(string.format("[autograd] average forward time: %.2fms", (totalForward / (self.times + 1)) * 1000.0))
print(string.format("[autograd] average backward time: %.2fms", (totalBackward / (self.times + 1)) * 1000.0))
print(string.format("[autograd] average overall time: %.2fms", ((totalForward + totalBackward) / (self.times + 1)) * 1000.0))
print("[autograd] top operations:")
if type == "detailed" then
print("[autograd] " .. string.rep("=", 80))
print("[autograd] " .. padMin("name", 20), "fwd", "bwd", "ovr", "line")
print("[autograd] " .. string.rep("=", 80))
for i = 1, math.min(10, #timeSorted) do
local t = timeSorted[i]
print("[autograd] " .. padMin(t.name, 20), pctStr(t.forwardTime, totalForward), pctStr(t.backwardTime, totalBackward), pctStr(t.forwardTime + t.backwardTime, totalForward + totalBackward), t.line)
end
end
print("")
end
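-- Intended call pattern (a sketch based on how DirectTape uses this class;
-- `fnDesc` and `elapsed` are placeholders for the caller's values):
--   local p = Profiler.new()
--   local id = p:mark(fnDesc, 2)        -- register the op for the current call site
--   p:measureForward(id, elapsed)       -- accumulate forward time (seconds)
--   p:measureBackward(id, elapsed)      -- accumulate backward time (seconds)
--   p:markCycle()                       -- call once per forward/backward cycle
--   p:printReport("detailed")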
return Profiler
<|start_filename|>examples/print-generatedcode.lua<|end_filename|>
require 'torch'
autograd = require 'autograd'
-- Just a standard MLP, for demonstration purposes
----------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------
local inputSize = 1024
local classes = {0,1,2,3,4,5,6,7,8,9}
-- What model to train:
local predict,f,params
-- Define our neural net
function predict(params, input, target)
local h1 = torch.tanh(input * params.W[1] + params.B[1])
local h2 = torch.tanh(h1 * params.W[2] + params.B[2])
local h3 = h2 * params.W[3] + params.B[3]
local out = autograd.util.logSoftMax(h3)
return out
end
-- Define our training loss
function f(params, input, target)
local prediction = predict(params, input, target)
local loss = autograd.loss.logMultinomialLoss(prediction, target)
return loss, prediction
end
-- Define our parameters
-- [-1/sqrt(#output), 1/sqrt(#output)]
torch.manualSeed(0)
local W1 = torch.Tensor(inputSize,50):uniform(-1/math.sqrt(50),1/math.sqrt(50))
local B1 = torch.Tensor(50):fill(0)
local W2 = torch.Tensor(50,50):uniform(-1/math.sqrt(50),1/math.sqrt(50))
local B2 = torch.Tensor(50):fill(0)
local W3 = torch.Tensor(50,#classes):uniform(-1/math.sqrt(#classes),1/math.sqrt(#classes))
local B3 = torch.Tensor(#classes):fill(0)
-- Trainable parameters:
params = {
W = {W1, W2, W3},
B = {B1, B2, B3},
}
input = torch.randn(1,inputSize)
target = torch.zeros(1,#classes)
target[1][3] = 1
----------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------
print("-------------------------------------------------------------------------")
print("-------------------------- FORWARD PASS ONLY ----------------------------")
print("-------------------------------------------------------------------------")
g = autograd(f,{showCode=true,optimize=true,withGradients=false,withForward=true})
local tmp = g(params,input,target)
print("-------------------------------------------------------------------------")
print("\n\n\n\n\n")
print("-------------------------------------------------------------------------")
print("-------------------------- FORWARD + BACKWARD ---------------------------")
print("-------------------------------------------------------------------------")
print("Forward and backward pass")
g = autograd(f,{showCode=true,optimize=true,withGradients=true,withForward=true})
local tmp = g(params,input,target)
print("-------------------------------------------------------------------------")
<|start_filename|>src/runtime/codegen/Graph.lua<|end_filename|>
local Graph = { }
Graph.__index = Graph
local overload = require 'autograd.overload'
local Node = require 'autograd.runtime.codegen.Node'
local Value = require 'autograd.runtime.codegen.Value'
local Source = require 'autograd.runtime.codegen.Source'
local MutationFlow = require 'autograd.runtime.codegen.MutationFlow'
local util = require 'autograd.util'
local nodeDebugger
local applyDepth = 0
local reentryDepth = 0
local mutationFlow = MutationFlow.new()
local function overloadHook(fn, gradFn, ...)
local inputs = {...}
applyDepth = applyDepth + 1
if reentryDepth ~= 0 and applyDepth == 1 and fn.capture then
if fn.unsupported then
error("function " .. fn.name .. " not currently supported by autograd")
end
local n = Node.new(fn, gradFn, inputs, mutationFlow)
local values = {n:evaluateForward(mutationFlow)}
if nodeDebugger then
nodeDebugger.captureCallStack(n)
end
applyDepth = applyDepth - 1
return table.unpack(values)
else
local evalArgs = { }
for i = 1, #inputs do
if Value.isValue(inputs[i]) then
evalArgs[i] = inputs[i]:flatten()
else
evalArgs[i] = inputs[i]
end
end
local values = {fn.fn(table.unpack(evalArgs))}
applyDepth = applyDepth - 1
return table.unpack(values)
end
end
local function collectGradients(val, intermediateGrads, differentiableMap, grads)
grads = grads or { }
if Value.isValue(val) then
local rootSource = val.source:getRoot()
local gradient = intermediateGrads[val.source]
if gradient == nil and rootSource.type == Source.PARAM and differentiableMap[rootSource] then
-- This was a differentiable param, but we ended up with no gradient for it.
-- Return a zero gradient, but this could also be an error.
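-- For example (illustrative): if some tensor in the params table never influences
-- the returned loss, its entry still comes back as util.zerosLike(tensor) here
-- rather than being silently missing from the gradient table.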
if val.type == Value.TENSOR then
gradient = Value.from(util.zerosLike(val), Source.constant(val))
elseif val.type == Value.NUMBER then
gradient = Value.from(0.0, Source.constant(0))
end
intermediateGrads[val.source] = gradient
end
if gradient ~= nil then
grads[#grads + 1] = {
param = val,
grad = gradient
}
end
if val.type == Value.TABLE then
collectGradients(val:get(), intermediateGrads, differentiableMap, grads)
end
elseif type(val) == "table" then
for k, v in pairs(val) do
collectGradients(v, intermediateGrads, differentiableMap, grads)
end
end
return grads
end
local function convertOperators(execOrder)
for i = 1, #execOrder do
local node = execOrder[i]
if node.forwardFn.operator ~= nil then
local op = node.forwardFn.operator
if op == "mul" and #node.inputs == 2 then
if node.inputs[1].type == Value.TENSOR and node.inputs[2].type == Value.TENSOR then
local d1 = node.inputs[1].raw:nDimension()
local d2 = node.inputs[2].raw:nDimension()
if d1 == 2 and d2 == 2 then
node.forwardFn = { name = "torch.mm" }
elseif d1 == 2 and d2 == 1 then
node.forwardFn = { name = "torch.mv" }
elseif d1 == 1 and d2 == 1 then
node.forwardFn = { name = "torch.dot" }
end
elseif node.inputs[1].type == Value.TENSOR and node.inputs[2].type == Value.NUMBER then
node.forwardFn = { name = "torch.mul" }
elseif node.inputs[1].type == Value.NUMBER and node.inputs[2].type == Value.TENSOR then
node.forwardFn = { name = "torch.mul" }
node.inputs[1].source:changeNodeTargetIndex(node, 1, 2)
node.inputs[2].source:changeNodeTargetIndex(node, 2, 1)
local t1 = node.inputs[1]
node.inputs[1] = node.inputs[2]
node.inputs[2] = t1
end
elseif op == "add" and #node.inputs == 2 then
if node.inputs[1].type == Value.TENSOR and node.inputs[2].type == Value.TENSOR then
node.forwardFn = { name = "torch.add" }
elseif node.inputs[1].type == Value.TENSOR and node.inputs[2].type == Value.NUMBER then
node.forwardFn = { name = "torch.add" }
elseif node.inputs[1].type == Value.NUMBER and node.inputs[2].type == Value.TENSOR then
node.forwardFn = { name = "torch.add" }
end
elseif op == "unm" then
if node.inputs[1].type == Value.TENSOR then
node.forwardFn = { name = "torch.neg" }
end
end
end
end
end
local function replaceNode(nodeValue, withNodeValue, outputNodes)
local node = nodeValue.source.node
node:unlinkInputs()
local toRemove = { }
for k = 1, #node.outputs do
for i = 1, #node.outputTargets[k] do
toRemove[#toRemove + 1] = node.outputTargets[k][i].node
end
end
for i = 1, #toRemove do
toRemove[i]:replaceInput(nodeValue, withNodeValue)
end
local rootValues = outputNodes[node]
if rootValues ~= nil then
for i = 1, #rootValues do
if rootValues[i].source.type == Source.TABLE then
rootValues[i].source:changeRoot(withNodeValue.source)
else
rootValues[i].source = withNodeValue.source
end
end
outputNodes[replaceNode] = rootValues
outputNodes[node] = { }
end
end
local function removeIdentityOperators(execOrder, outputNodes)
for i = 1, #execOrder do
local node = execOrder[i]
if outputNodes[node] == nil then
local op = node.forwardFn.operator
if node.forwardFn.operator ~= nil then
if op == "mul" then
if node.inputs[1].source.type == Source.CONSTANT and node.inputs[1]:get() == 1 then
replaceNode(node.outputs[1], node.inputs[2], outputNodes)
elseif node.inputs[2].source.type == Source.CONSTANT and node.inputs[2]:get() == 1 then
replaceNode(node.outputs[1], node.inputs[1], outputNodes)
end
elseif op == "add" or op == "sub" then
if node.inputs[1].source.type == Source.CONSTANT and node.inputs[1]:get() == 0 then
replaceNode(node.outputs[1], node.inputs[2], outputNodes)
elseif node.inputs[2].source.type == Source.CONSTANT and node.inputs[2]:get() == 0 then
replaceNode(node.outputs[1], node.inputs[1], outputNodes)
end
end
end
end
end
end
local function convertSubtract(execOrder, outputNodes)
for i = 1, #execOrder do
local node = execOrder[i]
local op = node.forwardFn.operator
if op == "sub" and #node.inputs == 2 then
local unmNode = Node.new({ fn = function(a) return -a end, operator = "unm", name = "op.unm" }, nil, { node.inputs[2] })
local unmOutput = unmNode:evaluateForward()
local addNode = Node.new({ fn = function(a, b) return a + b end, operator = "add", name = "op.add" }, nil, { node.inputs[1], unmOutput })
local addOutput = addNode:evaluateForward()
replaceNode(node.outputs[1], addOutput, outputNodes)
execOrder[i] = addNode
table.insert(execOrder, i, unmNode)
end
end
end
local function pruneOutputs(execOrder, outputNodes)
for i = 1, #execOrder do
local node = execOrder[i]
if outputNodes[node] == nil then
for k = #node.outputs, 2, -1 do
if #node.outputTargets[k] == 0 then
table.remove(node.outputs, k)
else
break
end
end
end
end
end
local function walkNode(node, order, seen)
if seen[node] == nil then
seen[node] = true
for k = 1, #node.inputs do
local input = node.inputs[k]
if input.type == Value.TABLE then
for k, v in pairs(input:get()) do
local root = v.source:getRoot()
if root.type == Source.COMPUTED then
walkNode(root.node, order, seen)
end
end
else
local root = input.source:getRoot()
if root.type == Source.COMPUTED then
walkNode(root.node, order, seen)
end
end
end
table.insert(order, node)
end
end
local function walkOutputRoots(val, execOrder, seen, outputNodes)
seen = seen or { }
execOrder = execOrder or { }
if Value.isValue(val) then
local root = val.source:getRoot()
if root.type == Source.COMPUTED then
if outputNodes ~= nil then
local valueList = outputNodes[root.node]
if outputNodes[root.node] == nil then
valueList = { }
outputNodes[root.node] = valueList
end
valueList[#valueList + 1] = val
end
walkNode(val.source:getRoot().node, execOrder, seen)
end
elseif type(val) == "table" then
for k, subVal in pairs(val) do
walkOutputRoots(subVal, execOrder, seen, outputNodes)
end
end
return execOrder
end
function Graph:walkExecutionOrder(withForward, withGradients)
local seen = { }
local grads = self.grads
local answers = self.answers
local execOrder = { }
local outputNodes = { }
if util.defaultBool(withGradients, true) then
for i = 1, #grads do
walkOutputRoots(grads[i].grad, execOrder, seen, outputNodes)
end
end
if util.defaultBool(withForward, true) then
walkOutputRoots(answers, execOrder, seen, outputNodes)
end
local mutationRoots = { }
-- If this graph had any tensor mutations (x[k] = v), we have to make sure that any
-- uses of x prior to the mutation also appear in the execution order before the mutation.
-- Usually walking the graph makes no guarantees on order.
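-- Illustrative case: for user code like
--   local s = torch.sum(x)
--   x[1] = 0
-- the torch.sum(x) node must come before the x[1] = 0 node in the final order,
-- even though a plain dependency walk would not order them relative to each other.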
for i = 1, #self.mutationFlow.history do
local aliasOp = self.mutationFlow.history[i]
local root = aliasOp.from.source:getRoot()
if root.type == Source.COMPUTED then
local node = root.node
local targetArray = node.outputTargets[1]
for k = 1, #targetArray do
local target = targetArray[k]
local targetNode = target.node
if seen[targetNode] and targetNode.forwardFn.name ~= "Value.__internal_set" then
mutationRoots[#mutationRoots + 1] = targetNode
end
end
end
end
-- Re-evaluate the execution order, starting with the mutation roots, then walk normally.
if #mutationRoots > 0 then
local prevExecOrder = execOrder
seen = { }
execOrder = { }
for i = 1, #mutationRoots do
walkNode(mutationRoots[i], execOrder, seen)
end
for i = 1, #prevExecOrder do
walkNode(prevExecOrder[i], execOrder, seen)
end
end
return execOrder, outputNodes
end
function Graph.reentryDepth()
return reentryDepth
end
local function markDifferentiable(value, differentiableMap, args)
if Value.isValue(value) then
if value.type == Value.TABLE then
markDifferentiable(value:get(), differentiableMap)
else
differentiableMap[value.source] = true
end
elseif type(value) == "table" then
for k, v in pairs(value) do
markDifferentiable(v, differentiableMap)
end
end
end
function Graph.record(fn, args, opt)
local argnum = opt.argnum or 1
local debugger = opt.debugger
local partialGrad = util.defaultBool(opt.partialGrad, false)
local withGradients = util.defaultBool(opt.withGradients, true)
local values = { }
local differentiableMap = { }
for i = 1, #args do
-- Don't wrap the outer tables in Values, since it would interfere with the use of the # operator.
-- This creates some problems when referring to an entire param table in the generated code - it'll
-- be represented as a new literal table, but it's a good tradeoff until we move entirely to Lua 5.2
-- and can overload # on Value.
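-- Illustrative consequence: for a call like df(params, input), params is passed
-- through as a plain Lua table rather than a Value, so expressions such as
-- #params in user code keep working under Lua 5.1.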
values[i] = Value.from(args[i], Source.param(i), true)
if (i == argnum) then
markDifferentiable(values[i], differentiableMap, args)
end
end
-- Begin recording all torch operations.
overload.install(overloadHook)
applyDepth = 0
reentryDepth = reentryDepth + 1
if reentryDepth == 1 then
mutationFlow:clear()
end
nodeDebugger = debugger
-- Call user forward function.
local answers = { }
local grads = { }
local intermediateGrads = { }
local protectedFn = function()
answers = {fn(table.unpack(values))}
-- Figure out forward graph traversal order.
-- Only walk from the answer we need to differentiate (usually the first).
local forwardExecOrder = walkOutputRoots(answers[argnum])
if withGradients then
-- Walk the execution order backwards, chaining derivatives.
if answers[1].type == Value.TENSOR and opt.partialGrad then
intermediateGrads[answers[1].source] = values[#values]
elseif answers[1].type == Value.NUMBER then
intermediateGrads[answers[1].source] = Value.from(1, Source.constant(1))
else
error("invalid return value type from autograd function, autograd only supports scalar return values")
end
for i=#forwardExecOrder,1,-1 do
local node = forwardExecOrder[i]
node:evaluateBackward(mutationFlow, intermediateGrads, differentiableMap)
end
grads = collectGradients(values[argnum], intermediateGrads, differentiableMap)
end
return true
end
local ok, msg
if opt.protected then
ok, msg = pcall(protectedFn)
else
ok, msg = protectedFn()
end
-- End recording.
nodeDebugger = nil
reentryDepth = reentryDepth - 1
overload.uninstall()
if not ok then
error(msg)
end
local graph = {
mutationFlow = mutationFlow,
grads = grads,
params = values,
answers = answers,
intermediateGrads = intermediateGrads
}
setmetatable(graph, Graph)
return graph
end
function Graph:optimize()
local execOrder, outputNodes = self:walkExecutionOrder()
convertSubtract(execOrder, outputNodes)
removeIdentityOperators(execOrder, outputNodes)
convertOperators(execOrder)
pruneOutputs(execOrder, outputNodes)
end
return Graph
<|start_filename|>src/runtime/direct/DirectTape.lua<|end_filename|>
local isTensor = torch.isTensor
local overload = require 'autograd.overload'
local DirectNode = require 'autograd.runtime.direct.DirectNode'
local DirectTape = { }
local assignmentMap = { }
local reverseAssignmentMap = { }
local currentTape = { }
local currentProfiler = nil
-- A wrapper for a function.
-- Whenever we apply a function to arguments that may contain nodes,
-- we unwrap the values held in those nodes, apply the function to the
-- underlying values, and then wrap the result in a new node.
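-- Rough sketch of what this means in practice (names illustrative):
--   torch.tanh(node)  ->  nodeApply(tanhDesc, tanhGrads, node)
-- which pulls out node.value, computes fun.fn(node.value), and records the
-- result as a new entry on the current tape so the backward pass can replay it.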
local function nodeApply(fun, gradFun, ...)
local arg = {...}
local parent = nil
local values = { }
local ln = #arg
for k = 1, ln do
local v = arg[k]
if getmetatable(v) == DirectNode then
local alias = assignmentMap[v]
if alias ~= nil then
arg[k] = alias
parent = alias
values[#values + 1] = alias.value
else
parent = v
values[#values + 1] = v.value
end
elseif type(v) == "table" then
local tableValue = {}
for j,element in pairs(v) do
if getmetatable(element) == DirectNode then
parent = element
tableValue[j] = element.value
else
tableValue[j] = element
end
end
values[#values + 1] = tableValue
else
values[#values + 1] = v
end
end
if fun.capture and parent ~= nil then
if fun.unsupported then
error("function " .. fun.name .. " not currently supported by autograd")
end
local profileId = nil
local startTime = sys.clock()
local value = fun.fn(table.unpack(values))
if currentProfiler ~= nil then
local elapsedTime = sys.clock() - startTime
profileId = currentProfiler:mark(fun, 2)
currentProfiler:measureForward(profileId, elapsedTime)
end
local node = nil
local tape = currentTape
local o = tape[tape.nextIndex]
if o ~= nil then
o.value = value
o.fun = fun
o.gradFun = gradFun
o.args = arg
o.profileId = profileId
o.outgrad = nil
o.argValues = values
tape.nextIndex = tape.nextIndex + 1
if fun.name == "DirectNode.__internal_set" then
local reverse = reverseAssignmentMap[arg[1]]
if reverse ~= nil then
assignmentMap[reverse] = o
end
reverseAssignmentMap[o] = arg[1]
assignmentMap[arg[1]] = o
end
return o
end
local newNode = DirectNode:init(value, fun, gradFun, arg, values, tape)
newNode.profileId = profileId
if fun.name == "DirectNode.__internal_set" then
local reverse = reverseAssignmentMap[arg[1]]
if reverse ~= nil then
assignmentMap[reverse] = newNode
end
assignmentMap[arg[1]] = newNode
end
return newNode
else
return fun.fn(table.unpack(values))
end
end
function DirectTape.funOnly(fun, tape, argnum, ...)
local arg = {...}
local tape = tape or {}
tape.nextIndex = 1
-- If our target argument is a table, we'll need to walk its members and node-ify them.
-- For now, if we see a number or a tensor we node-ify it directly; if it's a table,
-- we try to walk it.
currentTape = tape
arg[argnum] = DirectNode.newStartNode(arg[argnum], tape)
overload.install(nodeApply)
assignmentMap = { }
reverseAssignmentMap = { }
local allAns = {fun(table.unpack(arg))}
overload.uninstall()
local ans = allAns[1]
if not DirectNode.isNode(ans) then
error("A node type was not returned. This is either because a gradient was not defined, or the input is independent of the output")
end
-- Now spit out the grads, along with any answers returned along the way
local out = {}
local ansVal = DirectNode.getValue(allAns)
if type(allAns) == "table" then
for key,value in pairs(ansVal) do
out[#out+1] = DirectNode.getValue(value)
end
else
out[1] = ansVal
end
return arg, allAns, tape, table.unpack(out)
end
function DirectTape.gradOnly(tape, arg, argnum, allAns, gradOutput)
local ans = allAns[argnum]
ans.outgrad = gradOutput
for i=tape.nextIndex-1,1,-1 do
local node = tape[i]
local elapsedTime = 0
for iarg=#node.args,1,-1 do
local thisArg = node.args[iarg]
if getmetatable(thisArg) == DirectNode then
if node.outgrad == nil then
if isTensor(node.value) then
node.outgrad = node.value.new(node.value:size()):zero()
elseif type(node.value) == "number" then
node.outgrad = 0.0
end
end
local gf = (node.gradFun or {})[iarg]
if gf ~= nil then
local startTime = sys.clock()
local gradUpdate = (gf)(node.outgrad, node.value, table.unpack(node.argValues))
elapsedTime = elapsedTime + (sys.clock() - startTime)
if gradUpdate then
if thisArg.outgrad == nil or thisArg.outgrad == 0 then
thisArg.outgrad = gradUpdate
elseif torch.isTensor(thisArg.outgrad) then
thisArg.outgrad:add(gradUpdate)
else
thisArg.outgrad = thisArg.outgrad + gradUpdate
end
end
elseif node.fun.differentiable then
error("missing gradient for argument " .. tostring(iarg) .. " in function " .. node.fun.name)
end
-- Special-casing table-valued arguments that contain nodes
-- right now, this is just torch.cat
elseif type(thisArg) == "table" then
local hasNode = false
for k, v in pairs(thisArg) do
if getmetatable(v) == DirectNode then
hasNode = true
break
end
end
if hasNode then
if node.outgrad == nil then
if isTensor(node.value) then
node.outgrad = node.value.new(node.value:size()):zero()
elseif type(node.value) == "number" then
node.outgrad = 0.0
end
end
local startTime = sys.clock()
local gradUpdate = (node.gradFun[iarg])(node.outgrad, node.value, table.unpack(node.argValues))
elapsedTime = elapsedTime + (sys.clock() - startTime)
local la = #thisArg
for isubArg=1,la do
if gradUpdate[isubArg] then
local thisSubArg = thisArg[isubArg]
if getmetatable(thisSubArg) == DirectNode then
if thisSubArg.outgrad == nil or thisSubArg.outgrad == 0 then
thisSubArg.outgrad = gradUpdate[isubArg]
else
thisSubArg.outgrad = thisSubArg.outgrad + gradUpdate[isubArg]
end
end
end
end
end
end
end
if currentProfiler ~= nil and node.profileId ~= nil then
currentProfiler:measureBackward(node.profileId, elapsedTime)
end
end
-- Now spit out the grads
local out = DirectNode.getOutgrad(arg[argnum])
return out
end
local lastTape = { }
-- Step through the computation graph and find the gradient
function DirectTape.grad(fun, argnum, partialGrad, profiler, ...)
currentProfiler = profiler
local all = {DirectTape.funOnly(fun, lastTape, argnum, ...)}
local arg, allAns, tape, out = all[1], all[2], all[3], all[4]
local ans = allAns[1]
if partialGrad == nil and type(DirectNode.getValue(ans)) ~= "number" then
print("")
print("Autograd only supports scalar outputs. This is current functions output: ")
print(DirectNode.getValue(ans))
error("Autograd only supports scalar return values. Output is not scalar")
end
partialGrad = partialGrad or 1.0
local go = DirectTape.gradOnly(tape, arg, argnum, allAns, partialGrad)
local fout = {}
fout[1] = go
local ansVal = DirectNode.getValue(allAns)
if type(allAns) == "table" then
for key,value in pairs(ansVal) do
fout[#fout+1] = DirectNode.getValue(value)
end
else
fout[2] = ansVal
end
if currentProfiler ~= nil then
currentProfiler:markCycle()
end
currentProfiler = nil
return table.unpack(fout)
end
return DirectTape
<|start_filename|>src/overload.lua<|end_filename|>
local overloads = { }
local nodeApply = nil
local nnwrapper = require 'autograd.nnwrapper'
local toRegister = { }
local function module(name, table, fn)
toRegister[#toRegister + 1] = function()
local mm = {
name = name,
table = table,
functions = { },
classes = { }
}
local supported = { }
local overload = function(table, fnName, gradFn, capture, differentiable, unsupported)
local old = table[fnName]
if old ~= nil then
local fnDesc = {
name = name .. "." .. fnName,
differentiable = differentiable,
fn = old,
capture = capture,
unsupported = unsupported,
}
local newFn = function(...)
return nodeApply(fnDesc, gradFn, ...)
end
return {
name = fnName,
newFn = newFn,
oldFn = old
}
end
end
local overloadClass = function(table, className, fnName, gradFn, capture, differentiable, unsupported)
local old = table[fnName]
if old ~= nil then
local fnDesc = {
name = name .. "." .. className .. "." .. fnName,
differentiable = differentiable,
fn = old,
capture = capture,
unsupported = unsupported,
}
local newFn = function(...)
return nodeApply(fnDesc, gradFn, ...)
end
return {
name = fnName,
newFn = newFn,
oldFn = old
}
end
end
local overloadOp = function(table, opName, gradFn)
local fnName = "__" .. opName
local old = table[fnName]
if old ~= nil then
local fnDesc = {
name = "op." .. fnName,
operator = opName,
differentiable = true,
capture = true,
fn = old
}
local newFn
if opName == "unm" then
newFn = function(a)
return nodeApply(fnDesc, gradFn, a)
end
else
newFn = function(a, b)
return nodeApply(fnDesc, gradFn, a, b)
end
end
return {
name = fnName,
newFn = newFn,
oldFn = old
}
end
end
local moduleFns = {
gradient = function(fnName, gradFn)
local fn = overload(table, fnName, gradFn, true, true, false)
supported[fnName] = true
mm.functions[#mm.functions + 1] = fn
end,
dynamic = function(...)
local arg = {...}
for i = 1, #arg do
local fnName = arg[i]
local fn = overload(table, fnName, nil, true, true, false)
supported[fnName] = true
mm.functions[#mm.functions + 1] = fn
end
end,
initializer = function(...)
local arg = {...}
for i = 1, #arg do
local fnName = arg[i]
local fn = overload(table, fnName, nil, true, false, false)
supported[fnName] = true
mm.functions[#mm.functions + 1] = fn
end
end,
static = function(...)
local arg = {...}
for i = 1, #arg do
local fnName = arg[i]
local fn = overload(table, fnName, nil, false, false, false)
supported[fnName] = true
mm.functions[#mm.functions + 1] = fn
end
end,
unsupported = function(...)
local arg = {...}
for i = 1, #arg do
local fnName = arg[i]
local fn = overload(table, fnName, nil, true, false, true)
supported[fnName] = true
mm.functions[#mm.functions + 1] = fn
end
end,
ignore = function(...)
local arg = {...}
for i = 1, #arg do
local fnName = arg[i]
supported[fnName] = true
end
end,
operator = function(opName, gradFn)
local fn = overloadOp(table, opName, gradFn)
supported[opName] = true
mm.functions[#mm.functions + 1] = fn
end,
defaultUnsupported = function()
for k, v in pairs(table) do
if supported[k] == nil and type(v) == "function" and string.sub(k, 1, 2) ~= "__" then
local fn = overload(table, k, nil, true, false, true)
mm.functions[#mm.functions + 1] = fn
end
end
end,
class = function(className, fn)
local classTable = table[className]
local cc = {
name = className,
functions = { }
}
local supported = { }
local classFns = {
operator = function(opName, gradFn)
local fn = overloadOp(classTable, opName, gradFn)
supported[opName] = true
cc.functions[#cc.functions + 1] = fn
end,
gradient = function(fnName, gradFn)
local fn = overloadClass(classTable, className, fnName, gradFn, true, true, false)
supported[fnName] = true
cc.functions[#cc.functions + 1] = fn
end,
dynamic = function(...)
local arg = {...}
for i = 1, #arg do
local fnName = arg[i]
local fn = overloadClass(classTable, className, fnName, nil, true, true, false)
supported[fnName] = true
cc.functions[#cc.functions + 1] = fn
end
end,
initializer = function(...)
local arg = {...}
for i = 1, #arg do
local fnName = arg[i]
local fn = overloadClass(classTable, className, fnName, nil, true, false, false)
supported[fnName] = true
cc.functions[#cc.functions + 1] = fn
end
end,
static = function(...)
local arg = {...}
for i = 1, #arg do
local fnName = arg[i]
local fn = overloadClass(classTable, className, fnName, nil, false, false, false)
supported[fnName] = true
cc.functions[#cc.functions + 1] = fn
end
end,
unsupported = function(...)
local arg = {...}
for i = 1, #arg do
local fnName = arg[i]
local fn = overloadClass(classTable, className, fnName, nil, true, false, true)
supported[fnName] = true
cc.functions[#cc.functions + 1] = fn
end
end,
defaultUnsupported = function()
local mt = getmetatable(classTable)
for k, v in pairs(mt) do
if supported[k] == nil and type(v) == "function" and string.sub(k, 1, 2) ~= "__" then
local fn = overloadClass(classTable, className, k, nil, true, false, true)
cc.functions[#cc.functions + 1] = fn
end
end
end
}
fn(classFns)
mm.classes[#mm.classes + 1] = cc
end
}
fn(moduleFns)
overloads[#overloads + 1] = mm
end
end
local installDepth = 0
local function install(fn)
installDepth = installDepth + 1
if installDepth ~= 1 then
return
end
if #toRegister > 0 then
for i = 1, #toRegister do
toRegister[i]()
end
toRegister = { }
end
nnwrapper.setApplyFn(fn)
nodeApply = fn
for i = 1, #overloads do
local mm = overloads[i]
for k = 1, #mm.functions do
local fn = mm.functions[k]
mm.table[fn.name] = fn.newFn
end
for k = 1, #mm.classes do
local cc = mm.classes[k]
local mt = torch.getmetatable('torch.' .. cc.name)
for f = 1, #cc.functions do
local fn = cc.functions[f]
rawset(mt, fn.name, fn.newFn)
end
end
end
end
local function uninstall()
installDepth = installDepth - 1
if installDepth ~= 0 then
return
end
nnwrapper.setApplyFn(nil)
for i = 1, #overloads do
local mm = overloads[i]
for k = 1, #mm.functions do
local fn = mm.functions[k]
mm.table[fn.name] = fn.oldFn
end
for k = 1, #mm.classes do
local cc = mm.classes[k]
local mt = torch.getmetatable('torch.' .. cc.name)
for f = 1, #cc.functions do
local fn = cc.functions[f]
rawset(mt, fn.name, fn.oldFn)
end
end
end
end
-- Main functions:
local overload = {
install = install,
uninstall = uninstall,
module = module
}
-- Return package
return overload
<|start_filename|>src/module/init.lua<|end_filename|>
-- autograd native modules
local module = {
LayerNormalization = require 'autograd.module.LayerNormalization',
MaskedBatchNormalization = require 'autograd.module.MaskedBatchNormalization',
SoftAttention = require 'autograd.module.SoftAttention'
}
return module
<|start_filename|>examples/get-penn.lua<|end_filename|>
-- Dictionary
local word2id = {}
local id2word = {}
-- Load txt file:
local function loadDataset(path)
-- Parse words:
local data = io.open(path):read('*all')
data = data:gsub('\n','<eos>')
local tokens = stringx.split(data)
-- Build dictionary:
local id = 1
local ids = torch.FloatTensor(#tokens)
for i,token in ipairs(tokens) do
if not word2id[token] then
word2id[token] = id
id2word[id] = token
id = id + 1
end
ids[i] = word2id[token]
end
-- Final dataset:
return ids
end
-- Get/create dataset
local function setupData()
-- Fetch from Amazon
if not path.exists(sys.fpath()..'/penn') then
os.execute[[
curl https://s3.amazonaws.com/torch.data/penn.tgz -o penn.tgz
tar xvf penn.tgz
rm penn.tgz
]]
end
-- Each dataset is a 1D tensor of ids; the 4th return value
-- is the dictionary, with two-way indexes
return
loadDataset(sys.fpath()..'/penn/train.txt'),
loadDataset(sys.fpath()..'/penn/valid.txt'),
loadDataset(sys.fpath()..'/penn/test.txt'),
{word2id = word2id, id2word = id2word}
end
return setupData
<|start_filename|>src/runtime/codegen/backend/lua/init.lua<|end_filename|>
local Value = require 'autograd.runtime.codegen.Value'
local Source = require 'autograd.runtime.codegen.Source'
local StringBuilder = require 'autograd.runtime.codegen.StringBuilder'
local Debugger = require 'autograd.runtime.codegen.Debugger'
local util = require 'autograd.util'
local reusableFunctionsMap = {
["torch.tanh"] = true,
["torch.cmul"] = true,
["torch.cdiv"] = true,
["torch.exp"] = true,
["torch.pow"] = true,
["torch.add"] = true,
["torch.mul"] = true,
["torch.div"] = true,
["torch.neg"] = true,
["torch.ger"] = true,
["torch.mm"] = true,
["torch.mv"] = true,
["torch.cosh"] = true,
["torch.cat"] = true,
["torch.log"] = true,
["torch.repeatTensor"] = true,
["util.sigmoidInPlace"] = true,
["util.narrowSliceCopyInPlace"] = true,
["util.selectSliceCopyInPlace"] = true,
["util.fillSameSizeAsInPlace"] = true,
["util.fillSameSizeAsInPlace"] = true,
["util.zerosLikeInPlace"] = true,
["util.setNotEqualInPlace"] = true,
["util.indexAddInPlace"] = true,
["util.newTensorLikeInPlace"] = true,
["util.fillInPlace"] = true,
["util.cloneInPlace"] = true,
["util.newInPlace"] = true,
["util.typeAsInPlace"] = true,
}
local reusableFunctionTransforms = {
["util.narrowSliceCopy"] = "util.narrowSliceCopyInPlace",
["util.selectSliceCopy"] = "util.selectSliceCopyInPlace",
["util.fillSameSizeAs"] = "util.fillSameSizeAsInPlace",
["util.zerosLike"] = "util.zerosLikeInPlace",
["util.setNotEqual"] = "util.setNotEqualInPlace",
["util.indexAdd"] = "util.indexAddInPlace",
["util.sigmoid"] = "util.sigmoidInPlace",
["util.newTensorLike"] = "util.newTensorLikeInPlace",
["util.fill"] = "util.fillInPlace",
["torch.clone"] = "util.cloneInPlace",
["torch.DoubleTensor.new"] = "util.newInPlace",
["torch.FloatTensor.new"] = "util.newInPlace",
["torch.CudaTensor.new"] = "util.newInPlace",
["torch.typeAs"] = "util.typeAsInPlace",
}
local function canReuseOutput(node)
return reusableFunctionsMap[node.forwardFn.name] ~= nil and #node.outputs == 1 and node.outputs[1].type == Value.TENSOR
end
local function canInline(node, state)
return #node.outputs == 1 and #node.outputTargets[1] == 1 and state.hazardNodes[node] == nil and state.debugger == nil and state.inline
end
local function writeLiteralTable(wtable, out, symbols, depth)
depth = depth or 1
out:writeln("{")
local keys = { }
local numeric = true
for k, v in pairs(wtable) do
if type(k) ~= 'number' then
numeric = false
end
keys[#keys + 1] = k
end
local si = #keys
local ei = 1
local di = -1
if numeric then
si = 1
ei = #keys
di = 1
end
for i = si, ei, di do
local k = keys[i]
local v = wtable[k]
out:write(string.rep(" ", depth * 4))
if type(k) == 'number' or tostring(tonumber(k)) == k then
out:write("[", tostring(k), "]")
else
out:write(tostring(k))
end
out:write(" = ")
if Value.isValue(v) then
out:write(v.source:symbolPath(symbols))
elseif type(v) == 'table' then
writeLiteralTable(v, out, symbols, depth + 1)
else
out:write(tostring(v))
end
out:write(",\n")
end
out:write(string.rep(" ", (depth-1) * 4), "}")
end
local writeExpr
local function buildInputExpr(state, input)
if input.source.type == Source.CONSTANT and type(input.source.val) == "table" then
-- Literal table.
-- TODO: Only supports arrays.
local elements = { }
for k, v in pairs(input.source.val) do
if Value.isValue(v) then
elements[#elements + 1] = buildInputExpr(state, v)
else
elements[#elements + 1] = tostring(v)
end
end
return "{" .. table.concat(elements, ", ") .. "}"
elseif input.source.type == Source.COMPUTED and canInline(input.source.node, state) then
local subExpr = writeExpr(state, input.source.node)
return "(" .. subExpr .. ")"
else
local symbol = input.source:symbolPath(state.symbols)
return symbol
end
end
writeExpr = function(state, node)
local out = StringBuilder.new()
local inputSymbols = { }
for k = 1, #node.inputs do
local input = node.inputs[k]
inputSymbols[k] = buildInputExpr(state, input)
end
if node.forwardFn.operator ~= nil then
local op = node.forwardFn.operator
if op == "unm" then
out:write("-", inputSymbols[1])
elseif op == "index" then
out:write(inputSymbols[1])
out:write("[")
out:write(inputSymbols[2])
out:write("]")
elseif op == "newindex" then
out:write(inputSymbols[1])
out:write("[")
out:write(inputSymbols[2])
out:write("]")
out:write(" = ")
out:write(inputSymbols[3])
else
out:write(inputSymbols[1])
out:write(" ")
if op == "add" then
out:write("+")
elseif op == "sub" then
out:write("-")
elseif op == "mul" then
out:write("*")
elseif op == "div" then
out:write("/")
elseif op == 'pow' then
out:write('^')
end
out:write(" ")
out:write(inputSymbols[2])
end
elseif node.forwardFn.object ~= nil then
out:write(state.objects[node.forwardFn.object].name, ".", node.forwardFn.method, "(", table.concat(inputSymbols, ", "), ")")
else
local fnName = node.forwardFn.name
if canReuseOutput(node) then
table.insert(inputSymbols, 1, node.outputs[1].source:symbolPath(state.symbols))
end
out:write(state.functionRemap[fnName], "(", table.concat(inputSymbols, ", "), ")")
end
return out:finish()
end
local function tensorSig(t)
return t:type() .. table.concat(t:size():totable(), ",")
end
local function storageSig(t)
return torch.type(t) .. table.concat(t:totable(), ",")
end
local function letterForType(val)
if val.type == Value.TENSOR then
return "t"
elseif val.type == Value.NUMBER then
return "n"
else
return "r"
end
end
local function searchMatchingTensorLargest(tensor, sortedTensors, locals)
local ttype = tensor:type()
for i = #sortedTensors, 1, -1 do
local idx = sortedTensors[i]
local lt = locals[idx]
if lt:type() == ttype then
return i
end
end
return 0
end
local function findParamSource(val)
if Value.isValue(val) then
local rootSource = val.source:getRoot()
if rootSource.type == Source.PARAM then
return rootSource
end
elseif type(val) == "table" then
for k, v in pairs(val) do
local paramSource = findParamSource(v)
if paramSource ~= nil then
return paramSource
end
end
end
end
local function collectParams(val, params, seen, depth)
params = params or { }
for k, v in pairs(val) do
local paramSource = findParamSource(v)
params[k] = paramSource or Source.param(k)
end
return params
end
local function flattenAnswer(val)
if Value.isValue(val) then
return val:flatten()
elseif type(val) == "table" then
local ft = { }
for k, v in pairs(val) do
ft[k] = flattenAnswer(v)
end
return ft
else
return val
end
end
local function storageSize(tensor)
local storage = tensor:storage()
return storage and storage:size() or 0
end
local function mapReusableTensorNodeSymbol(node, symbols, tensorPool, availableTensorMap, remainingOutputs, availableCount, index)
local output = node.outputs[1]
local tensor = output:get()
local sig = tensorSig(tensor)
local matchingList = availableTensorMap[sig]
local tensorIdx = nil
if matchingList ~= nil and #matchingList > 0 then
-- Map to tensor pool.
tensorIdx = table.remove(matchingList, #matchingList)
availableCount = availableCount - 1
else
if availableCount > 0 and index ~= nil then
-- There are tensors remaining, so keep track for possible later inexact allocation.
remainingOutputs[#remainingOutputs + 1] = index
else
-- No available tensors, so just go ahead and allocate a slot for this one now.
tensorIdx = #tensorPool + 1
tensorPool[tensorIdx] = tensor
end
end
if tensorIdx ~= nil then
symbols[output.source] = "rlocals[" .. tensorIdx .. "]"
end
return availableCount
end
local function createSymbolTable(graph, execOrder, aliases, params, tensorPool, tensorLocals, opt)
-- Assign symbols to params, inputs, outputs.
local symbols = { }
local undefined = { }
local constants = { }
for i = 1, #params do
symbols[params[i]] = "p" .. i
end
local constantStorageMap = { }
local constantTensorMap = { }
local tensorPoolViews = { }
local availableTensorMap = { }
local availableCount = 0
local tensorSigs = { }
for i = #tensorPool, 1, -1 do
local tensor = tensorPool[i]
local sig = tensorSig(tensor)
local list = availableTensorMap[sig]
if list == nil then
list = { }
availableTensorMap[sig] = list
end
list[#list + 1] = i
availableCount = availableCount + 1
end
local remainingOutputs = { }
local localCount = 0
local skip = { }
-- Guarantee a stable mapping for gradient output tensors.
if opt.stableGradients then
local grads = graph.grads
local gradsByParamPath = { }
for i = 1, #grads do
local node = grads[i].grad.source:getRoot().node
if canReuseOutput(node) then
local paramPath = grads[i].param.source:symbolPath(symbols)
gradsByParamPath[paramPath] = node
end
end
local flatParamGrads = util.sortedFlatten(gradsByParamPath, { }, true)
for i = 1, #flatParamGrads do
local gradNode = flatParamGrads[i]
availableCount = mapReusableTensorNodeSymbol(gradNode, symbols, tensorPool, availableTensorMap, remainingOutputs, availableCount)
skip[gradNode] = true
end
end
-- Exact matches first.
for i = 1, #execOrder do
local node = execOrder[i]
if aliases[node] ~= nil or skip[node] ~= nil then
elseif #node.outputs == 1 then
local output = node.outputs[1]
if node.outputs[1].type == Value.TENSOR and canReuseOutput(node) then
availableCount = mapReusableTensorNodeSymbol(node, symbols, tensorPool, availableTensorMap, remainingOutputs, availableCount, i)
else
-- Non-reusable local.
localCount = localCount + 1
tensorLocals[localCount] = 0
symbols[output.source] = "locals[" .. localCount .. "]"
end
-- else
-- -- One output, not a tensor.
-- undefined[output.source] = true
-- symbols[output.source] = letterForType(node.outputs[1]) .. i
-- end
else
-- More than one output.
-- TODO, currently uncached.
for k = 1, #node.outputs do
local output = node.outputs[k]
undefined[output.source] = true
symbols[node.outputs[k].source] = letterForType(node.outputs[k]) .. i .. "_" .. k
end
end
-- Find constant inputs.
for k = 1, #node.inputs do
local input = node.inputs[k]
local source = input.source:getRoot()
if source.type == Source.CONSTANT and symbols[source] == nil then
if torch.isTensor(source.val) then
local index = constantTensorMap[source.val]
if index == nil then
index = #constants + 1
constantTensorMap[source.val] = index
constants[index] = source
end
symbols[source] = "c" .. index
elseif torch.isStorage(source.val) then
local sig = storageSig(source.val)
if constantStorageMap[sig] then
symbols[source] = "c" .. constantStorageMap[sig]
else
local index = #constants + 1
constants[index] = source
constantStorageMap[sig] = index
symbols[source] = "c" .. index
end
end
end
end
end
-- Did we fail to find a spot for any tensors? Try an inexact mapping that requires a view.
local availableTensors = { }
if availableCount > 0 then
-- Only bother sorting the two lists by size if we're actually going to use them.
local availableTensorSizes = { }
for k, v in pairs(availableTensorMap) do
for i = 1, #v do
local idx = v[i]
local size = storageSize(tensorPool[idx])
if size > 0 then
availableTensors[#availableTensors + 1] = idx
availableTensorSizes[idx] = size
end
end
end
local function sortLocalSize(a, b)
return availableTensorSizes[a] < availableTensorSizes[b]
end
table.sort(availableTensors, sortLocalSize)
local remainingOutputSizes = { }
for i = 1, #remainingOutputs do
local idx = remainingOutputs[i]
local output = execOrder[idx].outputs[1]
remainingOutputSizes[idx] = storageSize(output:get())
end
local function sortTensorSize(a, b)
return remainingOutputSizes[a] < remainingOutputSizes[b]
end
table.sort(remainingOutputs, sortTensorSize)
end
for i = #remainingOutputs, 1, -1 do
local output = execOrder[remainingOutputs[i]].outputs[1]
local outputTensor = output:get()
local matchingIndex = searchMatchingTensorLargest(outputTensor, availableTensors, tensorPool)
if matchingIndex > 0 then
local tensorIdx = availableTensors[matchingIndex]
local poolTensor = tensorPool[tensorIdx]
table.remove(availableTensors, matchingIndex) -- typically the last element
local poolStorage = poolTensor:storage()
if storageSize(outputTensor) > poolStorage:size() then
-- We don't care about the data in the pool tensor, so resize it to zero before growing to avoid a realloc/copy.
poolStorage:resize(0)
end
local viewIdx = #tensorPoolViews + 1
symbols[output.source] = "vlocals[" .. viewIdx .. "]"
tensorPoolViews[viewIdx] = outputTensor.new(poolStorage):resize(outputTensor:size())
else
-- No match anywhere, allocate new slot in the tensor pool.
local tensorIdx = #tensorPool + 1
tensorPool[tensorIdx] = outputTensor.new(outputTensor:size())
symbols[output.source] = "rlocals[" .. tensorIdx .. "]"
end
end
-- Map aliased outputs.
for node, aliasNode in pairs(aliases) do
if not symbols[aliasNode.outputs[1].source] then
error("invalid node alias")
end
symbols[node.outputs[1].source] = symbols[aliasNode.outputs[1].source]
end
for i = 1, #graph.mutationFlow.history do
local aliasOp = graph.mutationFlow.history[i]
symbols[aliasOp.to.source] = symbols[aliasOp.from.source]
end
return symbols, undefined, constants, tensorPoolViews
end
local function collectObjects(execOrder)
-- Find all the nn objects we need to create or pass in.
local objectMap = { }
local objectTable = { }
for i = 1, #execOrder do
local node = execOrder[i]
local obj = node.forwardFn.object
if obj ~= nil and objectMap[obj] == nil then
objectTable[#objectTable + 1] = obj
objectMap[obj] = {
object = obj,
index = objectTable[#objectTable],
name = "objects[" .. #objectTable .. "]"
}
end
end
return objectMap, objectTable
end
local function changeToReuseFunctions(execOrder)
for i = 1, #execOrder do
local node = execOrder[i]
local tfn = reusableFunctionTransforms[node.forwardFn.name]
if tfn ~= nil and #node.outputs == 1 and node.outputs[1].type == Value.TENSOR then
node.forwardFn.name = tfn
end
end
end
local function aliasFreeTensors(execOrder, aliases, hazardNodes)
local availableTensorMap = { }
local availableCount = 0
local refCounts = { }
local freeTensors = { }
for i = 1, #execOrder do
local node = execOrder[i]
if canReuseOutput(node) then
refCounts[node.outputs[1]] = #node.outputTargets[1]
if aliases[node] == nil and hazardNodes[node] == nil then
if availableCount > 0 then
local sig = tensorSig(node.outputs[1]:get())
local matchingList = availableTensorMap[sig]
if matchingList ~= nil and #matchingList > 0 then
local aliasInput = table.remove(matchingList, #matchingList)
local target = aliasInput.source:getRoot().node
if aliases[target] ~= nil then
aliases[node] = aliases[target]
else
aliases[node] = target
end
availableCount = availableCount - 1
end
end
end
end
for k = 1, #node.inputs do
local input = node.inputs[k]
if input.type == Value.TENSOR then
local refCount = refCounts[input]
if refCount ~= nil then
refCounts[input] = refCount - 1
if refCount == 1 then
local sig = tensorSig(input:get())
local list = availableTensorMap[sig]
if list == nil then
list = { }
availableTensorMap[sig] = list
end
list[#list + 1] = input
availableCount = availableCount + 1
end
end
end
end
end
end
local function addNodeTargets(node, hazardNodes)
local targetArray = node.outputTargets[1]
for k = 1, #targetArray do
local target = targetArray[k]
local targetNode = target.node
hazardNodes[targetNode] = true
end
end
local function generateCode(graph, opt)
local optimize = util.defaultBool(opt.optimize, true)
local withForward = util.defaultBool(opt.withForward, true)
local withGradients = util.defaultBool(opt.withGradients, true)
local inline = util.defaultBool(opt.inline, true)
local tensorPool = opt.tensorPool or { }
local tensorLocals = opt.tensorLocals or { }
local debugger = opt.debugger
local execOrder, hazardNodes = graph:walkExecutionOrder(withForward, withGradients)
-- Don't allow any reordering or inlining of operations near assignment flow.
for i = 1, #graph.mutationFlow.history do
local aliasOp = graph.mutationFlow.history[i]
addNodeTargets(aliasOp.from.source.node, hazardNodes)
addNodeTargets(aliasOp.to.source.node, hazardNodes)
hazardNodes[aliasOp.from.source.node] = true
hazardNodes[aliasOp.to.source.node] = true
end
changeToReuseFunctions(execOrder)
local params = collectParams(graph.params)
local aliases = { }
if opt.reduceFootprint then
if opt.stableGradients then
aliasFreeTensors(execOrder, aliases, hazardNodes, graph.mutationFlow)
else
aliasFreeTensors(execOrder, aliases, { })
end
end
local symbols, undefined, constants, tensorPoolViews = createSymbolTable(graph, execOrder, aliases, params, tensorPool, tensorLocals, opt)
local objectMap, objectTable = collectObjects(execOrder)
-- Print out a dotfile of the computation graph if requested
if opt.dotFile then
if not debugger then
debugger = Debugger()
end
debugger.setMain(symbols, graph.grads, graph.answers)
debugger.generateDot(opt.dotFile)
end
local out = StringBuilder.new()
local outerArgNames = {"locals", "rlocals", "vlocals"}
local outerArgs = { tensorLocals, tensorPool, tensorPoolViews }
if debugger then
debugger.setMain(symbols, graph.grads, graph.answers)
outerArgNames[#outerArgNames + 1] = "debugger"
outerArgs[#outerArgs + 1] = debugger
end
if #objectTable > 0 then
outerArgNames[#outerArgNames + 1] = "objects"
outerArgs[#outerArgs + 1] = objectTable
end
local functionRemap = { }
for i = 1, #execOrder do
local node = execOrder[i]
if node.forwardFn.name == "Value.__internal_get" then
node.forwardFn = { operator = "index", name = "op.__index" }
end
if node.forwardFn.name == "Value.__internal_set" then
node.forwardFn = { operator = "newindex", name = "op.__newindex" }
end
if node.forwardFn.operator == nil and functionRemap[node.forwardFn.name] == nil then
functionRemap[node.forwardFn.name] = string.gsub(node.forwardFn.name, "%.", "_")
end
end
local state = {
symbols = symbols,
hazardNodes = hazardNodes,
functionRemap = functionRemap,
debugger = debugger,
inline = inline,
objects = objectMap
}
-- Generate code.
out:writeln("return function(", table.concat(outerArgNames, ", "), ")")
out:writeln("local nn = require('autograd').nn")
out:writeln("local util = require('autograd.util')")
for k, v in pairs(functionRemap) do
out:writeln("local ", v, " = ", k)
end
for i = 1, #constants do
out:writeln("local ", constants[i]:symbolPath(symbols), " = ", constants[i]:symbolPath({}))
end
local paramSymbols = { }
for i = 1, #params do
paramSymbols[i] = symbols[params[i]]
end
out:writeln("return function(", table.concat(paramSymbols, ", "), ")")
if debugger then
for i = 1, #graph.params do
debugger.generateInputCheck(graph.params[i], paramSymbols[i], out)
end
end
for i = 1, #execOrder do
local node = execOrder[i]
local outputSymbols = { }
for k = 1, #node.outputs do
outputSymbols[k] = symbols[node.outputs[k].source]
end
if not canInline(node, state) then
out:indent(1)
if (not canReuseOutput(node)) and (node.forwardFn.operator ~= "newindex") then
if #outputSymbols > 0 then
if undefined[node.outputs[1].source] then
out:write("local ")
end
out:write(table.concat(outputSymbols, ", "), " = ")
end
end
out:writeln(writeExpr(state, node))
if debugger then
for k = 1, #node.outputs do
debugger.generateOutputCheck(node, k, outputSymbols[k], out)
end
end
end
end
out:indent(1)
out:write("return ")
local grads = graph.grads
local answers = graph.answers
if withGradients then
if #grads == 1 and grads[1].grad.type == Value.TABLE then
-- This doesn't feel quite right, should be possible to unify this with the other path.
out:write(grads[1].grad.source:symbolPath(symbols))
elseif #grads == 1 and grads[1].grad.type == Value.TENSOR and grads[1].param.source.type == Source.PARAM then
out:write(grads[1].grad.source:symbolPath(symbols))
else
local retTable = { }
for i = 1, #grads do
local valTable = retTable
local stack = grads[i].param.source:getParentsArray()
local gradSymbol = grads[i].grad.source:symbolPath(symbols)
for k = 1, #stack do
local ss = stack[k]
if ss.type == Source.TABLE then
if valTable[ss.key] == nil then
if stack[k + 1] == nil then
valTable[ss.key] = gradSymbol
else
local nextTable = { }
valTable[ss.key] = nextTable
end
end
valTable = valTable[ss.key]
end
end
end
writeLiteralTable(retTable, out, symbols, 2)
end
end
if withForward then
if withGradients then
out:write(", ")
end
for i = 1, #answers do
if i ~= 1 then
out:write(", ")
end
local answer = answers[i]
if Value.isValue(answer) then
out:write(answers[i].source:symbolPath(symbols))
elseif type(answer) == "table" then
writeLiteralTable(answer, out, symbols, 2)
else
out:write(answer)
end
end
end
out:writeln()
out:writeln("end")
out:writeln("end")
local code = out:finish()
if debugger then
debugger.setCode(code)
end
local retValues = { }
if withGradients then
retValues = { Value.flattenGrads(graph.params[opt.argnum], graph.intermediateGrads) }
end
for i = 1, #graph.answers do
retValues[#retValues + 1] = flattenAnswer(graph.answers[i])
end
return code, outerArgs, retValues
end
local function generateFn(graph, opt)
local code, outerArgs, retValues = generateCode(graph, opt)
-- Optionally show the generated code. It's not the most readable,
-- but it's all there for your perusal
if opt.showCode then
print(code)
end
local outer, err = (loadstring or load)(code)
if outer == nil then
print(code)
if err ~= nil then
print(err)
end
error("failed to parse generated code.")
end
return outer()(table.unpack(outerArgs)), retValues, code
end
return {
generateFn = generateFn
}
<|start_filename|>benchmark/benchmark.lua<|end_filename|>
require 'trepl'
-- Options
local opt = lapp [[
Run benchmarks.
Options:
--type (default float) can be: double | float | cuda
--nodes (default false)
--profile (default false) requires profi to be installed (luarocks install profi)
--nooptimize (default false)
]]
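-- Example invocation (sketch; assumes the torch 'th' runner, launched from the repo root):
--   th benchmark/benchmark.lua --type cuda --nodes true --profile true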
-- benchmark of common models
local d = require 'autograd'
local nn = require 'nn'
local c = require 'trepl.colorize'
local haveProfi,profi = pcall(require,'ProFi')
d.optimize(opt.nooptimize == 'false')
-- tic/toc
local tic,toc
local tm = torch.Timer()
if opt.type == 'cuda' then
tic = function()
cutorch.synchronize()
tm:reset()
end
toc = function()
cutorch.synchronize()
return tm:time().real
end
else
tic = function()
tm:reset()
end
toc = function()
return tm:time().real
end
end
-- type
local tensor = torch.FloatTensor
local ttype = 'torch.FloatTensor'
if opt.type == 'cuda' then
require 'cunn'
tensor = torch.CudaTensor
ttype = 'torch.CudaTensor'
elseif opt.type == 'double' then
tensor = torch.DoubleTensor
ttype = 'torch.DoubleTensor'
end
local nodeTimes = { }
if opt.nodes ~= 'false' then
local preTime;
d.debugFns.preGradFn = function(node)
preTime = sys.clock()
end
d.debugFns.postGradFn = function(node)
if node.gradFun ~= nil then
local idx = 'grad ' .. node.gradFun[1]
nodeTimes[idx] = (nodeTimes[idx] or 0) + (sys.clock() - preTime)
end
end
local preTime;
d.debugFns.preFwdFn = function(fn)
return sys.clock()
end
d.debugFns.postFwdFn = function(fn, o)
local idx = 'forward ' .. fn
local tm = (sys.clock() - o)
nodeTimes['forward (inclusive)'] = (nodeTimes['forward (inclusive)'] or 0) + tm
nodeTimes[idx] = (nodeTimes[idx] or 0) + tm
end
end
-- Test 1: logistic regression
local tests = {
['logistic (ag)'] = function()
local tnn, tag
local x = tensor(1000,100):normal()
local y = tensor(1000):uniform(1.5,10.5):floor()
do
local model = nn.Sequential()
model:add(nn.Linear(100,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x[1])
local loss = lossf:forward(yhat, y[1])
local dloss_dyhat = lossf:backward(yhat, y[1])
model:backward(x[1], dloss_dyhat)
tic()
for k = 1,40 do
for i = 1,x:size(1) do
model:zeroGradParameters()
local yhat = model:forward(x[i])
local loss = lossf:forward(yhat, y[i])
local dloss_dyhat = lossf:backward(yhat, y[i])
model:backward(x[i], dloss_dyhat)
end
end
tnn = toc()
end
do
local f = function(params, x, y)
local wx = params.W * x + params.b
local yhat = d.util.logSoftMax(wx)
local loss = -torch.sum(torch.narrow(yhat,1,y,1))
return loss
end
local params = {
W = tensor(10, 100):normal(.01),
b = tensor(10):zero(),
}
-- force allocs
local df = d(f)
local grads = df(params, x[1], y[1])
tic()
for k = 1,40 do
for i = 1,x:size(1) do
local grads = df(params, x[i], y[i])
end
end
tag = toc()
end
return tnn, tag
end,
['logistic (nn)'] = function()
local tnn, tag
local x = tensor(1000,100):normal()
local y = tensor(1000):uniform(1.5,10.5):floor()
do
local model = nn.Sequential()
model:add(nn.Linear(100,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x[1])
local loss = lossf:forward(yhat, y[1])
local dloss_dyhat = lossf:backward(yhat, y[1])
model:backward(x[1], dloss_dyhat)
tic()
for k = 1,20 do
for i = 1,x:size(1) do
model:zeroGradParameters()
local yhat = model:forward(x[i])
local loss = lossf:forward(yhat, y[i])
local dloss_dyhat = lossf:backward(yhat, y[i])
model:backward(x[i], dloss_dyhat)
end
end
tnn = toc()
end
do
local lin
local params = {}
lin,params.lin = d.nn.Linear(100,10)
local lsm = d.nn.LogSoftMax()
local lossf = d.nn.ClassNLLCriterion()
params = d.util.cast(params, ttype)
local f = function(params, x, y)
local h = lin(params.lin, x)
local yhat = lsm(h)
local loss = lossf(yhat, y)
return loss
end
-- force allocs
local df = d(f)
local grads = df(params, x[1], y[1])
tic()
for k = 1,20 do
for i = 1,x:size(1) do
local grads = df(params, x[i], y[i])
end
end
tag = toc()
end
return tnn, tag
end,
['mlp (ag)'] = function()
local tnn, tag
local x = tensor(2000,100):normal()
local y = tensor(2000):uniform(1.5,10.5):floor()
do
local model = nn.Sequential()
model:add(nn.Linear(100,1000))
model:add(nn.Tanh())
model:add(nn.Linear(1000,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x[1])
local loss = lossf:forward(yhat, y[1])
local dloss_dyhat = lossf:backward(yhat, y[1])
model:backward(x[1], dloss_dyhat)
tic()
for k = 1,10 do
for i = 1,x:size(1) do
model:zeroGradParameters()
local yhat = model:forward(x[i])
local loss = lossf:forward(yhat, y[i])
local dloss_dyhat = lossf:backward(yhat, y[i])
model:backward(x[i], dloss_dyhat)
end
end
tnn = toc()
end
do
local f = function(params, x, y)
local h1 = torch.tanh( params.W1 * x + params.b1 )
local h2 = params.W2 * h1 + params.b2
local yhat = d.util.logSoftMax(h2)
local loss = -torch.sum(torch.narrow(yhat,1,y,1))
return loss
end
local params = {
W1 = tensor(1000, 100):normal(.01),
b1 = tensor(1000):zero(),
W2 = tensor(10, 1000):normal(.01),
b2 = tensor(10):zero(),
}
-- force allocs
local df = d(f)
local grads = df(params, x[1], y[1])
tic()
for k = 1,10 do
for i = 1,x:size(1) do
local grads = df(params, x[i], y[i])
end
end
tag = toc()
end
return tnn, tag
end,
['mlp (nn+ag)'] = function()
local tnn, tag
local x = tensor(2000,100):normal()
local y = tensor(2000):uniform(1.5,10.5):floor()
do
local model = nn.Sequential()
model:add(nn.Linear(100,1000))
model:add(nn.Tanh())
model:add(nn.Linear(1000,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x[1])
local loss = lossf:forward(yhat, y[1])
local dloss_dyhat = lossf:backward(yhat, y[1])
model:backward(x[1], dloss_dyhat)
tic()
for k = 1,10 do
for i = 1,x:size(1) do
model:zeroGradParameters()
local yhat = model:forward(x[i])
local loss = lossf:forward(yhat, y[i])
local dloss_dyhat = lossf:backward(yhat, y[i])
model:backward(x[i], dloss_dyhat)
end
end
tnn = toc()
end
do
local lin1,tanh,lin2,lsm
local params = {}
lin1,params.lin1 = d.nn.Linear(100,1000)
tanh = d.nn.Tanh()
lin2,params.lin2 = d.nn.Linear(1000,10)
lsm = d.nn.LogSoftMax()
params = d.util.cast(params, ttype)
local f = function(params, x, y)
local h1 = tanh( lin1(params.lin1, x) )
local h2 = lin2(params.lin2, h1)
local yhat = lsm(h2)
local loss = -torch.sum(torch.narrow(yhat, 1, y, 1))
return loss
end
-- force allocs
local df = d(f)
local grads = df(params, x[1], y[1])
tic()
for k = 1,10 do
for i = 1,x:size(1) do
local grads = df(params, x[i], y[i])
end
end
tag = toc()
end
return tnn, tag
end,
['mlp (nn)'] = function()
local tnn, tag
local x = tensor(2000,100):normal()
local y = tensor(2000):uniform(1.5,10.5):floor()
do
local model = nn.Sequential()
model:add(nn.Linear(100,1000))
model:add(nn.Tanh())
model:add(nn.Linear(1000,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x[1])
local loss = lossf:forward(yhat, y[1])
local dloss_dyhat = lossf:backward(yhat, y[1])
model:backward(x[1], dloss_dyhat)
tic()
for k = 1,10 do
for i = 1,x:size(1) do
model:zeroGradParameters()
local yhat = model:forward(x[i])
local loss = lossf:forward(yhat, y[i])
local dloss_dyhat = lossf:backward(yhat, y[i])
model:backward(x[i], dloss_dyhat)
end
end
tnn = toc()
end
do
local lin1, tanh, lin2, lsm, lossf
local params = {}
lin1,params.lin1 = d.nn.Linear(100,1000)
tanh = d.nn.Tanh()
lin2,params.lin2 = d.nn.Linear(1000,10)
lsm = d.nn.LogSoftMax()
lossf = d.nn.ClassNLLCriterion()
params = d.util.cast(params, ttype)
local f = function(params, x, y)
local h1 = tanh( lin1(params.lin1, x) )
local h2 = lin2(params.lin2, h1)
local yhat = lsm(h2)
local loss = lossf(yhat, y)
return loss
end
-- force allocs
local df = d(f)
local grads = df(params, x[1], y[1])
tic()
for k = 1,10 do
for i = 1,x:size(1) do
local grads = df(params, x[i], y[i])
end
end
tag = toc()
end
return tnn, tag
end,
['mlp (autoModule, batched)'] = function()
local tnn, tag
local inputSize, outputSize = 100,1000
local x = tensor(32,inputSize):uniform(-5,5)
local uniformMultiplier = torch.expand( tensor(inputSize):uniform():resize(1, inputSize), 32, inputSize)
local y = x:clone():exp():cmul(uniformMultiplier)
do
local model = nn.Sequential()
local linear1 = nn.Linear(inputSize, outputSize)
local linear2 = nn.Linear(outputSize, inputSize)
model:add( linear1 )
model:add( nn.ReLU() )
model:add( linear2 )
model:type(ttype)
local lossf = nn.MSECriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
tic()
for i = 1,200 do
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
end
tnn = toc()
end
do
local linear = function(input, weight, bias)
local y = input * weight + torch.expand(bias, torch.size(input, 1), torch.size(bias, 2))
return y
end
local linearReLU = function(input, weight, bias)
local y = input * weight + torch.expand(bias, torch.size(input, 1), torch.size(bias, 2))
local output = torch.mul( torch.abs( y ) + y, 0.5)
return output
end
local mse = function(input, target)
local buffer = input-target
return torch.sum( torch.cmul(buffer, buffer) ) / (torch.nDimension(input) == 2 and torch.size(input, 1) * torch.size(input, 2) or torch.size(input, 1))
end
local autoModel = nn.Sequential()
local autoLinear1ReLU = d.nn.AutoModule('AutoLinearReLU')(linearReLU, tensor(inputSize, outputSize), tensor(1,outputSize))
local autoLinear2 = d.nn.AutoModule('AutoLinear')(linear, tensor(outputSize, inputSize), tensor(1,inputSize))
autoModel:add( autoLinear1ReLU )
autoModel:add( autoLinear2 )
local lossf = d.nn.AutoCriterion('AutoMSE')(mse)
-- force allocs
autoModel:zeroGradParameters()
local yhat = autoModel:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
autoModel:backward(x, dloss_dyhat)
tic()
for i = 1,200 do
autoModel:zeroGradParameters()
local yhat = autoModel:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
autoModel:backward(x, dloss_dyhat)
end
tag = toc()
end
return tnn, tag
end,
['mlp (ag, batched)'] = function()
local tnn, tag
local x = tensor(32,100):normal()
local y = tensor(32):uniform(1.5,10.5):floor()
local yOneHot = d.util.oneHot(y,10)
do
local model = nn.Sequential()
model:add(nn.Linear(100,1000))
model:add(nn.Tanh())
model:add(nn.Linear(1000,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
tic()
for i = 1,2000 do
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
end
tnn = toc()
end
do
local f = function(params, x, y)
local N = torch.size(x, 1)
local h1 = torch.tanh( x * params.W1 + torch.expand(params.b1, N,1000) )
local h2 = h1 * params.W2 + torch.expand(params.b2, N,10)
local loss, yhat = d.loss.crossEntropy(h2, y)
return loss
end
local params = {
W1 = tensor(1000, 100):normal(.01):t(),
b1 = tensor(1, 1000):zero(),
W2 = tensor(10, 1000):normal(.01):t(),
b2 = tensor(1, 10):zero(),
}
-- force allocs
local df = d(f)
local grads = df(params, x, yOneHot)
tic()
for i = 1,2000 do
local grads = df(params, x, yOneHot)
end
tag = toc()
end
return tnn, tag
end,
['mlp (nn, batched)'] = function()
local tnn, tag
local x = tensor(32,1000):normal()
local y = tensor(32):uniform(1.5,10.5):floor()
do
local model = nn.Sequential()
model:add(nn.Linear(1000,1000))
model:add(nn.Tanh())
model:add(nn.Linear(1000,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
tic()
for i = 1,200 do
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
end
tnn = toc()
end
do
local lin1,tanh,lin2,lsm,lossf
local params = {}
lin1,params.lin1 = d.nn.Linear(1000,1000)
tanh = d.nn.Tanh()
lin2,params.lin2 = d.nn.Linear(1000,10)
lsm = d.nn.LogSoftMax()
lossf = d.nn.ClassNLLCriterion()
params = d.util.cast(params, ttype)
local f = function(params, x, y)
local h1 = tanh( lin1(params.lin1, x) )
local h2 = lin2(params.lin2, h1)
local yhat = lsm(h2)
local loss = lossf(yhat, y)
return loss
end
-- force allocs
local df = d(f)
local grads = df(params, x, y)
tic()
for i = 1,200 do
local grads = df(params, x, y)
end
tag = toc()
end
return tnn, tag
end,
['cnn (nn, batched)'] = function()
local tnn, tag
local x = tensor(32,3,64,64):normal()
local y = tensor(32):uniform(1.5,10.5):floor()
do
local model = nn.Sequential()
model:add(nn.SpatialConvolutionMM(3,16,5,5))
model:add(nn.Tanh())
model:add(nn.SpatialMaxPooling(2,2,2,2))
model:add(nn.SpatialConvolutionMM(16,32,5,5))
model:add(nn.Tanh())
model:add(nn.SpatialMaxPooling(2,2,2,2))
model:add(nn.Reshape(13*13*32))
model:add(nn.Linear(13*13*32,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
tic()
for i = 1,10 do
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
end
tnn = toc()
end
do
local c1,t1,m1,c2,t2,m2,r,l3,lsm,lossf
local params = {}
c1,params.c1 = d.nn.SpatialConvolutionMM(3,16,5,5)
t1 = d.nn.Tanh()
m1 = d.nn.SpatialMaxPooling(2,2,2,2)
c2,params.c2 = d.nn.SpatialConvolutionMM(16,32,5,5)
t2 = d.nn.Tanh()
m2 = d.nn.SpatialMaxPooling(2,2,2,2)
r = d.nn.Reshape(13*13*32)
l3,params.l3 = d.nn.Linear(13*13*32,10)
lsm = d.nn.LogSoftMax()
lossf = d.nn.ClassNLLCriterion()
params = d.util.cast(params, ttype)
local f = function(params, x, y)
local h1 = m1( t1( c1(params.c1, x) ) )
local h2 = m2( t2( c2(params.c2, h1) ) )
local h3 = l3(params.l3, r(h2))
local yhat = lsm(h3)
local loss = lossf(yhat, y)
return loss
end
-- local params = {
-- W1 = tensor(16, 3*5*5):normal(.01),
-- b1 = tensor(16):zero(),
-- W2 = tensor(32, 16*5*5):normal(.01),
-- b2 = tensor(32):zero(),
-- W3 = tensor(10, 32*13*13):normal(.01),
-- b3 = tensor(10):zero(),
-- }
-- force allocs
local df = d(f)
local grads = df(params, x, y)
tic()
for i = 1,10 do
local grads = df(params, x, y)
end
tag = toc()
end
return tnn, tag
end,
['cnn (nnWrap, batched)'] = function()
local tnn, tag
local x = tensor(32,3,64,64):normal()
local y = tensor(32):uniform(1.5,10.5):floor()
local model = nn.Sequential()
model:add(nn.SpatialConvolutionMM(3,16,5,5))
model:add(nn.Tanh())
model:add(nn.SpatialMaxPooling(2,2,2,2))
model:add(nn.SpatialConvolutionMM(16,32,5,5))
model:add(nn.Tanh())
model:add(nn.SpatialMaxPooling(2,2,2,2))
model:add(nn.Reshape(13*13*32))
model:add(nn.Linear(13*13*32,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
do
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
tic()
for i = 1,10 do
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
end
tnn = toc()
end
do
local modelf,params = d.functionalize(model)
local lossf = d.nn.ClassNLLCriterion()
local f = function(params, x, y)
local yhat = modelf(params, x)
local loss = lossf(yhat, y)
return loss
end
-- force allocs
local df = d(f)
local grads = df(params, x, y)
tic()
for i = 1,10 do
local grads = df(params, x, y)
end
tag = toc()
end
return tnn, tag
end,
['lstm (ag+nn)'] = function()
-- Depends on CXNN reference implementation
local ok,cxnn = pcall(require, 'cxnn')
if not ok then
return
end
-- Data
local tnn, tag
local x = tensor(1,33,100):normal()
local y = tensor(1):uniform(1.5,10.5):floor()
do
local model = nn.Sequential()
model:add(cxnn.RecurrentLSTMNetwork({
inputSize = 100,
hiddenFeatures = {200},
outputType = 'last',
}))
model:add(nn.Linear(200,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
tic()
for i = 1,200 do
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
end
tnn = toc()
end
do
local lstm1
local params = {}
lstm1,params.lstm1 = d.model.RecurrentLSTMNetwork({
inputFeatures = 100,
hiddenFeatures = 200,
outputType = 'last',
})
local lin2,lsm,lossf
lin2,params.lin2 = d.nn.Linear(200,10)
lsm = d.nn.LogSoftMax()
lossf = d.nn.ClassNLLCriterion()
local f = function(params, x, y)
local h1 = lstm1(params.lstm1, x)
local h2 = lin2(params.lin2, h1)
local yhat = lsm(h2)
local loss = lossf(yhat, y)
return loss
end
params = d.util.cast(params, ttype)
-- force allocs
local df = d(f)
local grads = df(params, x, y)
tic()
for i = 1,200 do
local grads = df(params, x, y)
end
tag = toc()
end
return tnn, tag
end,
['lstm (ag+nn, batched)'] = function()
-- Depends on CXNN reference implementation
local ok,cxnn = pcall(require, 'cxnn')
if not ok then
return
end
-- Data
local tnn, tag
local x = tensor(32,33,100):normal()
local y = tensor(32):uniform(1.5,10.5):floor()
do
local model = nn.Sequential()
model:add(cxnn.RecurrentLSTMNetwork({
inputSize = 100,
hiddenFeatures = {200},
outputType = 'last',
}))
model:add(nn.Linear(200,10))
model:add(nn.LogSoftMax())
model:type(ttype)
local lossf = nn.ClassNLLCriterion()
lossf:type(ttype)
-- force allocs
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
tic()
for i = 1,30 do
model:zeroGradParameters()
local yhat = model:forward(x)
local loss = lossf:forward(yhat, y)
local dloss_dyhat = lossf:backward(yhat, y)
model:backward(x, dloss_dyhat)
end
tnn = toc()
end
do
local lstm1
local params = {}
lstm1,params.lstm1 = d.model.RecurrentLSTMNetwork({
inputFeatures = 100,
hiddenFeatures = 200,
outputType = 'last',
})
local lin2, lsm, lossf
lin2,params.lin2 = d.nn.Linear(200,10)
lsm = d.nn.LogSoftMax()
lossf = d.nn.ClassNLLCriterion()
params = d.util.cast(params, ttype)
local f = function(params, x, y)
local h1 = lstm1(params.lstm1, x)
local h2 = lin2(params.lin2, h1)
local yhat = lsm(h2)
local loss = lossf(yhat, y)
return loss
end
for i in ipairs(params) do
for k in pairs(params[i]) do
params[i][k] = params[i][k]:type(ttype):normal()
end
end
-- force allocs
local df = d(f)
local grads = df(params, x, y)
tic()
for i = 1,30 do
local grads = df(params, x, y)
end
tag = toc()
end
return tnn, tag
end
}
local fmt = function(nb,color)
local nb = stringx.rjust(string.format('%.2f',nb), 5)
return c[color](nb)
end
function keysSortedByValue(tbl, sortFunction)
local keys = {}
for key in pairs(tbl) do
table.insert(keys, key)
end
-- default: sort keys by decreasing value
sortFunction = sortFunction or function(a, b)
return tbl[a] > tbl[b]
end
table.sort(keys, sortFunction)
return keys
end
print('Benchmarks:')
for name,test in pairs(tests) do
nodeTimes = { }
if opt.profile ~= 'false' and haveProfi then
profi:start()
end
local tnn,tag = test()
tnn = tnn or 1/0
tag = tag or 1/0
if tnn ~= 1/0 then
if opt.profile ~= 'false' and haveProfi then
profi:stop()
profi:writeReport(string.format("%s.profile.txt",name))
profi:reset()
end
print(c.blue(stringx.rjust(''..name..' => ', 32))
.. ' nn: ' .. fmt(tnn,'yellow') .. 's, autograd: ' .. fmt(tag,'red') .. 's, ratio: ' .. fmt(tag/tnn,'green') .. 'x')
if opt.nodes ~= 'false' then
local sortedKeys = keysSortedByValue(nodeTimes)
for i, v in pairs(sortedKeys) do
print(stringx.rjust(v, 41) .. ': ' .. fmt(nodeTimes[v],'red') .. 's')
end
print('')
end
end
end
<|start_filename|>examples/train-penn-rnn.lua<|end_filename|>
-- Options
local opt = lapp [[
Train an LSTM to fit the Penn Treebank dataset.
Options:
--nEpochs (default 20) nb of epochs
--bpropLength (default 20) max backprop steps
--batchSize (default 20) batch size
--wordDim (default 200) word vector dimensionality
--hiddens (default 200) nb of hidden units
--capEpoch (default -1) cap epoch to given number of steps (for debugging)
--reportEvery (default 200) report training accuracy every N steps
--learningRate (default 20) learning rate
--maxGradNorm (default .25) cap gradient norm
--paramRange (default .1) initial parameter range
--dropout (default 0) dropout probability on hidden states
--type (default float) tensor type: cuda | float | double
--model (default LSTM) recursive model: LSTM | GRU | FW
]]
-- CUDA?
if opt.type == 'cuda' then
require 'cutorch'
require 'cunn'
cutorch.manualSeed(1)
end
-- Libs
local d = require 'autograd'
local util = require 'autograd.util'
local model = require 'autograd.model'
d.optimize(true)
-- Seed
torch.manualSeed(1)
-- Load in PENN Treebank dataset
local trainData, valData, testData, dict = require('./get-penn.lua')()
local nTokens = #dict.id2word
-- Move data to CUDA
if opt.type == 'cuda' then
trainData = trainData:cuda()
testData = testData:cuda()
valData = valData:cuda()
elseif opt.type == 'double' then
trainData = trainData:double()
testData = testData:double()
valData = valData:double()
end
print('Loaded datasets: ', {
train = trainData,
validation = valData,
test = testData,
nTokens = nTokens,
})
-- Define LSTM layers:
local lstm1,params = model['Recurrent'..opt.model..'Network']({
inputFeatures = opt.wordDim,
hiddenFeatures = opt.hiddens,
outputType = 'all',
})
local lstm2 = model['Recurrent'..opt.model..'Network']({
inputFeatures = opt.hiddens,
hiddenFeatures = opt.hiddens,
outputType = 'all',
}, params)
-- Dropout
local regularize = util.dropout
-- Shortcuts
local nElements = opt.batchSize*opt.bpropLength
local nClasses = #dict.id2word
-- Use built-in nn modules:
local lsm = d.nn.LogSoftMax()
local lossf = d.nn.ClassNLLCriterion()
-- Complete trainable function:
local f = function(params, x, y, prevState, dropout)
-- N elements:
local batchSize = torch.size(x, 1)
local bpropLength = torch.size(x, 2)
local nElements = batchSize * bpropLength
-- Select word vectors
x = util.lookup(params.words.W, x)
-- Encode all inputs through LSTM layers:
local h1,newState1 = lstm1(params[1], regularize(x,dropout), prevState[1])
local h2,newState2 = lstm2(params[2], regularize(h1,dropout), prevState[2])
-- Flatten batch + temporal
local h2f = torch.view(h2, nElements, opt.hiddens)
local yf = torch.view(y, nElements)
-- Linear classifier:
local h3 = regularize(h2f,dropout) * params[3].W + torch.expand(params[3].b, nElements, nClasses)
-- Lsm
local yhat = lsm(h3)
-- Loss:
local loss = lossf(yhat, yf)
-- Return the average loss and the new recurrent states
return loss, {newState1, newState2}
end
-- Linear classifier params:
table.insert(params, {
W = torch.Tensor(opt.hiddens, #dict.id2word),
b = torch.Tensor(1, #dict.id2word),
})
-- Init weights + cast:
for i,weights in ipairs(params) do
for k,weight in pairs(weights) do
if opt.type == 'cuda' then
weights[k] = weights[k]:cuda()
elseif opt.type == 'double' then
weights[k] = weights[k]:double()
else
weights[k] = weights[k]:float()
end
weights[k]:uniform(-opt.paramRange, opt.paramRange)
end
end
-- Word dictionary to train:
local words
if opt.type == 'cuda' then
words = torch.CudaTensor(nTokens, opt.wordDim)
elseif opt.type == 'double' then
words = torch.DoubleTensor(nTokens, opt.wordDim)
else
words = torch.FloatTensor(nTokens, opt.wordDim)
end
words:uniform(-opt.paramRange, opt.paramRange)
params.words = {W = words}
-- Reformat training data for batches:
local epochLength = math.floor(trainData:size(1) / opt.batchSize)
trainData = trainData:narrow(1,1,epochLength*opt.batchSize):view(opt.batchSize, epochLength)
-- Reformat val for batches:
local valLength = math.floor(valData:size(1) / opt.batchSize)
valData = valData:narrow(1,1,valLength*opt.batchSize):view(opt.batchSize, valLength)
-- Reformat test, no batches (because we want the full perplexity):
testData = testData:view(1, testData:size(1))
-- Optional cap:
if tonumber(opt.capEpoch) > 0 then
epochLength = opt.capEpoch
end
-- Train it
local lr = opt.learningRate
local reportEvery = opt.reportEvery
local valPerplexity = math.huge
local df = d(f, { optimize = true })
for epoch = 1,opt.nEpochs do
-- Train:
print('\nTraining Epoch #'..epoch)
local aloss = 0
local maxGrad = 0
local lstmState = {} -- clear LSTM state at each new epoch
local grads,loss
for i = 1,epochLength-opt.bpropLength,opt.bpropLength do
xlua.progress(i,epochLength)
-- Next sequence:
local x = trainData:narrow(2,i,opt.bpropLength):contiguous()
local y = trainData:narrow(2,i+1,opt.bpropLength):contiguous()
-- Grads:
grads,loss,lstmState = df(params, x, y, lstmState, opt.dropout)
-- Cap gradient norms:
local norm = 0
for i,grad in ipairs(util.sortedFlatten(grads)) do
norm = norm + torch.sum(torch.pow(grad,2))
end
norm = math.sqrt(norm)
if norm > opt.maxGradNorm then
for i,grad in ipairs(util.sortedFlatten(grads)) do
grad:mul( opt.maxGradNorm / norm )
end
end
-- Update params:
for k,vs in pairs(grads) do
for kk,v in pairs(vs) do
params[k][kk]:add(-lr, grads[k][kk])
end
end
-- Loss: exponentiating the NLL gives the perplexity
aloss = aloss + loss
if ((i-1)/opt.bpropLength+1) % reportEvery == 0 then
aloss = aloss / reportEvery
local perplexity = math.exp(aloss)
print('\nAverage training perplexity = ' .. perplexity)
aloss = 0
end
end
-- Validate:
print('\n\nValidation #'..epoch..'...')
local aloss = 0
local steps = 0
local lstmState = {}
local loss
for i = 1,valData:size(2)-opt.bpropLength,opt.bpropLength do
-- Next sequence:
local x = valData:narrow(2,i,opt.bpropLength):contiguous()
local y = valData:narrow(2,i+1,opt.bpropLength):contiguous()
-- Estimate loss:
loss,lstmState = f(params, x, y, lstmState)
-- Loss: exponentiating the NLL gives the perplexity
aloss = aloss + loss
steps = steps + 1
end
aloss = aloss / steps
local newValPerplexity = math.exp(aloss)
print('Validation perplexity = ' .. newValPerplexity)
-- Learning rate scheme:
if newValPerplexity > valPerplexity or (valPerplexity - newValPerplexity)/valPerplexity < .10 then
-- No progress made, decrease learning rate
lr = lr / 2
print('Validation perplexity stagnating, decreasing learning rate to: ' .. lr)
end
valPerplexity = newValPerplexity
-- Test:
print('\nTest set [just indicative, not used for training]...')
local aloss = 0
local steps = 0
local lstmState = {}
local loss
for i = 1,testData:size(2)-opt.bpropLength,opt.bpropLength do
-- Next sequence:
local x = testData:narrow(2,i,opt.bpropLength):contiguous()
local y = testData:narrow(2,i+1,opt.bpropLength):contiguous()
-- Estimate loss:
loss,lstmState = f(params, x, y, lstmState)
-- Loss: exponentiating the NLL gives the perplexity
aloss = aloss + loss
steps = steps + 1
end
aloss = aloss / steps
local perplexity = math.exp(aloss)
print('Test set perplexity = ' .. perplexity)
end
<|start_filename|>src/runtime/codegen/init.lua<|end_filename|>
local Debugger = require 'autograd.runtime.codegen.Debugger'
local Graph = require 'autograd.runtime.codegen.Graph'
local Value = require 'autograd.runtime.codegen.Value'
local LuaBackend = require 'autograd.runtime.codegen.backend.lua'
local function buildSignature(params, tensorDims)
for k, v in pairs(params) do
if torch.isTensor(v) then
tensorDims[#tensorDims + 1] = table.concat(v:size():totable(), "x")
elseif type(v) == 'number' then
tensorDims[#tensorDims + 1] = "n"
elseif type(v) == 'table' then
tensorDims[#tensorDims + 1] = "t" .. #v
buildSignature(v, tensorDims)
end
end
end
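-- A signature is just the concatenated shape string of every tensor/number/table
-- reachable from the call arguments. For example (sketch only; the exact key depends
-- on table traversal order), params = {W = FloatTensor(10x100), b = FloatTensor(10)}
-- plus a FloatTensor(100) input could map to a key like "t2-10x100-10-100".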
local function execUncached(fn, args, opt, nestedGradient)
local graph = Graph.record(fn, args, opt)
local retValues = { Value.collectGrads(graph.params[opt.argnum], graph.intermediateGrads) }
for i = 1, #graph.answers do
retValues[#retValues + 1] = graph.answers[i]
end
if not nestedGradient then
retValues = Value.flatten(retValues)
end
return table.unpack(retValues)
end
local function printPoolStats(tensorPool)
local size = 0
for i = 1, #tensorPool do
local tensor = tensorPool[i]
size = size + tensor:storage():size() * 4
end
print("tensor pool size: " .. (size / (1024 * 1024)) .. " MB")
end
local function generateFn(fn, args, opt)
if opt.debugHook then
opt.debugger = Debugger(opt)
end
local graph = Graph.record(fn, args, opt)
graph:optimize()
return LuaBackend.generateFn(graph, opt)
end
local function copyStableTensors(retValues, stableGrads)
for k, rv in pairs(retValues) do
local sv = stableGrads[k]
if sv == nil then
sv = rv:clone()
stableGrads[k] = sv
end
if type(rv) ~= type(sv) then
error("mismatched types in stable tensor copy")
end
if torch.isTensor(rv) and rv ~= sv then
if not torch.isSameSizeAs(rv, sv) then
print("resizing stable grad " .. table.concat(sv:size():totable(), "x") .. " -> " .. table.concat(rv:size():totable(), "x"))
sv:resize(rv:size())
end
sv:copy(rv)
retValues[k] = sv
elseif type(sv) == "table" then
copyStableTensors(rv, sv)
end
end
end
local function pctStr(n, tot)
return tostring(math.floor((n / tot) * 100.0)) .. "%"
end
local function padMin(s, min)
if #s < min then
return s .. string.rep(" ", min - #s)
end
return s
end
local function printProfile(stats)
print(" ")
--print(string.format("[autograd] calls: %i", stats.calls))
print(string.format("[autograd] code cache hit rate: %i%%", math.floor((stats.cacheHits / stats.calls) * 100.0)))
print(string.format("[autograd] generated code paths: %i", stats.cacheMisses))
local averageCodegen = (stats.codegenTime / stats.cacheMisses) * 1000.0
local averageExec = (stats.executionTime / stats.cacheHits) * 1000.0
-- codegen always executes the code once
local totalCodegen = stats.codegenTime - ((averageExec * stats.cacheMisses) / 1000.0)
local totalAll = stats.codegenTime + stats.executionTime + stats.externalTime
print(string.format("[autograd] code gen time: average=%.2fms total=%.2fs pct=%s", averageCodegen - averageExec, totalCodegen, pctStr(totalCodegen, totalAll)))
print(string.format("[autograd] exec time: average=%.2fms total=%.2fs pct=%s", averageExec, stats.executionTime, pctStr(stats.executionTime, totalAll)))
print(string.format("[autograd] external time: average=%.2fms total=%.2fs pct=%s", (stats.externalTime / stats.calls) * 1000.0, stats.externalTime, pctStr(stats.externalTime, totalAll)))
print(" ")
end
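-- create() returns the user-facing differentiated function. Each call builds a
-- signature from the argument shapes; the first time a signature is seen the graph
-- is recorded and compiled (cache miss), afterwards the cached generated function is
-- executed directly (cache hit). Nested calls (reentryDepth > 0) skip codegen and run
-- uncached so they can be folded into the parent graph.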
local function create(fn, opt)
local generatedFunctions = { }
opt.tensorPool = { }
opt.tensorLocals = { }
local stableGradTensors = nil
local stats = {
cacheHits = 0,
cacheMisses = 0,
calls = 0,
externalTime = 0,
codegenTime = 0,
executionTime = 0,
prevTimestamp = nil
}
return function(...)
local args = {...}
if Graph.reentryDepth() > 0 then
-- If we're in the middle of building the graph for a parent function, include this one in the parent, don't codegen.
return execUncached(fn, args, opt, true)
end
stats.calls = stats.calls + 1
if opt.profile == 'summary' and math.fmod(stats.calls, opt.profileReportFrequency) == 0 then
printProfile(stats)
end
if stats.prevTimestamp ~= nil then
stats.externalTime = stats.externalTime + (sys.clock() - stats.prevTimestamp)
end
local sigFun = opt.signatureFn or function(params)
local tensorDims = { }
buildSignature(params, tensorDims)
return table.concat(tensorDims, "-")
end
local signature = sigFun(args)
if signature == nil then
stats.cacheMisses = stats.cacheMisses + 1
stats.prevTimestamp = sys.clock()
return execUncached(fn, args, opt, false)
end
if generatedFunctions[signature] == nil then
local genStart = sys.clock()
local gradFn, retValues, code = generateFn(fn, args, opt)
stats.codegenTime = stats.codegenTime + (sys.clock() - genStart)
generatedFunctions[signature] = gradFn
stats.cacheMisses = stats.cacheMisses + 1
stats.prevTimestamp = sys.clock()
-- We already have the answers, don't run it all over again.
if opt.withGradients and opt.withForward and not opt.debugHook then
if opt.stableGradients then
if stableGradTensors == nil then
stableGradTensors = retValues[1]
else
-- Since the user is expecting the results in the same tensors, copy the new results to the first set of results.
copyStableTensors(retValues[1], stableGradTensors)
end
end
return table.unpack(retValues)
elseif opt.withForward and not opt.debugHook then
return table.unpack(retValues)
end
end
stats.cacheHits = stats.cacheHits + 1
local execStart = sys.clock()
local retValues = {generatedFunctions[signature](table.unpack(args))}
stats.executionTime = stats.executionTime + (sys.clock() - execStart)
if opt.stableGradients then
copyStableTensors(retValues[1], stableGradTensors)
end
stats.prevTimestamp = sys.clock()
return table.unpack(retValues)
end
end
return {
create = create
}
<|start_filename|>src/gradcheck.lua<|end_filename|>
-- Autograd
local autograd = require 'autograd'
-- Perturbation (finite diffs):
local perturbation = 1e-6
-- Threshold:
local threshold = 1e-5
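-- Two complementary checks are run per parameter:
--  * gradcheckvar compares the autograd jacobian against central finite differences,
--  * gradcheckvar2 perturbs a single random entry and checks that the change in loss
--    matches the first-order Taylor prediction loss + <jacobian, noise>.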
-- Compute grads with bprop:
local function jacobianFromAutograd(func, inputs, key)
-- Autograd:
local df = autograd(func)
local grads = df(table.unpack(inputs))
local gradsVerify = df(table.unpack(inputs))
-- Find grad:
local g = autograd.util.nestedGet(grads, key)
local gVerify = autograd.util.nestedGet(gradsVerify, key)
local err
if torch.isTensor(g) then
err = (g - gVerify):abs():max()
else
err = torch.abs(g - gVerify)
end
if err ~= 0 then
error("autograd gradient not deterministic")
end
-- Return grads:
if torch.isTensor(g) then
return g:contiguous():view(-1):clone()
else
return g
end
end
-- Compute grads from finite differences
local function jacobianFromFiniteDifferences(func, inputs, key)
local var = autograd.util.nestedGet(inputs[1], key)
if torch.isTensor(var) then
-- Flat view:
local view = var:view(-1)
-- Grads:
local grads = view:clone():zero()
-- Finite diffs:
for i = 1,view:size(1) do
-- Initial val:
local val = view[i]
-- Perturbate:
view[i] = val - perturbation/2
local pred1 = func(table.unpack(inputs))
view[i] = val + perturbation/2
local pred2 = func(table.unpack(inputs))
view[i] = val
-- Finite diff:
grads[i] = (pred2-pred1) / perturbation
end
-- Return grads:
return grads
else
-- Initial val:
local val = var
-- Perturbate:
autograd.util.nestedSet(inputs[1], key, val - perturbation/2)
local pred1 = func(table.unpack(inputs))
autograd.util.nestedSet(inputs[1], key, val + perturbation/2)
local pred2 = func(table.unpack(inputs))
autograd.util.nestedSet(inputs[1], key, val)
-- Finite diff:
return (pred2-pred1) / perturbation
end
end
local function gradcheckvar2(func, inputs, key, randomizeInput)
local var = autograd.util.nestedGet(inputs[1], key)
local isTensorVar = torch.isTensor(var)
-- Random input:
if randomizeInput then
if isTensorVar then
var:uniform(-10,10)
else
autograd.util.nestedSet(inputs[1], key, 20 * (math.random() - 0.5))
var = autograd.util.nestedGet(inputs[1], key)
end
end
-- Estimate grads with fprop:
local jacobian = jacobianFromAutograd(func, inputs, key)
local originalLoss = func(table.unpack(inputs))
local perturbedLoss, approxPerturbed
if isTensorVar then
local noise = jacobian:view(-1):clone():zero()
local idx = math.random(1, noise:size(1))
local originalVar = var:clone()
noise:narrow(1,idx,1):uniform(-perturbation, perturbation)
var:add(torch.view(noise, var:size()))
perturbedLoss = func(table.unpack(inputs))
approxPerturbed = originalLoss + torch.dot(jacobian, noise)
var:copy(originalVar)
else
local noise = 2*perturbation*(math.random()-0.5)
autograd.util.nestedSet(inputs[1], key, var + noise)
perturbedLoss = func(table.unpack(inputs))
approxPerturbed = originalLoss + jacobian * noise
autograd.util.nestedSet(inputs[1], key, var)
end
-- Error:
local err = math.abs((perturbedLoss - approxPerturbed)) /
(math.max(math.abs(perturbedLoss), math.abs(originalLoss))+perturbation)
-- Threshold?
local pass = err < threshold
if not pass then
print('original loss = '..originalLoss)
print('perturbed loss = '..perturbedLoss)
print('approximated perturbed loss = '..approxPerturbed)
print('error = ' .. err)
end
return pass, err
end
local function gradcheckvar(func, inputs, key, randomizeInput)
local var = autograd.util.nestedGet(inputs[1], key)
local isTensorVar = torch.isTensor(var)
-- Random input:
if randomizeInput then
if isTensorVar then
var:uniform(-1,1)
else
autograd.util.nestedSet(inputs[1], key, 2*math.random()-1)
end
end
-- Estimate grads with fprop:
local jacobian1 = jacobianFromFiniteDifferences(func, inputs, key)
-- Coded grads:
local jacobian2 = jacobianFromAutograd(func, inputs, key)
-- Error:
local err
if isTensorVar then
err = (jacobian1 - jacobian2):abs():max()
else
err = torch.abs(jacobian1 - jacobian2)
end
-- Threshold?
local pass = err < threshold
if not pass then
print('error = ' .. err)
end
return pass
end
-- Test grads:
return function(opt)
-- Options
local randomizeInput = opt.randomizeInput
if randomizeInput == nil then
randomizeInput = true
end
-- Run grad check:
local function gradcheck(func, ...)
local args = {...}
-- get all vars:
local vars, keys = autograd.util.sortedFlattenKeys(args[1])
local max_err = 0
local ok = true
for i,key in ipairs(keys) do
local t, err = gradcheckvar2(func, args, key, randomizeInput)
ok = ok and t
if err > max_err then max_err = err end
ok = ok and gradcheckvar(func, args, key, randomizeInput)
end
print('[gradcheck2] maximum error = '..max_err)
return ok
end
-- Grad check fun:
return gradcheck
end
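-- Example usage (sketch; assumes the module is loaded via its autograd path):
--   local gradcheck = require 'autograd.gradcheck' {randomizeInput = true}
--   local ok = gradcheck(lossFn, params, input, target)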
<|start_filename|>src/model/NeuralNetwork.lua<|end_filename|>
local sequence = require 'autograd.model.common'.sequence
local hasCudnn, cudnn = pcall(require, 'cudnn')
hasCudnn = hasCudnn and cudnn
local functionalize = require('autograd.nnwrapper').functionalize
local cast = require('autograd.util').cast
if hasCudnn then
cudnn = functionalize('cudnn')
end
local nn = functionalize('nn')
local function NeuralLayer(opt, params, layers, layer2params)
-- options:
opt = opt or {}
local inputFeatures = opt.inputFeatures or 3
local outputFeatures = opt.outputFeatures or 16
local batchNormalization = opt.batchNormalization or false
local dropoutProb = opt.dropoutProb or 0
local activations = opt.activations
local cuda = opt.cuda or false
-- container
layers = layers or {}
params = params or {}
layer2params = layer2params or {}
-- Dropout
--------------------------------------
if dropoutProb > 0 then
table.insert(layers, nn.Dropout(dropoutProb) )
end
-- Fully-connected layer
--------------------------------------
local l,p = nn.Linear(inputFeatures, outputFeatures)
table.insert(layers, l)
table.insert(params, p)
layer2params[#layers] = #params
-- Batch normalization
--------------------------------------
if batchNormalization then
local l,p = nn.BatchNormalization(outputFeatures)
table.insert(layers, l)
table.insert(params, p)
layer2params[#layers] = #params
end
-- Activations
--------------------------------------
if opt.activations then
local activation
if hasCudnn and cuda then
activation = cudnn[activations]()
else
activation = nn[activations]()
end
table.insert(layers, activation)
end
-- layers
return sequence(layers, layer2params), params, layers
end
return function(opt, params, layers, layer2params)
-- options:
opt = opt or {}
local inputFeatures = opt.inputFeatures or 10
local hiddenFeatures = opt.hiddenFeatures or {100,2}
local batchNormalization = opt.batchNormalization or false
local dropoutProb = opt.dropoutProb or 0
local dropoutProbs = opt.dropoutProbs or {}
local activations = opt.activations or 'ReLU'
local classifier = opt.classifier or false
local cuda = opt.cuda or false
-- container
layers = layers or {}
params = params or {}
layer2params = layer2params or {}
-- always add a reshape to force input dim:
table.insert(layers, nn.Reshape(inputFeatures))
-- add layers:
for i,hiddens in ipairs(hiddenFeatures) do
if classifier and i == #hiddenFeatures then
activations = nil
batchNormalization = nil
end
NeuralLayer({
inputFeatures = inputFeatures,
outputFeatures = hiddens,
dropoutProb = dropoutProbs[i] or dropoutProb,
activations = activations,
batchNormalization = batchNormalization,
cuda = cuda,
}, params, layers, layer2params)
inputFeatures = hiddens
end
-- Type cast, if CUDA
--------------------------------------
if cuda then
params = cast(params, "cuda")
end
-- layers
return sequence(layers, layer2params), params, layers
end
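-- Example usage (sketch; sizes and the module path are illustrative assumptions):
--   local mlp, params = require('autograd.model').NeuralNetwork({
--     inputFeatures = 100, hiddenFeatures = {1000, 10}, classifier = true,
--   })
--   local yhat = mlp(params, x)   -- x: [batch, 100]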
<|start_filename|>examples/train-mnist-mlp.lua<|end_filename|>
-- Libs
local grad = require 'autograd'
local util = require 'autograd.util'
local lossFuns = require 'autograd.loss'
local optim = require 'optim'
grad.optimize(true)
-- Load in MNIST
local trainData, testData, classes = require('./get-mnist.lua')()
local inputSize = trainData.x[1]:nElement()
local confusionMatrix = optim.ConfusionMatrix(classes)
-- What model to train:
local predict,f,params
-- Define our neural net
function predict(params, input, target)
local h1 = torch.tanh(input * params.W[1] + params.B[1])
local h2 = torch.tanh(h1 * params.W[2] + params.B[2])
local h3 = h2 * params.W[3] + params.B[3]
local out = util.logSoftMax(h3)
return out
end
-- Define our training loss
function f(params, input, target)
local prediction = predict(params, input, target)
local loss = lossFuns.logMultinomialLoss(prediction, target)
return loss, prediction
end
-- Define our parameters
-- [-1/sqrt(#output), 1/sqrt(#output)]
torch.manualSeed(0)
local W1 = torch.FloatTensor(inputSize,50):uniform(-1/math.sqrt(50),1/math.sqrt(50))
local B1 = torch.FloatTensor(50):fill(0)
local W2 = torch.FloatTensor(50,50):uniform(-1/math.sqrt(50),1/math.sqrt(50))
local B2 = torch.FloatTensor(50):fill(0)
local W3 = torch.FloatTensor(50,#classes):uniform(-1/math.sqrt(#classes),1/math.sqrt(#classes))
local B3 = torch.FloatTensor(#classes):fill(0)
-- Trainable parameters:
params = {
W = {W1, W2, W3},
B = {B1, B2, B3},
}
-- Get the gradients closure magically:
local df = grad(f, { optimize = true })
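-- df returns the gradient table first, followed by f's own return values,
-- i.e. grads, loss, prediction (see the update loop below).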
-- Train a neural network
for epoch = 1,100 do
print('Training Epoch #'..epoch)
for i = 1,trainData.size do
-- Next sample:
local x = trainData.x[i]:view(1,inputSize)
local y = torch.view(trainData.y[i], 1, 10)
-- Grads:
local grads, loss, prediction = df(params, x, y)
-- Update weights and biases
for i=1,#params.W do
params.W[i] = params.W[i] - grads.W[i] * 0.01
params.B[i] = params.B[i] - grads.B[i] * 0.01
end
-- Log performance:
confusionMatrix:add(prediction[1], y[1])
if i % 1000 == 0 then
print(confusionMatrix)
confusionMatrix:zero()
end
end
end
<|start_filename|>src/loss/init.lua<|end_filename|>
-- utils
local util = require 'autograd.util'
-- standard loss functions
local loss = {}
-- each loss function takes a raw output of a network (last hidden layer)
-- and produces the loss, plus the transformed output (in the case of a
-- binary cross entropy loss, the output goes through a sigmoid)
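-- Example (sketch; the require path is assumed): cross-entropy on raw logits
-- with one-hot targets:
--   local loss = require 'autograd.loss'
--   local l, yhat = loss.crossEntropy(logits, oneHotTargets)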
function loss.logMultinomialLoss(out, target)
return -torch.sum(torch.cmul(out,target))
end
function loss.logBCELoss(out, target, p)
if p then
out = out + p
end
return -torch.sum(torch.cmul(target, torch.log(out)) + torch.cmul(-target+1, torch.log(-out+1)))
end
function loss.crossEntropy(out, target)
local yhat = util.logSoftMax(out)
return -torch.sum(torch.cmul(yhat, target)), yhat
end
function loss.binaryCrossEntropy(out, target)
local p = 1e-6
local yhat = util.sigmoid(out) * (1-p*2) + p
return - torch.sum( torch.cmul(target, torch.log(yhat)) + torch.cmul((-target + 1), torch.log(-yhat + 1)) ), yhat
end
function loss.leastSquares(out, target)
local yhat = out
local diffs = out - target
local sq = torch.cmul(diffs, diffs)
return torch.sum(sq), yhat
end
function loss.margin(out, target, margin)
margin = margin or 1
local preds1 = out[torch.eq(target,1)]
local preds0 = out[torch.eq(target,0)]
local np1 = preds1:size(1)
local np0 = preds0:size(1)
local diffs = torch.expand( torch.view(preds1, np1, 1), np1, np0 ) - torch.expand( torch.view(preds0, 1, np0), np1, np0 )
diffs = -diffs + margin
local max0s = diffs[ torch.gt(diffs, 0) ]
local loss = torch.sum(max0s)
return loss, out
end
return loss
<|start_filename|>src/module/LayerNormalization.lua<|end_filename|>
local util = require 'autograd.util'
return function(opt, params)
local opt = opt or {}
local params = params or {}
local nOutputs = opt.nOutputs or 10
local p = {gain = torch.ones(1, nOutputs),
bias = torch.zeros(1, nOutputs)}
table.insert(params, p)
local function layer_norm(params, x, eps)
--[[ Layer Normalization of Ba, Kiros, and Hinton (https://arxiv.org/abs/1607.06450)
Normalizes activations x at a layer by their mean and std.
Parameters:
* `params` - Gain and bias parameters to adjust normalized output.
* `x` - ([batch, nOutputs]) tensor to be normalized.
* `eps` - Small constant to avoid divide by zero for small std.
Returns:
* `x_corrected` - ([batch,] nOutputs) layer normalized tensor.
--]]
local p = params[1] or params
local eps = eps or 1e-5
local x_in = x
if torch.nDimension(x) == 1 then
x_in = torch.view(x, 1, torch.size(x, 1))
end
local n = torch.size(x_in,2)
local mean = torch.expand(torch.mean(x_in, 2), torch.size(x_in))
local x_centered = x_in - mean
local std = torch.expand(torch.sqrt(torch.sum(torch.cmul(x_centered, x_centered) / n, 2) + eps), torch.size(x_in))
local x_normed = torch.cdiv(x_centered, std)
local gain = torch.expand(p.gain, torch.size(x_in))
local bias = torch.expand(p.bias, torch.size(x_in))
local x_corrected = torch.view(torch.cmul(x_normed, gain) + bias, torch.size(x))
return x_corrected
end
return layer_norm, params
end
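-- Example usage (sketch):
--   local LayerNorm = require 'autograd.module.LayerNormalization'
--   local ln, lnParams = LayerNorm({nOutputs = 256})
--   local y = ln(lnParams, x)   -- x: [batch, 256]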
<|start_filename|>src/auto/AutoCriterion.lua<|end_filename|>
local auto = require 'autograd.auto'
local autograd = require 'autograd.main'
-- This generates a new autograd.nn.AutoCriterion.[moduleName]
-- that takes a suitable forward function executed in :updateOutput
-- it automatically deals with the updateGradInput and accGradParameters differentiation
return function(criterionName)
-- Input fn has the following format (MSE error):
-- function(input, target)
-- local buffer = input-target
-- return torch.cmul(buffer, buffer) / (input:dim() == 2 and input:size(1) or 1)
-- end
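-- Typical construction (see benchmark/benchmark.lua in this repo for a live example):
--   local crit = autograd.nn.AutoCriterion('AutoMSE')(mseFn)
--   local loss = crit:forward(input, target)
--   local gradInput = crit:backward(input, target)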
criterionName = criterionName or error('You must provide a name for your auto-differentiated criterion.')
if not auto.factory('autograd.nn.criterion.'..criterionName) then
local AutoCriterion,parent = torch.class('autograd.nn.criterion.'..criterionName, 'nn.Criterion', auto)
local criterion = auto[criterionName]
function criterion:__init(fn)
parent.__init(self)
self.fn = fn or error('An autograd function must be specified as input to AutoCriterion')
self.fnWrapper = function(params, target)
return self.fn(params.input, target)
end
end
function criterion:validate()
if not self.validated then
local mt = getmetatable(self)
mt.validated = true
mt.f = mt.f or autograd(self.fnWrapper, { withForward = true, withGradients = true })
end
end
function criterion:updateOutput(input,y)
self:validate()
self.gradInput, self.output, self.predictions = self.f({input=input}, y)
return self.output
end
function criterion:updateGradInput(input, y)
self.gradInput = self.gradInput.input
return self.gradInput
end
end
local criterion = auto[criterionName]
return criterion
end
<|start_filename|>src/optim/init.lua<|end_filename|>
local util = require 'autograd.util'
local function wrap(optimfn)
return function(fn, state, params)
local states = { }
local flatParams = util.sortedFlatten(params)
for i = 1, #flatParams do
states[i] = util.deepCopy(state)
end
return function(...)
local out = {fn(params, ...)}
local grads, loss = out[1], out[2]
local flatGrads = util.sortedFlatten(grads)
for i = 1, #flatGrads do
local grad = flatGrads[i]
optimfn(function()
return loss, grad
end, flatParams[i], states[i])
end
return table.unpack(out)
end, states
end
end
local opt = {}
for k, v in pairs(require 'optim') do
opt[k] = wrap(v)
end
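-- Example usage (sketch; hyper-parameters and the require path are illustrative):
--   local optim = require 'autograd.optim'
--   local optimfn, states = optim.sgd(df, {learningRate = 1e-2}, params)
--   local grads, loss = optimfn(x, y)   -- params are updated in place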
return opt
<|start_filename|>src/module/SoftAttention.lua<|end_filename|>
local functionalize = require('autograd.nnwrapper').functionalize
local nn = functionalize('nn')
local LayerNorm = require 'autograd.module.LayerNormalization'
local softMax = nn.SoftMax()
return function(opt, params)
local opt = opt or {}
local params = params or {}
local layerNormalization = opt.layerNormalization or false
local hiddenFeatures = opt.hiddenFeatures or 10
local subjectFeatures = opt.subjectFeatures or 15
local subjectChoices = opt.subjectChoices or 20
local p = {W_att_subject = torch.zeros(1, 1, subjectFeatures),
W_att_h = torch.zeros(hiddenFeatures, subjectChoices),
b_att = torch.zeros(1, subjectChoices)}
local layer_norm
if layerNormalization then
-- LayerNorm returns (layer_norm_fn, params); the gain/bias table lives at params[1]
local focus_ln_params
layer_norm, focus_ln_params = LayerNorm({nOutputs = subjectChoices})
p.focus_ln_gain = focus_ln_params[1].gain
p.focus_ln_bias = focus_ln_params[1].bias
p.b_att = nil
end
table.insert(params, p)
local soft_attention = function(params, subject, h)
--[[ Soft attention over subject given hidden state.
Deterministic soft attention of Show, Attend, and Tell by Xu et al. (http://arxiv.org/abs/1502.03044)
Parameters:
* `params` - Weights to combine subject and hidden features to score choices.
* `subject` - ([batch,] subjectFeatures, subjectChoices) tensor.
* `h` - ([batch,] hiddenFeatures) tensor.
Returns:
* `attention` - ([batch,] subjectFeatures) tensor that is the expectation of the attended subject vector.
* `focus` - ([batch,] subjectChoices) tensor that is the probability of selecting any given subject choice.
--]]
local p = params[1] or params
local subject_in = subject
local h_in = h
if torch.nDimension(subject) == 2 then
subject_in = torch.view(subject, 1, torch.size(subject, 1), torch.size(subject, 2))
end
if torch.nDimension(h) == 1 then
h_in = torch.view(h, 1, torch.size(h, 1))
end
local batchSize = torch.size(subject_in, 1)
local subjectFeatures = torch.size(subject_in, 2)
local subjectChoices = torch.size(subject_in, 3)
-- Activations for each subject choice and hidden state.
local W_subject = torch.expand(p.W_att_subject, batchSize, 1, subjectFeatures)
local subject_logit = torch.squeeze(torch.bmm(W_subject, subject_in), 2)
local hidden_logit = h_in * p.W_att_h
-- Focus distribution over subject choices.
local focus_logit = subject_logit + hidden_logit
if layerNormalization then
focus_logit = layer_norm({gain = p.focus_ln_gain, bias = p.focus_ln_bias}, focus_logit)
else
focus_logit = focus_logit + torch.expand(p.b_att, batchSize, subjectChoices)
end
local focus = softMax(focus_logit)
-- Attend to choice in expectation.
local expanded_focus = torch.expand(torch.view(focus, batchSize, 1, subjectChoices), torch.size(subject_in))
local attention = torch.squeeze(torch.sum(torch.cmul(subject_in, expanded_focus), 3), 3)
if torch.nDimension(subject) == 2 then
attention = torch.squeeze(attention, 1)
focus = torch.squeeze(focus, 1)
end
return attention, focus
end
return soft_attention, params
end
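-- Example usage (sketch; sizes and the require path are illustrative assumptions):
--   local SoftAttention = require 'autograd.module.SoftAttention'
--   local attend, params = SoftAttention({hiddenFeatures = 256, subjectFeatures = 512, subjectChoices = 196})
--   local attention, focus = attend(params, subject, h)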
| twitter/autograd |
<|start_filename|>docs/articles/getting-started.html<|end_filename|>
<!DOCTYPE html>
<!-- Generated by pkgdown: do not edit by hand --><html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Getting started with the robis package • robis</title>
<!-- jquery --><script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.4.1/jquery.min.js" integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo=" crossorigin="anonymous"></script><!-- Bootstrap --><link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.4.1/css/bootstrap.min.css" integrity="sha256-bZLfwXAP04zRMK2BjiO8iu9pf4FbLqX6zitd+tIvLhE=" crossorigin="anonymous">
<script src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.4.1/js/bootstrap.min.js" integrity="sha256-nuL8/2cJ5NDSSwnKD8VqreErSWHtnEP9E7AySL+1ev4=" crossorigin="anonymous"></script><!-- bootstrap-toc --><link rel="stylesheet" href="../bootstrap-toc.css">
<script src="../bootstrap-toc.js"></script><!-- Font Awesome icons --><link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/all.min.css" integrity="sha256-mmgLkCYLUQbXn0B1SRqzHar6dCnv9oZFPEC1g1cwlkk=" crossorigin="anonymous">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/v4-shims.min.css" integrity="sha256-wZjR52fzng1pJHwx4aV2AO3yyTOXrcDW7jBpJtTwVxw=" crossorigin="anonymous">
<!-- clipboard.js --><script src="https://cdnjs.cloudflare.com/ajax/libs/clipboard.js/2.0.6/clipboard.min.js" integrity="sha256-inc5kl9MA1hkeYUt+EC3BhlIgyp/2jDIyBLS6k3UxPI=" crossorigin="anonymous"></script><!-- headroom.js --><script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/headroom.min.js" integrity="sha256-AsUX4SJE1+yuDu5+mAVzJbuYNPHj/WroHuZ8Ir/CkE0=" crossorigin="anonymous"></script><script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/jQuery.headroom.min.js" integrity="sha256-ZX/yNShbjqsohH1k95liqY9Gd8uOiE1S4vZc+9KQ1K4=" crossorigin="anonymous"></script><!-- pkgdown --><link href="../pkgdown.css" rel="stylesheet">
<script src="../pkgdown.js"></script><meta property="og:title" content="Getting started with the robis package">
<meta property="og:description" content="robis">
<!-- mathjax --><script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/MathJax.js" integrity="sha256-nvJJv9wWKEm88qvoQl9ekL2J+k/RWIsaSScxxlsrv8k=" crossorigin="anonymous"></script><script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/config/TeX-AMS-MML_HTMLorMML.js" integrity="sha256-84DKXVJXs0/F8OTMzX4UR909+jtl4G7SPypPavF+GfA=" crossorigin="anonymous"></script><!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body data-spy="scroll" data-target="#toc">
<div class="container template-article">
<header><div class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<span class="navbar-brand">
<a class="navbar-link" href="../index.html">robis</a>
<span class="version label label-default" data-toggle="tooltip" data-placement="bottom" title="Released version">2.3.12</span>
</span>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li>
<a href="../index.html">
<span class="fas fa fas fa-home fa-lg"></span>
</a>
</li>
<li>
<a href="../reference/index.html">Reference</a>
</li>
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-expanded="false">
Articles
<span class="caret"></span>
</a>
<ul class="dropdown-menu" role="menu">
<li>
<a href="../articles/getting-started.html">Getting started with the robis package</a>
</li>
</ul>
</li>
<li>
<a href="../news/index.html">Changelog</a>
</li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li>
<a href="https://github.com/iobis/robis/">
<span class="fab fa fab fa-github fa-lg"></span>
</a>
</li>
</ul>
</div>
<!--/.nav-collapse -->
</div>
<!--/.container -->
</div>
<!--/.navbar -->
</header><script src="getting-started_files/header-attrs-2.3/header-attrs.js"></script><script src="getting-started_files/accessible-code-block-0.0.1/empty-anchor.js"></script><div class="row">
<div class="col-md-9 contents">
<div class="page-header toc-ignore">
<h1 data-toc-skip>Getting started with the robis package</h1>
<small class="dont-index">Source: <a href="https://github.com/iobis/robis/blob/master/vignettes/getting-started.Rmd"><code>vignettes/getting-started.Rmd</code></a></small>
<div class="hidden name"><code>getting-started.Rmd</code></div>
</div>
<p>First load the package:</p>
<div class="sourceCode" id="cb1"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="kw"><a href="https://rdrr.io/r/base/library.html">library</a></span><span class="op">(</span><span class="va"><a href="https://github.com/iobis/robis">robis</a></span><span class="op">)</span></code></pre></div>
<div id="occurrences" class="section level2">
<h2 class="hasAnchor">
<a href="#occurrences" class="anchor"></a>Occurrences</h2>
<p>The <code><a href="../reference/occurrence.html">occurrence()</a></code> function provides access to raw occurrence data. For example, to fetch all occurrences by scientific name:</p>
<div class="sourceCode" id="cb2"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="fu"><a href="../reference/occurrence.html">occurrence</a></span><span class="op">(</span><span class="st">"Abra aequalis"</span><span class="op">)</span>
<span class="co">#> # A tibble: 767 x 99</span>
<span class="co">#> country date_year scientificNameID year scientificName superfamilyid</span>
<span class="co">#> <chr> <int> <chr> <chr> <chr> <int></span>
<span class="co">#> 1 United… 1976 urn:lsid:marine… 1976 Abra aequalis 14636</span>
<span class="co">#> 2 United… 1977 urn:lsid:marine… 1977 Abra aequalis 14636</span>
<span class="co">#> 3 <NA> 1986 urn:lsid:marine… 1986 Abra aequalis 14636</span>
<span class="co">#> 4 <NA> 2013 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 5 <NA> 1994 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 6 <NA> 1972 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 7 <NA> 1996 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 8 Colomb… 2001 urn:lsid:marine… 2001 Abra aequalis 14636</span>
<span class="co">#> 9 <NA> 2013 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 10 <NA> 2001 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> # … with 757 more rows, and 93 more variables: individualCount <chr>,</span>
<span class="co">#> # dropped <lgl>, aphiaID <int>, decimalLatitude <dbl>, subclassid <int>,</span>
<span class="co">#> # phylumid <int>, familyid <int>, basisOfRecord <chr>, subterclassid <int>,</span>
<span class="co">#> # maximumDepthInMeters <dbl>, id <chr>, day <chr>, order <chr>,</span>
<span class="co">#> # dataset_id <chr>, decimalLongitude <dbl>, collectionCode <chr>,</span>
<span class="co">#> # date_end <dbl>, speciesid <int>, superfamily <chr>, date_start <dbl>,</span>
<span class="co">#> # month <chr>, genus <chr>, bibliographicCitation <chr>, subterclass <chr>,</span>
<span class="co">#> # eventDate <chr>, superorder <chr>, coordinateUncertaintyInMeters <chr>,</span>
<span class="co">#> # absence <lgl>, superorderid <int>, genusid <int>,</span>
<span class="co">#> # originalScientificName <chr>, marine <lgl>, minimumDepthInMeters <dbl>,</span>
<span class="co">#> # infraclassid <int>, institutionCode <chr>, date_mid <dbl>,</span>
<span class="co">#> # infraclass <chr>, class <chr>, orderid <int>, waterBody <chr>,</span>
<span class="co">#> # kingdom <chr>, recordedBy <chr>, classid <int>, phylum <chr>,</span>
<span class="co">#> # species <chr>, subclass <chr>, family <chr>, kingdomid <int>,</span>
<span class="co">#> # node_id <chr>, flags <chr>, sss <dbl>, shoredistance <int>, sst <dbl>,</span>
<span class="co">#> # bathymetry <int>, dynamicProperties <chr>, catalogNumber <chr>,</span>
<span class="co">#> # locality <chr>, scientificNameAuthorship <chr>, identifiedBy <chr>,</span>
<span class="co">#> # depth <dbl>, type <chr>, taxonRemarks <chr>, occurrenceStatus <chr>,</span>
<span class="co">#> # materialSampleID <chr>, occurrenceID <chr>, ownerInstitutionCode <chr>,</span>
<span class="co">#> # samplingProtocol <chr>, taxonRank <chr>, datasetName <chr>,</span>
<span class="co">#> # datasetID <chr>, collectionID <chr>, eventID <chr>, habitat <chr>,</span>
<span class="co">#> # associatedMedia <lgl>, associatedSequences <lgl>, county <chr>,</span>
<span class="co">#> # coordinatePrecision <chr>, associatedReferences <chr>, fieldNumber <chr>,</span>
<span class="co">#> # stateProvince <chr>, preparations <chr>, occurrenceRemarks <chr>,</span>
<span class="co">#> # verbatimDepth <chr>, modified <lgl>, infraspecificEpithet <lgl>,</span>
<span class="co">#> # recordNumber <lgl>, higherGeography <chr>, continent <chr>,</span>
<span class="co">#> # typeStatus <lgl>, geodeticDatum <lgl>, specificEpithet <chr>,</span>
<span class="co">#> # georeferenceSources <lgl>, dateIdentified <chr></span></code></pre></div>
<p>Alternatively, occurrences can be fetched by AphiaID:</p>
<div class="sourceCode" id="cb3"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="fu"><a href="../reference/occurrence.html">occurrence</a></span><span class="op">(</span>taxonid <span class="op">=</span> <span class="fl">293683</span><span class="op">)</span>
<span class="co">#> # A tibble: 767 x 99</span>
<span class="co">#> country date_year scientificNameID year scientificName superfamilyid</span>
<span class="co">#> <chr> <int> <chr> <chr> <chr> <int></span>
<span class="co">#> 1 United… 1976 urn:lsid:marine… 1976 Abra aequalis 14636</span>
<span class="co">#> 2 United… 1977 urn:lsid:marine… 1977 Abra aequalis 14636</span>
<span class="co">#> 3 <NA> 1986 urn:lsid:marine… 1986 Abra aequalis 14636</span>
<span class="co">#> 4 <NA> 2013 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 5 <NA> 1994 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 6 <NA> 1972 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 7 <NA> 1996 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 8 Colomb… 2001 urn:lsid:marine… 2001 Abra aequalis 14636</span>
<span class="co">#> 9 <NA> 2013 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> 10 <NA> 2001 urn:lsid:marine… <NA> Abra aequalis 14636</span>
<span class="co">#> # … with 757 more rows, and 93 more variables: individualCount <chr>,</span>
<span class="co">#> # dropped <lgl>, aphiaID <int>, decimalLatitude <dbl>, subclassid <int>,</span>
<span class="co">#> # phylumid <int>, familyid <int>, basisOfRecord <chr>, subterclassid <int>,</span>
<span class="co">#> # maximumDepthInMeters <dbl>, id <chr>, day <chr>, order <chr>,</span>
<span class="co">#> # dataset_id <chr>, decimalLongitude <dbl>, collectionCode <chr>,</span>
<span class="co">#> # date_end <dbl>, speciesid <int>, superfamily <chr>, date_start <dbl>,</span>
<span class="co">#> # month <chr>, genus <chr>, bibliographicCitation <chr>, subterclass <chr>,</span>
<span class="co">#> # eventDate <chr>, superorder <chr>, coordinateUncertaintyInMeters <chr>,</span>
<span class="co">#> # absence <lgl>, superorderid <int>, genusid <int>,</span>
<span class="co">#> # originalScientificName <chr>, marine <lgl>, minimumDepthInMeters <dbl>,</span>
<span class="co">#> # infraclassid <int>, institutionCode <chr>, date_mid <dbl>,</span>
<span class="co">#> # infraclass <chr>, class <chr>, orderid <int>, waterBody <chr>,</span>
<span class="co">#> # kingdom <chr>, recordedBy <chr>, classid <int>, phylum <chr>,</span>
<span class="co">#> # species <chr>, subclass <chr>, family <chr>, kingdomid <int>,</span>
<span class="co">#> # node_id <chr>, flags <chr>, sss <dbl>, shoredistance <int>, sst <dbl>,</span>
<span class="co">#> # bathymetry <int>, dynamicProperties <chr>, catalogNumber <chr>,</span>
<span class="co">#> # locality <chr>, scientificNameAuthorship <chr>, identifiedBy <chr>,</span>
<span class="co">#> # depth <dbl>, type <chr>, taxonRemarks <chr>, occurrenceStatus <chr>,</span>
<span class="co">#> # materialSampleID <chr>, occurrenceID <chr>, ownerInstitutionCode <chr>,</span>
<span class="co">#> # samplingProtocol <chr>, taxonRank <chr>, datasetName <chr>,</span>
<span class="co">#> # datasetID <chr>, collectionID <chr>, eventID <chr>, habitat <chr>,</span>
<span class="co">#> # associatedMedia <lgl>, associatedSequences <lgl>, county <chr>,</span>
<span class="co">#> # coordinatePrecision <chr>, associatedReferences <chr>, fieldNumber <chr>,</span>
<span class="co">#> # stateProvince <chr>, preparations <chr>, occurrenceRemarks <chr>,</span>
<span class="co">#> # verbatimDepth <chr>, modified <lgl>, infraspecificEpithet <lgl>,</span>
<span class="co">#> # recordNumber <lgl>, higherGeography <chr>, continent <chr>,</span>
<span class="co">#> # typeStatus <lgl>, geodeticDatum <lgl>, specificEpithet <chr>,</span>
<span class="co">#> # georeferenceSources <lgl>, dateIdentified <chr></span></code></pre></div>
<p>Other parameters include <code>geometry</code>, which accepts polygons in WKT format:</p>
<div class="sourceCode" id="cb4"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="fu"><a href="../reference/occurrence.html">occurrence</a></span><span class="op">(</span><span class="st">"Abra alba"</span>, geometry <span class="op">=</span> <span class="st">"POLYGON ((2.59689 51.16772, 2.62436 51.14059, 2.76066 51.19225, 2.73216 51.20946, 2.59689 51.16772))"</span><span class="op">)</span>
<span class="co">#> # A tibble: 319 x 85</span>
<span class="co">#> date_year scientificNameID year scientificName superfamilyid individualCount</span>
<span class="co">#> <int> <chr> <chr> <chr> <int> <chr> </span>
<span class="co">#> 1 1984 urn:lsid:marine… 1984 Abra alba 14636 23.0 </span>
<span class="co">#> 2 2009 urn:lsid:marine… 2009 Abra alba 14636 <NA> </span>
<span class="co">#> 3 2002 urn:lsid:marine… 2002 Abra alba 14636 <NA> </span>
<span class="co">#> 4 2002 urn:lsid:marine… 2002 Abra alba 14636 <NA> </span>
<span class="co">#> 5 1995 urn:lsid:marine… 1995 Abra alba 14636 78.0 </span>
<span class="co">#> 6 2012 urn:lsid:marine… 2012 Abra alba 14636 <NA> </span>
<span class="co">#> 7 1999 urn:lsid:marine… 1999 Abra alba 14636 15.0 </span>
<span class="co">#> 8 2010 urn:lsid:marine… 2010 Abra alba 14636 <NA> </span>
<span class="co">#> 9 1989 urn:lsid:marine… 1989 Abra alba 14636 47.0 </span>
<span class="co">#> 10 1991 urn:lsid:marine… 1991 Abra alba 14636 103.0 </span>
<span class="co">#> # … with 309 more rows, and 79 more variables: dropped <lgl>,</span>
<span class="co">#> # fieldNumber <chr>, aphiaID <int>, decimalLatitude <dbl>, subclassid <int>,</span>
<span class="co">#> # phylumid <int>, familyid <int>, catalogNumber <chr>,</span>
<span class="co">#> # occurrenceStatus <chr>, basisOfRecord <chr>, subterclassid <int>,</span>
<span class="co">#> # modified <chr>, maximumDepthInMeters <dbl>, id <chr>, day <chr>,</span>
<span class="co">#> # order <chr>, dataset_id <chr>, locality <chr>, decimalLongitude <dbl>,</span>
<span class="co">#> # collectionCode <chr>, date_end <dbl>, speciesid <int>, occurrenceID <chr>,</span>
<span class="co">#> # superfamily <chr>, date_start <dbl>, month <chr>, genus <chr>,</span>
<span class="co">#> # samplingProtocol <chr>, subterclass <chr>, eventDate <chr>, eventID <chr>,</span>
<span class="co">#> # superorder <chr>, absence <lgl>, samplingEffort <chr>, superorderid <int>,</span>
<span class="co">#> # genusid <int>, originalScientificName <chr>, marine <lgl>,</span>
<span class="co">#> # minimumDepthInMeters <dbl>, infraclassid <int>, institutionCode <chr>,</span>
<span class="co">#> # date_mid <dbl>, infraclass <chr>, class <chr>, orderid <int>, sex <chr>,</span>
<span class="co">#> # geodeticDatum <chr>, kingdom <chr>, recordedBy <chr>, classid <int>,</span>
<span class="co">#> # phylum <chr>, lifeStage <chr>, species <chr>, subclass <chr>,</span>
<span class="co">#> # datasetID <chr>, family <chr>, kingdomid <int>, node_id <chr>, flags <chr>,</span>
<span class="co">#> # sss <dbl>, shoredistance <int>, sst <dbl>, bathymetry <dbl>,</span>
<span class="co">#> # language <chr>, footprintSRS <chr>, datasetName <chr>, country <chr>,</span>
<span class="co">#> # references <chr>, dynamicProperties <chr>, bibliographicCitation <chr>,</span>
<span class="co">#> # continent <chr>, scientificNameAuthorship <chr>, specificEpithet <chr>,</span>
<span class="co">#> # verbatimDepth <chr>, occurrenceRemarks <chr>, footprintWKT <chr>,</span>
<span class="co">#> # locationID <chr>, depth <dbl>, eventTime <chr></span></code></pre></div>
<p>A convenience function <code><a href="../reference/map_leaflet.html">map_leaflet()</a></code> is provided to visualize occurrences on an interactive map:</p>
<div class="sourceCode" id="cb5"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="fu"><a href="../reference/map_leaflet.html">map_leaflet</a></span><span class="op">(</span><span class="fu"><a href="../reference/occurrence.html">occurrence</a></span><span class="op">(</span><span class="st">"Abra sibogai"</span><span class="op">)</span><span class="op">)</span></code></pre></div>
</div>
<div id="checklists" class="section level2">
<h2 class="hasAnchor">
<a href="#checklists" class="anchor"></a>Checklists</h2>
<p>The <code><a href="../reference/checklist.html">checklist()</a></code> function returns all taxa observed for a given set of filters.</p>
<div class="sourceCode" id="cb6"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="fu"><a href="../reference/checklist.html">checklist</a></span><span class="op">(</span><span class="st">"Semelidae"</span><span class="op">)</span>
<span class="co">#> # A tibble: 108 x 38</span>
<span class="co">#> scientificName scientificNameA… taxonID taxonRank taxonomicStatus</span>
<span class="co">#> <chr> <chr> <int> <chr> <chr> </span>
<span class="co">#> 1 Abra alba (<NAME>, 1802) 141433 Species accepted </span>
<span class="co">#> 2 Abra nitida (<NAME>, … 141435 Species accepted </span>
<span class="co">#> 3 Scrobicularia… (da Costa, 1778) 141424 Species accepted </span>
<span class="co">#> 4 Abra prismati… (Montagu, 1808) 141436 Species accepted </span>
<span class="co">#> 5 Abra tenuis (Montagu, 1803) 141439 Species accepted </span>
<span class="co">#> 6 Abra Lamarck, 1818 138474 Genus accepted </span>
<span class="co">#> 7 Theora lubrica Gould, 1861 233903 Species accepted </span>
<span class="co">#> 8 Semelidae Stoliczka, 1870… 1781 Family accepted </span>
<span class="co">#> 9 Abra aequalis (Say, 1822) 293683 Species accepted </span>
<span class="co">#> 10 Abra segmentum (Récluz, 1843) 141438 Species accepted </span>
<span class="co">#> # … with 98 more rows, and 33 more variables: acceptedNameUsage <chr>,</span>
<span class="co">#> # acceptedNameUsageID <int>, is_marine <lgl>, is_brackish <lgl>,</span>
<span class="co">#> # kingdom <chr>, phylum <chr>, class <chr>, subclass <chr>, infraclass <chr>,</span>
<span class="co">#> # subterclass <chr>, superorder <chr>, order <chr>, superfamily <chr>,</span>
<span class="co">#> # family <chr>, kingdomid <int>, phylumid <int>, classid <int>,</span>
<span class="co">#> # subclassid <int>, infraclassid <int>, subterclassid <int>,</span>
<span class="co">#> # superorderid <int>, orderid <int>, superfamilyid <int>, familyid <int>,</span>
<span class="co">#> # records <int>, genus <chr>, genusid <int>, species <chr>, speciesid <int>,</span>
<span class="co">#> # is_freshwater <lgl>, is_terrestrial <lgl>, subspecies <chr>,</span>
<span class="co">#> # subspeciesid <int></span></code></pre></div>
<p>Just like the <code><a href="../reference/occurrence.html">occurrence()</a></code> function, <code><a href="../reference/checklist.html">checklist()</a></code> accepts WKT geometries:</p>
<div class="sourceCode" id="cb7"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="fu"><a href="../reference/checklist.html">checklist</a></span><span class="op">(</span>geometry <span class="op">=</span> <span class="st">"POLYGON ((2.59689 51.16772, 2.62436 51.14059, 2.76066 51.19225, 2.73216 51.20946, 2.59689 51.16772))"</span><span class="op">)</span>
<span class="co">#> # A tibble: 901 x 69</span>
<span class="co">#> scientificName taxonID taxonomicStatus acceptedNameUsa… acceptedNameUsa…</span>
<span class="co">#> <chr> <int> <chr> <chr> <int></span>
<span class="co">#> 1 Nematoda 799 accepted Nematoda 799</span>
<span class="co">#> 2 Abra alba 141433 accepted Abra alba 141433</span>
<span class="co">#> 3 Sabatieria ce… 121360 accepted Sabatieria celt… 121360</span>
<span class="co">#> 4 Sabatieria pu… 153130 accepted Sabatieria punc… 153130</span>
<span class="co">#> 5 Spiophanes bo… 131187 accepted Spiophanes bomb… 131187</span>
<span class="co">#> 6 Kurtiella bid… 345281 accepted Kurtiella biden… 345281</span>
<span class="co">#> 7 Nephtys hombe… 130359 accepted Nephtys homberg… 130359</span>
<span class="co">#> 8 Oligochaeta 2036 accepted Oligochaeta 2036</span>
<span class="co">#> 9 Cirratulidae 919 accepted Cirratulidae 919</span>
<span class="co">#> 10 Fabulina fabu… 146907 accepted Fabulina fabula 146907</span>
<span class="co">#> # … with 891 more rows, and 64 more variables: is_marine <lgl>,</span>
<span class="co">#> # is_brackish <lgl>, is_freshwater <lgl>, is_terrestrial <lgl>,</span>
<span class="co">#> # records <int>, taxonRank <chr>, kingdom <chr>, kingdomid <int>,</span>
<span class="co">#> # phylum <chr>, phylumid <int>, scientificNameAuthorship <chr>, class <chr>,</span>
<span class="co">#> # classid <int>, subclass <chr>, order <chr>, superfamily <chr>,</span>
<span class="co">#> # family <chr>, subclassid <int>, orderid <int>, superfamilyid <int>,</span>
<span class="co">#> # familyid <int>, infraclass <chr>, infraclassid <int>, subterclass <chr>,</span>
<span class="co">#> # superorder <chr>, subterclassid <int>, superorderid <int>, suborder <chr>,</span>
<span class="co">#> # suborderid <int>, subfamily <chr>, subfamilyid <int>, subphylum <chr>,</span>
<span class="co">#> # subphylumid <int>, superclass <chr>, superclassid <int>, subkingdom <chr>,</span>
<span class="co">#> # infrakingdom <chr>, subkingdomid <int>, infrakingdomid <int>, genus <chr>,</span>
<span class="co">#> # genusid <int>, infraphylum <chr>, infraphylumid <int>, species <chr>,</span>
<span class="co">#> # speciesid <int>, infraorder <chr>, parvorder <chr>, infraorderid <int>,</span>
<span class="co">#> # parvorderid <int>, tribe <chr>, tribeid <int>, subgenus <chr>,</span>
<span class="co">#> # subgenusid <int>, category <chr>, section <chr>, subsection <chr>,</span>
<span class="co">#> # sectionid <int>, subsectionid <int>, subspecies <chr>, subspeciesid <int>,</span>
<span class="co">#> # variety <chr>, varietyid <int>, forma <chr>, formaid <int></span></code></pre></div>
</div>
<div id="measurements-and-facts" class="section level2">
<h2 class="hasAnchor">
<a href="#measurements-and-facts" class="anchor"></a>Measurements and facts</h2>
<p>The package also provides access to <code>MeasurementOrFact</code> records associated with occurrences. When calling <code><a href="../reference/occurrence.html">occurrence()</a></code>, <code>MeasurementOrFact</code> records can be included by setting <code>mof = TRUE</code>.</p>
<div class="sourceCode" id="cb8"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="va">occ</span> <span class="op"><-</span> <span class="fu"><a href="../reference/occurrence.html">occurrence</a></span><span class="op">(</span><span class="st">"Abra tenuis"</span>, mof <span class="op">=</span> <span class="cn">TRUE</span><span class="op">)</span></code></pre></div>
<p><code>MeasurementOrFact</code> records are nested in the occurrence, but the <code><a href="../reference/measurements.html">measurements()</a></code> function allows you to extract them to a flat data frame. Use the <code>fields</code> parameter to indicate which occurrence fields need to be preserved in the measurements table.</p>
<div class="sourceCode" id="cb9"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="va">mof</span> <span class="op"><-</span> <span class="fu"><a href="../reference/measurements.html">measurements</a></span><span class="op">(</span><span class="va">occ</span>, fields <span class="op">=</span> <span class="fu"><a href="https://rdrr.io/r/base/c.html">c</a></span><span class="op">(</span><span class="st">"scientificName"</span>, <span class="st">"decimalLongitude"</span>, <span class="st">"decimalLatitude"</span><span class="op">)</span><span class="op">)</span>
<span class="va">mof</span>
<span class="co">#> # A tibble: 19,355 x 17</span>
<span class="co">#> scientificName decimalLongitude decimalLatitude measurementDete…</span>
<span class="co">#> <chr> <dbl> <dbl> <chr> </span>
<span class="co">#> 1 Abra tenuis -1.20 46.3 <NA> </span>
<span class="co">#> 2 Abra tenuis -1.20 46.3 <NA> </span>
<span class="co">#> 3 Abra tenuis -1.20 46.3 <NA> </span>
<span class="co">#> 4 Abra tenuis -1.20 46.3 <NA> </span>
<span class="co">#> 5 Abra tenuis -1.20 46.3 <NA> </span>
<span class="co">#> 6 Abra tenuis -1.18 46.3 <NA> </span>
<span class="co">#> 7 Abra tenuis -1.18 46.3 <NA> </span>
<span class="co">#> 8 Abra tenuis -1.18 46.3 <NA> </span>
<span class="co">#> 9 Abra tenuis -1.18 46.3 <NA> </span>
<span class="co">#> 10 Abra tenuis -1.18 46.3 <NA> </span>
<span class="co">#> # … with 19,345 more rows, and 13 more variables: measurementAccuracy <chr>,</span>
<span class="co">#> # measurementValue <chr>, measurementRemarks <chr>, measurementValueID <chr>,</span>
<span class="co">#> # level <int>, occurrenceID <chr>, measurementUnit <chr>,</span>
<span class="co">#> # measurementDeterminedDate <chr>, measurementType <chr>,</span>
<span class="co">#> # measurementUnitID <chr>, measurementTypeID <chr>, measurementID <chr>,</span>
<span class="co">#> # measurementMethod <chr></span></code></pre></div>
<p>Note that the <code>MeasurementOrFact</code> fields can be used as parameters to the <code><a href="../reference/occurrence.html">occurrence()</a></code> function. For example, to only get occurrences with associated biomass measurements:</p>
<div class="sourceCode" id="cb10"><pre class="downlit sourceCode r">
<code class="sourceCode R"><span class="kw"><a href="https://rdrr.io/r/base/library.html">library</a></span><span class="op">(</span><span class="va"><a href="https://dplyr.tidyverse.org">dplyr</a></span><span class="op">)</span>
<span class="fu"><a href="../reference/occurrence.html">occurrence</a></span><span class="op">(</span><span class="st">"Abra tenuis"</span>, mof <span class="op">=</span> <span class="cn">TRUE</span>, measurementtype <span class="op">=</span> <span class="st">"biomass"</span><span class="op">)</span> <span class="op">%>%</span>
<span class="fu"><a href="../reference/measurements.html">measurements</a></span><span class="op">(</span><span class="op">)</span>
<span class="co">#> # A tibble: 29 x 15</span>
<span class="co">#> id measurementDete… measurementAccu… measurementValue measurementRema…</span>
<span class="co">#> <chr> <lgl> <lgl> <chr> <lgl> </span>
<span class="co">#> 1 1878… NA NA mire NA </span>
<span class="co">#> 2 1878… NA NA 0,12 NA </span>
<span class="co">#> 3 267d… NA NA mire NA </span>
<span class="co">#> 4 267d… NA NA 0,11 NA </span>
<span class="co">#> 5 26d2… NA NA mire NA </span>
<span class="co">#> 6 26d2… NA NA 0,25 NA </span>
<span class="co">#> 7 2924… NA NA mire, putrido NA </span>
<span class="co">#> 8 2924… NA NA 0,05 NA </span>
<span class="co">#> 9 3534… NA NA mire with debris NA </span>
<span class="co">#> 10 3534… NA NA 0,05 NA </span>
<span class="co">#> # … with 19 more rows, and 10 more variables: measurementValueID <lgl>,</span>
<span class="co">#> # level <int>, occurrenceID <chr>, measurementUnit <chr>,</span>
<span class="co">#> # measurementDeterminedDate <lgl>, measurementType <chr>,</span>
<span class="co">#> # measurementUnitID <chr>, measurementTypeID <lgl>, measurementID <lgl>,</span>
<span class="co">#> # measurementMethod <lgl></span></code></pre></div>
</div>
</div>
<div class="col-md-3 hidden-xs hidden-sm" id="pkgdown-sidebar">
<nav id="toc" data-toggle="toc"><h2 data-toc-skip>Contents</h2>
</nav>
</div>
</div>
<footer><div class="copyright">
<p>Developed by <NAME>, <NAME>.</p>
</div>
<div class="pkgdown">
<p>Site built with <a href="https://pkgdown.r-lib.org/">pkgdown</a> 1.6.1.</p>
</div>
</footer>
</div>
</body>
</html>
| iobis/robis |
<|start_filename|>store/types/gas_test.go<|end_filename|>
package types
import (
"math"
"testing"
"github.com/stretchr/testify/require"
)
func TestGasMeter(t *testing.T) {
cases := []struct {
limit Gas
usage []Gas
}{
{10, []Gas{1, 2, 3, 4}},
{1000, []Gas{40, 30, 20, 10, 900}},
{100000, []Gas{99999, 1}},
{100000000, []Gas{50000000, 40000000, 10000000}},
{65535, []Gas{32768, 32767}},
{65536, []Gas{32768, 32767, 1}},
}
for tcnum, tc := range cases {
meter := NewGasMeter(tc.limit)
used := uint64(0)
for unum, usage := range tc.usage {
used += usage
require.NotPanics(t, func() { meter.ConsumeGas(usage, "") }, "Not exceeded limit but panicked. tc #%d, usage #%d", tcnum, unum)
require.Equal(t, used, meter.GasConsumed(), "Gas consumption not match. tc #%d, usage #%d", tcnum, unum)
require.Equal(t, used, meter.GasConsumedToLimit(), "Gas consumption (to limit) not match. tc #%d, usage #%d", tcnum, unum)
require.False(t, meter.IsPastLimit(), "Not exceeded limit but got IsPastLimit() true")
if unum < len(tc.usage)-1 {
require.False(t, meter.IsOutOfGas(), "Not yet at limit but got IsOutOfGas() true")
} else {
require.True(t, meter.IsOutOfGas(), "At limit but got IsOutOfGas() false")
}
}
require.Panics(t, func() { meter.ConsumeGas(1, "") }, "Exceeded but not panicked. tc #%d", tcnum)
require.Equal(t, meter.GasConsumedToLimit(), meter.Limit(), "Gas consumption (to limit) not match limit")
require.Equal(t, meter.GasConsumed(), meter.Limit()+1, "Gas consumption not match limit+1")
}
}
func TestAddUint64Overflow(t *testing.T) {
testCases := []struct {
a, b uint64
result uint64
overflow bool
}{
{0, 0, 0, false},
{100, 100, 200, false},
{math.MaxUint64 / 2, math.MaxUint64/2 + 1, math.MaxUint64, false},
{math.MaxUint64 / 2, math.MaxUint64/2 + 2, 0, true},
}
for i, tc := range testCases {
res, overflow := addUint64Overflow(tc.a, tc.b)
require.Equal(
t, tc.overflow, overflow,
"invalid overflow result; tc: #%d, a: %d, b: %d", i, tc.a, tc.b,
)
require.Equal(
t, tc.result, res,
"invalid uint64 result; tc: #%d, a: %d, b: %d", i, tc.a, tc.b,
)
}
}
<|start_filename|>x/params/doc.go<|end_filename|>
package params
/*
Package params provides a globally available parameter store.
There are two main types, Keeper and Subspace. Subspace is an isolated namespace for a
paramstore, where keys are prefixed by a preconfigured spacename. Keeper has
permission to access all existing spaces.
Subspace can be used by individual keepers, which need a private parameter store
that other keepers cannot modify. Keeper can be used by the Governance keeper,
which needs to modify any parameter in case a proposal passes.
Basic Usage:
First, declare a parameter space and parameter keys for the module. Then include
params.Subspace in the keeper. Since we prefix the keys with the spacename, it is
recommended to use the same name as the module's.
const (
DefaultParamspace = "mymodule"
)
var (
KeyParameter1 = []byte("myparameter1")
KeyParameter2 = []byte("myparameter2")
)
type Keeper struct {
cdc *codec.Codec
key sdk.StoreKey
ps params.Subspace
}
func ParamKeyTable() params.KeyTable {
return params.NewKeyTable(
KeyParameter1, MyStruct{},
KeyParameter2, MyStruct{},
)
}
func NewKeeper(cdc *codec.Codec, key sdk.StoreKey, ps params.Subspace) Keeper {
return Keeper {
cdc: cdc,
key: key,
ps: ps.WithKeyTable(ParamKeyTable()),
}
}
Pass a params.Subspace to NewKeeper with DefaultParamspace (or another)
app.myKeeper = mymodule.NewKeeper(app.paramStore.SubStore(mymodule.DefaultParamspace))
Now we can access the paramstore using the paramstore keys
var param MyStruct
k.ps.Get(ctx, KeyParameter1, &param)
k.ps.Set(ctx, KeyParameter2, param)
If you want to store an unknown number of parameters, or want to store a mapping,
you can use subkeys. Subkeys can be used with a main key, where the subkeys
inherit the key properties.
func ParamKeyTable() params.KeyTable {
return params.NewKeyTable(
KeyParamMain, MyStruct{},
)
}
func (k Keeper) GetDynamicParameter(ctx sdk.Context, subkey []byte) (res MyStruct) {
k.ps.GetWithSubkey(ctx, KeyParamMain, subkey, &res)
}
Genesis Usage:
Declare a struct for parameters and make it implement params.ParamSet. It will then
be able to be passed to SetParamSet.
type MyParams struct {
Parameter1 uint64
Parameter2 string
}
// Implements params.ParamSet
// ParamSetPairs must return the list of (ParamKey, PointerToTheField)
func (p *MyParams) ParamSetPairs() params.ParamSetPairs {
return params.ParamSetPairs{
{KeyParameter1, &p.Parameter1},
{KeyParameter2, &p.Parameter2},
}
}
func InitGenesis(ctx sdk.Context, k Keeper, data GenesisState) {
k.ps.SetParamSet(ctx, &data.params)
}
The method has a pointer receiver because there could be a case where we read from
the store and set the result on the struct.
Master Keeper Usage:
Keepers that require master permission to the paramstore, such as gov, can take
params.Keeper itself to access all subspaces (using GetSubspace)
type MasterKeeper struct {
pk params.Keeper
}
func (k MasterKeeper) SetParam(ctx sdk.Context, space string, key string, param interface{}) {
space, ok := k.pk.GetSubspace(space)
if !ok {
return
}
space.Set(ctx, key, param)
}
*/
<|start_filename|>x/nft/internal/types/events.go<|end_filename|>
package types
// NFT module event types
var (
EventTypeTransfer = "transfer_nft"
EventTypeEditNFTMetadata = "edit_nft_metadata"
EventTypeMintNFT = "mint_nft"
EventTypeBurnNFT = "burn_nft"
AttributeValueCategory = ModuleName
AttributeKeySender = "sender"
AttributeKeyRecipient = "recipient"
AttributeKeyOwner = "owner"
AttributeKeyNFTID = "nft-id"
AttributeKeyNFTTokenURI = "token-uri"
AttributeKeyDenom = "denom"
)
<|start_filename|>x/nft/exported/nft.go<|end_filename|>
package exported
import (
sdk "github.com/cosmos/cosmos-sdk/types"
)
// NFT non fungible token interface
type NFT interface {
GetID() string
GetOwner() sdk.AccAddress
SetOwner(address sdk.AccAddress)
GetTokenURI() string
EditMetadata(tokenURI string)
String() string
}
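// Illustrative sketch only: a concrete type satisfying NFT might look like the following
// (the struct and field names here are assumptions made for this comment, not part of the package).
//
//	type exampleNFT struct {
//		id       string
//		owner    sdk.AccAddress
//		tokenURI string
//	}
//
//	func (n *exampleNFT) GetID() string                   { return n.id }
//	func (n *exampleNFT) GetOwner() sdk.AccAddress        { return n.owner }
//	func (n *exampleNFT) SetOwner(address sdk.AccAddress) { n.owner = address }
//	func (n *exampleNFT) GetTokenURI() string             { return n.tokenURI }
//	func (n *exampleNFT) EditMetadata(tokenURI string)    { n.tokenURI = tokenURI }
//	func (n *exampleNFT) String() string                  { return n.id }
//
//	var _ NFT = (*exampleNFT)(nil)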
<|start_filename|>x/gov/types/events.go<|end_filename|>
package types
// Governance module event types
const (
EventTypeSubmitProposal = "submit_proposal"
EventTypeProposalDeposit = "proposal_deposit"
EventTypeProposalVote = "proposal_vote"
EventTypeInactiveProposal = "inactive_proposal"
EventTypeActiveProposal = "active_proposal"
AttributeKeyProposalResult = "proposal_result"
AttributeKeyOption = "option"
AttributeKeyProposalID = "proposal_id"
AttributeKeyVotingPeriodStart = "voting_period_start"
AttributeValueCategory = "governance"
AttributeValueProposalDropped = "proposal_dropped" // didn't meet min deposit
AttributeValueProposalPassed = "proposal_passed" // met vote quorum
AttributeValueProposalRejected = "proposal_rejected" // didn't meet vote quorum
AttributeValueProposalFailed = "proposal_failed" // error on proposal handler
)
<|start_filename|>docs/.vuepress/enhanceApp.js<|end_filename|>
import axios from 'axios'
import Vue from 'vue'
Vue.use({
install (Vue) {
Vue.prototype.$axios = axios.create()
}
})
<|start_filename|>x/params/subspace/table.go<|end_filename|>
package subspace
import (
"reflect"
)
type attribute struct {
ty reflect.Type
}
// KeyTable stores the expected type for each parameter key
type KeyTable struct {
m map[string]attribute
}
// NewKeyTable constructs a new table from (key, type) pairs
func NewKeyTable(keytypes ...interface{}) (res KeyTable) {
if len(keytypes)%2 != 0 {
panic("odd number arguments in NewTypeKeyTable")
}
res = KeyTable{
m: make(map[string]attribute),
}
for i := 0; i < len(keytypes); i += 2 {
res = res.RegisterType(keytypes[i].([]byte), keytypes[i+1])
}
return
}
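// A usage sketch (illustrative only; the key names and zero values below are assumptions):
//
//	table := NewKeyTable(
//		[]byte("MaxValidators"), uint16(0),
//		[]byte("BondDenom"), "",
//	)
//
// Keys must be alphanumeric []byte values, and each key is paired with a zero value of its type.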
func isAlphaNumeric(key []byte) bool {
for _, b := range key {
if !((48 <= b && b <= 57) || // numeric
(65 <= b && b <= 90) || // upper case
(97 <= b && b <= 122)) { // lower case
return false
}
}
return true
}
// RegisterType registers a single key-type pair
func (t KeyTable) RegisterType(key []byte, ty interface{}) KeyTable {
if len(key) == 0 {
panic("cannot register empty key")
}
if !isAlphaNumeric(key) {
panic("non alphanumeric parameter key")
}
keystr := string(key)
if _, ok := t.m[keystr]; ok {
panic("duplicate parameter key")
}
rty := reflect.TypeOf(ty)
// Indirect rty if it is ptr
if rty.Kind() == reflect.Ptr {
rty = rty.Elem()
}
t.m[keystr] = attribute{
ty: rty,
}
return t
}
// RegisterParamSet registers multiple pairs from a ParamSet
func (t KeyTable) RegisterParamSet(ps ParamSet) KeyTable {
for _, kvp := range ps.ParamSetPairs() {
t = t.RegisterType(kvp.Key, kvp.Value)
}
return t
}
func (t KeyTable) maxKeyLength() (res int) {
for k := range t.m {
l := len(k)
if l > res {
res = l
}
}
return
}
<|start_filename|>types/router.go<|end_filename|>
package types
// Router provides handlers for each transaction type.
type Router interface {
AddRoute(r string, h Handler) Router
Route(path string) Handler
}
// QueryRouter provides queryables for each query path.
type QueryRouter interface {
AddRoute(r string, h Querier) QueryRouter
Route(path string) Querier
}
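// A minimal usage sketch (illustrative only): the concrete router value and the
// "bank" handler below are assumptions, not defined in this file.
//
//	var router Router = NewRouter()               // hypothetical constructor
//	router = router.AddRoute("bank", bankHandler)
//	handler := router.Route("bank")
//	_ = handler                                   // invoked by the app when a "bank" message arrives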
<|start_filename|>x/nft/internal/types/utils.go<|end_filename|>
package types
import "strings"
// Findable is an interface for iterable types that allows the FindUtil function to work
type Findable interface {
ElAtIndex(index int) string
Len() int
}
// FindUtil is a binary search function for types that support the Findable interface (elements must be sorted)
func FindUtil(group Findable, el string) int {
if group.Len() == 0 {
return -1
}
low := 0
high := group.Len() - 1
median := 0
for low <= high {
median = (low + high) / 2
switch compare := strings.Compare(group.ElAtIndex(median), el); {
case compare == 0:
// if group[median].element == el
return median
case compare == -1:
// if group[median].element < el
low = median + 1
default:
// if group[median].element > el
high = median - 1
}
}
return -1
}
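// A usage sketch (illustrative only; sortedStrings is a hypothetical type, not part of this package):
//
//	type sortedStrings []string
//
//	func (s sortedStrings) ElAtIndex(index int) string { return s[index] }
//	func (s sortedStrings) Len() int                   { return len(s) }
//
//	// FindUtil(sortedStrings{"a", "b", "d"}, "b") == 1
//	// FindUtil(sortedStrings{"a", "b", "d"}, "c") == -1 (not present)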
| stakewithus/cosmos-sdk |
<|start_filename|>Compat.Configuration.Install.Tests/System.Configuration.Install/InstallContextTests.cs<|end_filename|>
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace System.Configuration.Install.Tests.System.Configuration.Install
{
[TestClass]
public class InstallContextTests
{
[TestMethod]
public void Should_Parse_Command_Parameters()
{
var installContext = new InstallContext("/var/log/log.log",new []{"-LogToConsole=true"});
Assert.AreEqual("/var/log/log.log", installContext.Parameters["logFile"]);
Assert.AreEqual("true", installContext.Parameters["LogToConsole"]);
}
[TestMethod]
public void Should_Return_Parameter_True_For_Parameters()
{
var installContext = new InstallContext("/var/log/log.log", new[] { "/whatever", "/i", "-debug" });
Assert.IsTrue(installContext.IsParameterTrue("debug"));
Assert.IsTrue(installContext.IsParameterTrue("Debug"));
Assert.IsTrue(installContext.IsParameterTrue("i"));
Assert.IsTrue(installContext.IsParameterTrue("I"));
Assert.IsTrue(installContext.IsParameterTrue("whatever"));
Assert.IsTrue(installContext.IsParameterTrue("Whatever"));
}
}
} | cklutz/Compat.Configuration.Install |
<|start_filename|>计算机专业课/设计模式/Decorator-Pattern/Makefile<|end_filename|>
all:
g++ -g -Wall -o main main.cpp
rm:
rm main
<|start_filename|>plugin/css/me.css<|end_filename|>
/* End-of-article marker */
.over {
width: 40px;
height: 40px;
border: 1px solid #000001;
border-radius: 50%;
text-align: center;
line-height: 40px;
margin: 20px auto 5px;
text-shadow: 0 0 black;
}
<|start_filename|>计算机专业课/设计模式/Template Method/Makefile<|end_filename|>
all:
g++ -g -Wall -o template1 template1_lib.cpp template1_app.cpp
g++ -g -Wall -o template2 template2_lib.cpp template2_app.cpp
clean:
rm -rf template1 template2
<|start_filename|>计算机专业课/设计模式/Strategy-Pattern/Makefile<|end_filename|>
all:
g++ -g -Wall ./*.cpp -o main
clean:
rm main
<|start_filename|>plugin/css/friends-link.css<|end_filename|>
.friends {
display: flex;
flex-wrap: wrap;
}
.friends .a-friend {
display: flex;
margin: 10px 10px 0 0;
text-decoration: none;
font-weight: 300;
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.2);
padding: 10px;
transition: box-shadow 0.2s, transform 0.2s;
}
.friends .a-friend:hover {
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.5);
}
.friends .a-friend:active {
transform: scale(0.95);
}
.friends .a-friend .blog-avatar {
background-color: white;
border-radius: 50%;
width: 50px;
height: 50px;
}
.friends .a-friend .text-container {
margin-left: 10px;
}
.friends .a-friend .text-container .name {
font-size: 14px;
}
.friends .a-friend .text-container .description {
font-size: 12px;
margin-top: 5px;
}
<|start_filename|>计算机专业课/设计模式/Template-Pattern/Makefile<|end_filename|>
all:
g++ -g -Wall main.cpp -o main
clean:
rm main
| bytesfly/NoteBook |
<|start_filename|>web/resources/css/main.css<|end_filename|>
/*
* I add this to html files generated with pandoc.
*/
html {
font-size: 100%;
overflow-y: scroll;
-webkit-text-size-adjust: 100%;
-ms-text-size-adjust: 100%;
}
body {
color: #444;
font-family: Georgia, Palatino, 'Palatino Linotype', Times, 'Times New Roman', serif;
font-size: 12px;
line-height: 1.7;
padding: 1em 1em 1em 300px;
margin: auto;
max-width: 700px;
background: #fefefe;
}
.load-tests button {
padding: 7px;
cursor: pointer;
border-color: #b3b2b2;
border-radius: 10px;
opacity: 0.7;
}
.load-tests button:hover {
opacity: 1;
}
h2 {
display: block;
}
.download-section-as-pdf-text-link {
display: inline-block;
position: relative;
padding-left: 8px;
opacity: 0.7;
}
.download-section-as-pdf-text-link:hover {
opacity: 1;
}
.download-section-as-pdf-text-link img {
width: 20px;
}
.toc-body.empty{
background: url(../images/loading.gif) no-repeat center;
}
.toc-bar {
position: fixed;
left: 0;
top: 60px;
width: 300px;
height: 100%;
background: #fbfbfd;
font-size: 13px;
font-family: Arial, serif;
z-index: 2;
border-right: 1px solid rgba(39, 40, 44, 0.20);
box-sizing: border-box;
-webkit-box-shadow: -10px 0px 0px 10px rgba(0, 0, 0, 0.15);
-moz-box-shadow: 2px 5px 18px 0px rgba(0, 0, 0, 0.15);
box-shadow: 0px 5px 10px 0px rgba(0, 0, 0, 0.15);
}
.toc-body {
display: block;
position: relative;
box-sizing: border-box;
width: 299px;
overflow: auto;
height: -moz-calc(100% - 161px); /* Firefox */
height: -webkit-calc(100% - 161px); /* Chrome, Safari */
height: calc(100% - 161px); /* IE9+ and future browsers */
}
.toc-body::-webkit-scrollbar {
width: 8px;
}
.toc-body::-webkit-scrollbar-track {
-webkit-box-shadow: inset 0 0 6px rgba(183, 84, 84, 0.3);
}
.toc-body::-webkit-scrollbar-thumb {
background-color: #c5bfbf8a;
}
.toc-header {
display: block;
width: 299px;
position: relative;
font-size: 20px;
font-weight: bold;
text-align: left;
box-sizing: border-box;
z-index: 3;
padding: 10px 10px 20px 10px;
top: 0;
border-bottom: 1px solid rgba(39, 40, 44, 0.08);
}
#TOC ul a.underlined {
border-bottom: 1px solid rgba(39, 40, 44, 0.20);
}
#TOC ul {
margin: 0;
padding: 0;
}
#TOC ul a.download-section-as-pdf {
background: url(../images/pdf.png);
background-size: contain;
width: 22px;
height: 22px;
display: block;
right: 15px;
position: absolute;
margin-top: 6px;
z-index: 2;
opacity: 0.7;
}
#TOC ul a.download-section-as-pdf:hover {
opacity: 1;
}
#TOC.loaded > ul:before {
display: none;
}
#TOC ul li {
display: none;
}
#TOC li {
list-style: none;
position: relative;
}
#TOC .toc-element.active, #TOC .toc-element.active:hover {
background-color: #c1c1c1;
color: #404040;
}
#TOC .toc-element:hover {
background-color: #F4F4F4;
}
#TOC .toc-element {
display: inline-block;
width: 100%;
color: gray;
line-height: 16px;
padding: 8px 30px;
box-sizing: border-box;
outline: none;
}
#TOC > ul > li > ul::before, #TOC .toc-element.toggled ~ ul::before {
border: 4px solid transparent;
border-top: 4px solid gray;
}
#TOC li ul::before {
content: '';
cursor: pointer;
border: 4px solid transparent;
border-left: 4px solid gray;
width: 0;
height: 0;
margin-right: 7px;
display: inline-block;
margin-left: 15px;
margin-top: 12px;
top: 0;
position: absolute;
}
#TOC > ul > li > ul > li > .toc-element {
padding-left: 45px;
}
#TOC > ul > li > ul > li > ul::before {
margin-left: 30px;
}
#TOC > ul > li > ul > li > ul > li > .toc-element {
padding-left: 60px;
}
#TOC > ul > li > ul > li > ul > li > ul::before {
margin-left: 45px;
}
#TOC > ul > li > ul > li > ul > li > ul > li > .toc-element {
padding-left: 75px;
}
#TOC > ul > li > ul > li > ul > li > ul > li > ul::before {
margin-left: 60px;
}
#TOC > ul > li > ul > li > ul > li > ul > li > ul > li > .toc-element {
padding-left: 90px;
}
#toc-search-bar {
width: 100%;
z-index: 3;
box-sizing: border-box;
border: 1px solid rgba(39, 40, 44, 0.20);
padding: 5px;
border-radius: 3px;
}
.toc-section img {
outline: none;
width: 6px;
height: 6px;
}
.icon-menu {
margin-top: 16px;
margin-left: 5px;
display: none;
height: 17px;
width: 17px;
position: fixed;
cursor: pointer;
padding: 5px;
opacity: 0.7;
}
.icon-menu .divide {
display: block;
height: 2px;
margin: 3px 0;
background: #c7bcbc;
}
.icon-menu:hover {
opacity: 1;
}
@media (max-width: 1050px) {
body {
padding-left: 40px;
}
.header-bar .download-full-pdf {
top: 15px;
}
.header-bar .sentence-finder-bar {
display: none;
}
.header-bar .icon-menu {
display: block;
}
.toc-bar {
left: -310px;
}
.toc-bar.active {
left: 0;
}
.header-bar .main-page {
margin-left: 40px;
}
}
@media (max-width: 500px) {
.toc-bar.active,
.toc-bar.active .toc-bar,
.toc-bar.active .toc-body,
.toc-bar.active .toc-header {
width: 100%;
}
.toc-bar ul a.download-section-as-pdf {
right: 25px;
}
.toc-bar.active.toc-header{
padding: 10px 20px 0 0;
}
}
.header-section-number {
display: none;
}
a {
color: #0645ad;
text-decoration: none;
}
a:visited {
color: #0b0080;
}
a:hover {
color: #06e;
}
a:active {
color: #faa700;
}
*::-moz-selection {
background: rgba(255, 255, 0, 0.3);
color: #000;
}
*::selection {
background: rgba(255, 255, 0, 0.3);
color: #000;
}
a::-moz-selection {
background: rgba(255, 255, 0, 0.3);
color: #0645ad;
}
a::selection {
background: rgba(255, 255, 0, 0.3);
color: #0645ad;
}
p {
margin: 1em 0;
}
h1, h2, h3, h4, h5, h6 {
color: #111;
line-height: 125%;
margin-top: 2em;
font-weight: normal;
}
h4, h5, h6 {
font-weight: bold;
}
h1 {
font-size: 2.5em;
counter-increment: part;
counter-reset: chapter section subsection subsubsection paragraph;
}
h1.unnumbered {
font-size: 2.5em;
counter-increment: none;
counter-reset: none;
}
h1:before {
content: counter(part) "\0000a0\0000a0";
}
h1.unnumbered:before {
content: none;
}
h2 {
font-size: 2em;
counter-increment: chapter;
counter-reset: section subsection subsubsection paragraph;
}
h2.unnumbered {
font-size: 2em;
counter-increment: none;
counter-reset: none;
}
h2:before {
content: "Chapter\00a0" counter(chapter);
display: block;
margin-bottom: 2ex;
}
h2.unnumbered:before {
content: none;
}
h3 {
font-size: 1.5em;
counter-increment: section;
counter-reset: subsection subsubsection paragraph;
}
h3.unnumbered {
font-size: 1.5em;
counter-increment: none;
counter-reset: none;
}
h3:before {
content: counter(chapter) "." counter(section) "\0000a0\0000a0";
}
h3.unnumbered:before {
content: none;
}
h4 {
font-size: 1.2em;
counter-increment: subsection;
counter-reset: subsubsection paragraph;
}
h4.unnumbered {
font-size: 1.2em;
counter-increment: none;
counter-reset: none;
}
h4:before {
content: counter(chapter) "." counter(section) "." counter(subsection) "\0000a0\0000a0";
}
h4.unnumbered:before {
content: none;
}
h5 {
font-size: 1em;
counter-increment: subsubsection;
}
h5.unnumbered {
font-size: 1em;
counter-increment: none;
counter-reset: none;
}
/* uncomment if we want level 5 headers to be numbered, too */
/*h5:before {*/
/* content: counter(chapter)"."counter(section)"."counter(subsection)"."counter(subsubsection)"\0000a0\0000a0";*/
/*}*/
/*h5.unnumbered:before {*/
/* content: none;*/
/*}*/
h6 {
font-size: 0.9em;
}
blockquote {
color: #666666;
margin: 0;
padding-left: 3em;
border-left: 0.5em #EEE solid;
}
hr {
display: block;
height: 2px;
border: 0;
border-top: 1px solid #aaa;
border-bottom: 1px solid #eee;
margin: 1em 0;
padding: 0;
}
pre, code, kbd, samp {
color: #000;
font-family: monospace, monospace;
_font-family: 'courier new', monospace;
font-size: 0.98em;
}
pre {
white-space: pre;
white-space: pre-wrap;
word-wrap: break-word;
}
b, strong {
font-weight: bold;
}
dfn {
font-style: italic;
}
ins {
background: #ff9;
color: #000;
text-decoration: none;
}
mark {
background: #ff0;
color: #000;
font-style: italic;
font-weight: bold;
}
sub, sup {
font-size: 75%;
line-height: 0;
position: relative;
vertical-align: baseline;
}
sup {
top: -0.5em;
}
sub {
bottom: -0.25em;
}
ul, ol {
margin: 1em 0;
padding: 0 0 0 2em;
}
li p:last-child {
margin-bottom: 0;
}
ul ul, ol ol {
margin: .3em 0;
}
dl {
margin-bottom: 1em;
}
dt {
font-weight: bold;
margin-bottom: .8em;
}
dd {
margin: 0 0 .8em 2em;
}
dd:last-child {
margin-bottom: 0;
}
img {
border: 0;
-ms-interpolation-mode: bicubic;
vertical-align: middle;
}
figure {
display: block;
text-align: center;
margin: 1em 0;
}
figure img {
border: none;
margin: 0 auto;
}
figcaption {
font-size: 0.8em;
font-style: italic;
margin: 0 0 .8em;
}
table {
margin-bottom: 2em;
border-bottom: 1px solid #ddd;
border-right: 1px solid #ddd;
border-spacing: 0;
border-collapse: collapse;
}
table th {
padding: .2em 1em;
background-color: #eee;
border-top: 1px solid #ddd;
border-left: 1px solid #ddd;
}
table td {
padding: .2em 1em;
border-top: 1px solid #ddd;
border-left: 1px solid #ddd;
vertical-align: top;
}
.author {
font-size: 1.2em;
text-align: center;
}
@media only screen and (min-width: 480px) {
body {
font-size: 14px;
}
}
@media only screen and (min-width: 768px) {
body {
font-size: 16px;
}
}
@media print {
* {
background: transparent !important;
color: black !important;
filter: none !important;
-ms-filter: none !important;
}
body {
font-size: 12pt;
max-width: 100%;
}
a, a:visited {
text-decoration: underline;
}
hr {
height: 1px;
border: 0;
border-bottom: 1px solid black;
}
a[href]:after {
content: " (" attr(href) ")";
}
abbr[title]:after {
content: " (" attr(title) ")";
}
.ir a:after, a[href^="javascript:"]:after, a[href^="#"]:after {
content: "";
}
pre, blockquote {
border: 1px solid #999;
padding-right: 1em;
page-break-inside: avoid;
}
tr, img {
page-break-inside: avoid;
}
img {
max-width: 100% !important;
}
@page :left {
margin: 15mm 20mm 15mm 10mm;
}
@page :right {
margin: 15mm 10mm 15mm 20mm;
}
p, h2, h3 {
orphans: 3;
widows: 3;
}
h2, h3 {
page-break-after: avoid;
}
}
.with-tests,
.with-tests.dl,
.with-tests.ul,
.with-tests.ol {
border: 3px solid rgb(228, 228, 228);
border-radius: 7px;
padding: 10px 30px;
box-sizing: border-box;
width: calc(100% + 26px);
margin-left: -33px;
margin-top: 20px;
}
.TODO .with-tests {
border: 3px solid red;
}
div.TODO {
background: #ffa4a4;
border-radius: 7px;
padding: 1px 10px;
margin-bottom: 5px;
}
span.TODO {
background-color: #ff9686;
}
span.TODO-marker {
color: red;
}
.with-tests .sentence {
padding: 2px 5px;
position: relative;
background: rgb(218, 218, 218);
border-left: 3px solid rgba(117, 117, 117, 0.5);
border-right: 3px solid rgba(117, 117, 117, 0.5);
margin-right: 5px;
border-radius: 5px;
}
.with-tests .sentence.covered {
background: rgb(213, 236, 206);
border-left: 3px solid rgba(2, 130, 0, 0.5);
border-right: 3px solid rgba(2, 130, 0, 0.5);
cursor: pointer;
}
.with-tests .unexpected-behaviour-marker {
color: red;
position: absolute;
left: -30px;
width: 16px;
height: 16px;
background: red;
border-radius: 8px;
margin-top: 5px;
}
.with-tests .sentence.unexpected-behaviour .number-info:before {
content: "!!! ";
color: red;
}
.with-tests .sentence .coverage-info {
visibility: hidden;
width: 250px;
background-color: #555;
color: #fff;
border-radius: 6px;
padding: 5px 10px;
position: absolute;
z-index: 1;
bottom: 125%;
left: 50%;
margin-left: -175px;
opacity: 0;
transition: opacity 0.3s;
font-size: 12px;
font-family: 'Arial', sans-serif;
font-weight: bold;
}
.with-tests .sentence .coverage-info::after {
content: "";
position: absolute;
top: 100%;
left: 50%;
margin-left: -5px;
border-width: 5px;
border-style: solid;
border-color: #555 transparent transparent transparent;
}
.with-tests .sentence:hover .coverage-info {
visibility: visible;
opacity: 1;
}
.with-tests .sentence > .number-info {
font-weight: bold;
border-right: 3px solid rgba(117, 117, 117, 0.5);
padding: 0 6px 0 3px;
margin-right: 5px;
line-height: 18px;
display: inline-block;
cursor: pointer;
}
.with-tests .sentence.covered > .number-info {
border-right: 3px solid rgba(2, 130, 0, 0.5);
}
.test-links {
text-transform: uppercase;
right: 5px;
top: -22px;
font-size: 13px;
position: absolute;
}
.test-links a {
margin-left: 5px;
}
.paragraph-link {
position: absolute;
margin-top: -27px;
margin-left: -27px;
background: #fff;
padding: 0 10px;
border-radius: 20px;
border: 1px solid #969696;
cursor: pointer;
}
.set-branch {
float: right;
font-weight: normal;
font-size: 16px;
margin-right: 10px;
}
.set-branch img {
outline: none;
width: 16px;
height: 16px;
}
.load-tests, .loaded-tests {
float: right;
font-weight: normal;
font-size: 16px;
margin-right: 10px;
}
.load-tests img, .loaded-tests img {
outline: none;
width: 20px;
height: 20px;
}
.loaded-tests {
cursor: default;
}
.box_shadow {
height: 100%;
width: 100%;
top: 0;
left: 0;
position: fixed;
background: rgba(0, 0, 0, 0.15);
z-index: 2;
}
.box {
position: fixed;
box-shadow: 0px 0px 5px 1px rgba(0, 0, 0, 0.2);
border-radius: 3px;
opacity: 0.0;
z-index: 2;
}
.box .title {
background: #5fc5b6;
padding: 5px 10px;
background: -moz-linear-gradient(#5fc5b6, #29b29d);
background: -webkit-linear-gradient(#5fc5b6, #29b29d);
background: -o-linear-gradient(#5fc5b6, #29b29d);
background: -ms-linear-gradient(#5fc5b6, #29b29d);
background: linear-gradient(#5fc5b6, #29b29d);
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#5fc5b6', endColorstr='#29b29d');
border-top-left-radius: 3px;
border-top-right-radius: 3px;
border-bottom: 1px solid #5daa92;
}
.box .title .text {
font-family: 'ClearSansBold';
color: white;
height: 18px;
line-height: 18px;
text-overflow: ellipsis;
overflow: hidden;
white-space: nowrap;
padding-right: 5px;
}
.box .title .close {
display: block;
float: right;
width: 11px;
margin-top: 4px;
height: 11px;
background: url("../images/close.png") no-repeat #b9b9b9;
background: -webkit-linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
background: -moz-linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
background: -o-linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
background: -ms-linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
background: linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
opacity: 0.4;
transition: opacity .5s ease;
}
.box .title .close:hover {
opacity: 1.0;
}
.box .body {
background: #f6f5f2;
padding: 10px;
border-bottom-left-radius: 3px;
color: #3e3e3e;
border-bottom-right-radius: 3px;
width: inherit;
box-sizing: border-box;
overflow-y: auto;
line-height: 16px;
}
.box .body .loading {
margin: 20px auto;
width: 35px;
height: 35px;
}
.box form.main .item {
width: 600px;
}
.box form.main .item .name {
width: 200px;
}
.box form.main .item .value {
width: 400px;
}
.box form.main .item .value textarea {
max-width: 384px;
}
.test-coverage-view select[name="test-type"], .test-coverage-view select[name="test-number"] {
display: none;
}
.test-coverage-view select[name="test-area"], .test-coverage-view select[name="test-type"] {
width: 150px;
}
.test-coverage-view select[name="test-number"] {
display: none;
width: 250px;
}
.prev-testcase.disabled, .next-testcase.disabled {
color: gray;
cursor: default;
}
.alert {
position: fixed;
bottom: 15px;
right: 15px;
width: 200px !important;
box-shadow: 0 0 5px 1px rgba(0, 0, 0, 0.2);
border-radius: 3px;
z-index: 5;
}
.alert .title {
background: #5fc5b6;
padding: 5px 10px;
background: -moz-linear-gradient(#5fc5b6, #29b29d);
background: -webkit-linear-gradient(#5fc5b6, #29b29d);
background: -o-linear-gradient(#5fc5b6, #29b29d);
background: -ms-linear-gradient(#5fc5b6, #29b29d);
background: linear-gradient(#5fc5b6, #29b29d);
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#5fc5b6', endColorstr='#29b29d');
border-top-left-radius: 3px;
border-top-right-radius: 3px;
border-bottom: 1px solid #5daa92;
}
.alert .title .text {
color: white;
height: 18px;
line-height: 18px;
}
.alert .title .close {
display: block;
float: right;
width: 11px;
margin-top: 4px;
height: 11px;
background: url("../images/close.png") no-repeat #b9b9b9;
background: -webkit-linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
background: -moz-linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
background: -o-linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
background: -ms-linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
background: linear-gradient(transparent, transparent), url("../images/close.svg") no-repeat;
opacity: 0.4;
transition: opacity .5s ease;
}
.alert .title .close:hover {
opacity: 1.0;
}
.alert .body {
background: #f6f5f2;
padding: 10px;
border-bottom-left-radius: 3px;
color: #3e3e3e;
border-bottom-right-radius: 3px;
display: table-cell;
vertical-align: middle;
width: inherit;
height: 65px;
font-size: 13px;
line-height: 16px;
}
.sentence.highlighted {
background: yellow;
border-radius: 5px;
}
.paragraph.highlighted, dl.highlighted, ol.highlighted, ul.highlighted {
border: 3px solid yellow !important;
border-radius: 7px;
padding: 10px 30px;
box-sizing: border-box;
width: calc(100% + 26px);
margin-left: -33px;
margin-top: 20px;
}
.sentence-link {
border-right: 3px solid rgba(117, 117, 117, 0.5);
padding-right: 5px;
margin-right: 3px;
cursor: pointer;
}
.sentence-link img {
width: 12px;
height: 12px;
}
.link-sentence-description-link-type {
width: 200px;
display: inline-block;
}
.sentence-links-popup input[type="text"] {
width: 360px;
padding: 5px;
}
.sentence-links-popup .sentence-links-row {
margin-bottom: 10px;
}
.sentence-finder-bar {
display: inline-block;
}
.header-bar {
display: block;
position: absolute;
height: 61px;
background: #333333;
width: 100%;
left: 0;
top: 0;
border-bottom: 1px solid #a0a0a0;
text-align: left;
z-index: 3;
vertical-align: middle;
line-height: 61px;
}
.with-toggled-toc .header-bar {
position: fixed;
}
.with-toggled-toc .toc-bar {
margin-top: 0 !important;
}
@media (max-width: 500px) {
.with-toggled-toc, .with-toggled-toc body {
overflow: hidden;
}
.with-toggled-toc-if-needed, .with-toggled-toc-if-needed body {
overflow: visible !important;
}
.with-toggled-toc-if-needed .header-bar {
position: absolute !important;
}
.with-toggled-toc-if-needed .toc-bar {
margin-top: auto;
}
.with-toggled-toc-if-needed .toc-bar {
display: none;
}
}
.main-page {
background: url("https://kotlinlang.org/assets/images/favicon.ico") center left no-repeat;
background-size: 25px;
padding-left: 28px;
margin-left: 20px;
font-size: 28px;
font-family: sans-serif;
font-weight: bold;
}
.sentence-finder-bar {
position: absolute;
padding-left: 50px;
margin-left: 20px;
}
.main-page:focus {
outline: none;
}
.download-full-pdf:focus {
outline: none;
}
.header-bar a,
.header-bar a:visited,
.header-bar a:hover,
.header-bar a:active,
.header-bar a:focus,
.header-bar a::selection,
.header-bar a:focus-within {
color: white;
}
.header-bar .download-full-pdf:hover {
opacity: 1;
}
.header-bar .download-full-pdf {
float: right;
margin-right: 30px;
color: white;
font-family: sans-serif;
opacity: 0.7;
font-weight: lighter;
}
.spec-location-search input[type="text"] {
padding: 5px;
width: 400px;
}
.spec-location-search button {
padding: 5px 10px;
margin-left: 10px;
}
.show-markup-link, .hide-markup-link {
margin-left: 10px;
}
.spec-location-format {
font-size: 11px;
position: absolute;
top: 22px;
color: white;
}
h2, h3, h4, h5 {
cursor: pointer;
}
.disable-dev-mode,
.show-markup-link,
.hide-markup-link {
opacity: 0.7;
display: block;
}
.header-bar a:hover{
opacity: 1;
}
.sentence-finder-settings{
display: inline-block;
position:absolute;
line-height: 1.7;
width: fit-content;
padding-left: 10px;
top: 12px;
font-family: sans-serif;
font-weight: lighter;
font-size: 12px;
}
#toc-element-kotlincore{
pointer-events: none;
cursor: default;
}
<|start_filename|>web/src/main/kotlin/org/jetbrains/kotlin/spec/loader/SpecTestsLoader.kt<|end_filename|>
package org.jetbrains.kotlin.spec.loader
import js.externals.jquery.JQuery
import js.externals.jquery.`$`
import org.jetbrains.kotlin.spec.entity.TestsLoadingInfo
import org.jetbrains.kotlin.spec.entity.SpecSection
import org.jetbrains.kotlin.spec.entity.test.parameters.testArea.TestArea
import org.jetbrains.kotlin.spec.utils.format
import org.jetbrains.kotlin.spec.utils.isDevMode
import org.jetbrains.kotlin.spec.viewer.MarkUpArranger
import org.jetbrains.kotlin.spec.viewer.SpecCoverageHighlighter
import kotlinx.browser.window
import kotlin.js.Promise
class SpecTestsLoader {
private val loader = LoaderByTestsMapFile()
companion object {
private const val EXCEPTED_SELECTORS = ".grammar-rule"
private val paragraphSelector = listOf(".paragraph", "DL", "UL", "OL").joinToString(",")
private val sectionTagNames = listOf("H1", "H2", "H3", "H4", "H5")
private const val LOADING_ICON_PATH = "./resources/images/loading.gif"
private const val LOADING_ICON_HTML = "<img src=\"$LOADING_ICON_PATH\" />"
private const val SET_BRANCH_ICON = "./resources/images/set-branch.png"
private val notLoadedTestsText = "Tests for \"{1}\" in \"${GithubTestsLoader.getBranch()}\" branch aren't yet written."
private const val SECTION_PATH_SEPARATOR = ", "
fun getButtonToLoadTests(link: JQuery, isToReload: Boolean = false) = when (isToReload) {
false -> """
<a href="#"
data-id="${link.attr("id")}"
data-type="${link.prop("tagName").toString().toLowerCase()}"
class="load-tests"
title="Show tests coverage">
<button>Load tests</button>
</a>"""
else -> """
<button>Reload tests</button>
""".trimIndent()
}
fun insertLoadIcon(headerElement: JQuery) {
headerElement.append(
buildString {
if (isDevMode)
append("""<a href="#" class="set-branch" title="The compiler repo branch from which the tests will be taken"><img src="$SET_BRANCH_ICON" /></a></span>""")
append(getButtonToLoadTests(headerElement))
}
)
}
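// Walks the siblings following a section header (until the next h1-h5) and collects each paragraph-like element with its sentence count; returns null if the section has no id.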
fun getParagraphsInfo(sectionElement: JQuery): List<Map<String, Any>>? {
var nextSibling = sectionElement.get().run {
if (size == 0) return@getParagraphsInfo null
this[0].nextElementSibling
}
val sectionName = sectionElement.attr("id")
val paragraphsMap = mutableListOf<Map<String, Any>>()
var paragraphCounter = 1
if (sectionName.isEmpty())
return null
while (nextSibling != null) {
if (sectionTagNames.indexOf(nextSibling.tagName) != -1) break
val isParagraph = nextSibling.matches(paragraphSelector)
val childParagraph = nextSibling.querySelector(".paragraph")
if ((isParagraph || childParagraph != null) && !`$`(nextSibling).`is`(EXCEPTED_SELECTORS)) {
val nextParagraph = childParagraph ?: nextSibling
paragraphsMap.add(
mapOf(
"paragraphElement" to nextParagraph,
"sentenceCount" to `$`(nextParagraph).find(".sentence").length.toString()
)
)
paragraphCounter++
}
nextSibling = nextSibling.nextElementSibling
}
return paragraphsMap
}
fun showMarkup() {
`$`("h2, h3, h4, h5").each { _, section ->
val sectionTagName = section.tagName.toLowerCase()
val sectionElement = `$`(section)
val paragraphsInfo = getParagraphsInfo(sectionElement)
?: return@each null
val sectionsPath = mutableSetOf<String>().apply {
if (sectionTagName == "h3" || sectionTagName == "h4" || sectionTagName == "h5") {
add(getParentSectionName(sectionElement, "h2"))
}
if (sectionTagName == "h4" || sectionTagName == "h5") {
add(getParentSectionName(sectionElement, "h3"))
}
if (sectionTagName == "h5") {
add(getParentSectionName(sectionElement, "h4"))
}
add(sectionElement.attr("id"))
}
MarkUpArranger.showMarkUpForParagraphs(paragraphsInfo, sectionsPath.joinToString(", "))
}
}
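// Collects the ids of subsection headers nested under the given section, stopping at the next header of the same or a higher level.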
private fun getNestedSections(sectionElement: JQuery): List<String> {
val placeCurrentSectionLevel = sectionTagNames.indexOf(sectionElement.prop("tagName").toString().toUpperCase())
val otherSectionTagNames = sectionTagNames.slice(0..placeCurrentSectionLevel)
val nestedSectionTagNames = sectionTagNames.slice(placeCurrentSectionLevel until sectionTagNames.size)
var nextSibling = sectionElement.get()[0].nextElementSibling
val nestedSectionIds = mutableListOf<String>()
while (nextSibling != null) {
if (otherSectionTagNames.indexOf(nextSibling.tagName) != -1)
break
if (nestedSectionTagNames.indexOf(nextSibling.tagName) != -1) {
nestedSectionIds.add(nextSibling.getAttribute("id")!!)
}
nextSibling = nextSibling.nextElementSibling
}
return nestedSectionIds
}
fun parseTestFiles(
specSectionTestSet: SpecSection,
currentSection: String,
sectionsPath: List<String>,
paragraphsInfo: List<Map<String, Any>>
) {
val pathPrefix = "${sectionsPath.joinToString(SECTION_PATH_SEPARATOR)}$SECTION_PATH_SEPARATOR$currentSection"
SpecCoverageHighlighter.showCoverageOfParagraphs(paragraphsInfo, specSectionTestSet, pathPrefix)
}
fun getParentSectionName(element: JQuery, type: String) = element.prevAll(type).first().attr("id")
fun onSetBranchIconClick() {
val currentBranch = window.localStorage.getItem("spec-tests-branch") ?: GithubTestsLoader.DEFAULT_BRANCH
val newBranch = window.prompt("Specify the Kotlin compiler repo branch from which the spec tests will be taken:", currentBranch)
if (newBranch != null && newBranch != currentBranch) {
window.localStorage.setItem("spec-tests-branch", newBranch)
}
}
fun loadHelperFile(helperName: String, testArea: TestArea): Promise<String> {
return GithubTestsLoader.loadHelperFromRawGithub(
"$helperName.kt",
testArea = testArea
)
}
}
private lateinit var sectionPrevLoaded: String
private var originalSectionName: String? = null
private var numberSectionsLoaded = 0
fun onTestsLoadingLinkClick(link: JQuery) {
loader.loadSectionsMapFiles()
.then { sectionsMapsByTestArea ->
loadTests(link, sectionsMapsByTestArea)
}
}
private fun loadTests(link: JQuery, sectionsMapsByTestArea: Map<TestArea, TestsLoadingInfo.Sections>) {
val section = link.parent("h2, h3, h4, h5")
val paragraphsInfo = getParagraphsInfo(section)
val nestedSections = getNestedSections(section)
val sectionToLoadName = section.attr("id")
val sectionsPath: MutableList<String> = mutableListOf()
val mainSectionsPath = getParentSectionName(section, "h2")
if (originalSectionName == null) {
originalSectionName = sectionToLoadName
numberSectionsLoaded = 1
}
link.html(LOADING_ICON_HTML)
if (link.data("type") == "h4" || link.data("type") == "h5") {
sectionsPath.add(getParentSectionName(section, "h3"))
}
if (link.data("type") == "h5") {
sectionsPath.add(getParentSectionName(section, "h4"))
}
loader.loadTestFiles(
sectionToLoadName = sectionToLoadName,
mainSectionPath = mainSectionsPath,
sectionsPath = sectionsPath,
sectionsMapsByTestArea = sectionsMapsByTestArea)
.then { sectionTestSet ->
if (paragraphsInfo != null)
parseTestFiles(sectionTestSet, sectionToLoadName, sectionsPath, paragraphsInfo)
link.html(getButtonToLoadTests(link, true))
if (originalSectionName == sectionToLoadName) {
section.nextAll(".paragraph.with-tests").first().get()[0].scrollIntoView()
originalSectionName = null
sectionPrevLoaded = sectionToLoadName
}
}.catch {
numberSectionsLoaded--
if (originalSectionName == sectionToLoadName) {
originalSectionName = null
sectionPrevLoaded = sectionToLoadName
}
if (numberSectionsLoaded == 0) {
window.alert(notLoadedTestsText.format(sectionPrevLoaded))
}
link.html(getButtonToLoadTests(link, true))
}
nestedSections.forEach { sectionId ->
numberSectionsLoaded++
loadTests(`$`("#${sectionId.replace(".", """\\.""")} .load-tests").click(), sectionsMapsByTestArea)
}
}
}
<|start_filename|>docs/src/main/kotlin/org/jetbrains/kotlin/spec/markSentencesFilter.kt<|end_filename|>
package org.jetbrains.kotlin.spec
import ru.spbstu.pandoc.*
private const val SENTENCE_CLASS = "sentence"
private const val PARAGRAPH_CLASS = "paragraph"
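// Splits the iterable at the first element matching f: elements before it go to the first list, the matching element and everything after it go to the second.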
private fun <T> Iterable<T>.breakBy(f : (T) -> Boolean): Pair<MutableList<T>, MutableList<T>> {
val it = iterator()
val before: MutableList<T> = mutableListOf()
val after: MutableList<T> = mutableListOf()
for(t in it) {
if(f(t)) {
after += t
break
}
before += t
}
after.addAll(it.asSequence())
return before to after
}
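// Splits a non-empty list into its head and the remaining tail.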
private fun <T> MutableList<T>.unconsed() = first() to subList(1, lastIndex + 1)
private val stopList = setOf(
"e.g.", "i.e.", "w.r.t.", "ca.",
"cca.", "etc.", "f.", "ff.",
"i.a.", "Ph.D.", "Q.E.D.", "vs."
) // et.al. commonly appears at the end of a sentence => not included here
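// Returns the first complete sentence and the remaining inlines: a sentence ends at a Str ending in '.', '?' or '!' (unless it is a known abbreviation) or at a line break, with special handling for closing parentheses and already-wrapped sentence spans.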
private fun breakSentence(inlines: List<Inline>): Pair<List<Inline>, List<Inline>> {
fun isEndLine(i: Inline): Boolean = when {
i is Inline.Str
&& (i.text.endsWith(".") || i.text.endsWith("?") || i.text.endsWith("!"))
&& i.text !in stopList ->
true
i is Inline.LineBreak -> true
else -> false
}
val (ac, bc) = inlines.breakBy(::isEndLine)
if(bc.isEmpty()) return ac to emptyList()
if(bc.size == 1) return inlines to emptyList()
val (h, t_) = bc.unconsed()
val (h2, tail) = t_.unconsed()
when {
h2 is Inline.Space
|| h2 is Inline.SoftBreak
|| (h2 is Inline.Span && SENTENCE_CLASS in h2.attr.classes)
|| (h == Inline.Str(".")) && h2 is Inline.Str && h2.text.startsWith(")") -> {
ac += h
ac += h2
return ac to tail
}
(h is Inline.Str) && h.text.startsWith(".)")
|| (h is Inline.LineBreak) && h2 is Inline.Str && h2.text.startsWith(".") -> {
ac += h
tail.add(0, h2)
return ac to tail
}
else -> {
tail.add(0, h2)
val (dc, ec) = breakSentence(tail)
return (ac + h + dc) to ec
}
}
}
private fun splitSentences(inlines: List<Inline>): MutableList<List<Inline>> {
if(inlines.isEmpty()) return mutableListOf()
var (sent, rest) = breakSentence(inlines)
val res = mutableListOf(sent)
while(rest.isNotEmpty()) {
val (sent_, rest_) = breakSentence(rest)
rest = rest_
res += sent_
}
return res
}
private fun process(inlines: List<Inline>): List<Inline> =
splitSentences(inlines).map { Inline.Span(Attr(classes = listOf(SENTENCE_CLASS)), it) }
object SpecSentencesFilterVisitor : PandocVisitor() {
override fun visit(b: Block.Para): Block {
return Block.Div(
Attr(classes = listOf(PARAGRAPH_CLASS)),
listOf(b.copy(inlines = process(b.inlines)))
)
}
override fun visit(b: Block.Plain): Block {
return b.copy(inlines = process(b.inlines))
}
override fun visit(b: Block.Div): Block {
if(PARAGRAPH_CLASS in b.attr.classes) return b
return super.visit(b)
}
override fun visit(i: Inline.Span): Inline {
if(SENTENCE_CLASS in i.attr.classes) return i
return super.visit(i)
}
}
fun main() = makeFilter(SpecSentencesFilterVisitor)
<|start_filename|>docs/src/main/kotlin/org/jetbrains/kotlin/spec/compoundFilter.kt<|end_filename|>
package org.jetbrains.kotlin.spec
import com.fasterxml.jackson.module.kotlin.readValue
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.arguments.convert
import com.github.ajalt.clikt.parameters.options.convert
import com.github.ajalt.clikt.parameters.options.defaultLazy
import com.github.ajalt.clikt.parameters.options.flag
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.file
import ru.spbstu.pandoc.Format
import ru.spbstu.pandoc.Pandoc
import ru.spbstu.pandoc.PandocVisitor
import ru.spbstu.pandoc.jackson.constructObjectMapper
import ru.spbstu.pandoc.makeFilter
import java.io.File
operator fun PandocVisitor.plus(otherVisitor: PandocVisitor) = object : PandocVisitor() {
override fun visit(doc: Pandoc): Pandoc {
return otherVisitor.visit(this@plus.visit(doc))
}
}
private object CompoundFilter : CliktCommand() {
val format: Format by argument("Pandoc output format").convert { Format(it) }
val split: Boolean by option().flag("--no-split")
val disableTODOS: Boolean by option().flag("--enable-todos")
val imageDirectory: File? by option().file(fileOkay = false)
val embed: Boolean by option().flag()
val defaultFormat: String? by option()
val disableStaticMath by option().flag("--enable-static-math")
val katex by option()
val outputDirectory: File by option().file(fileOkay = false).defaultLazy { File(".") }
val generateTOC: Boolean by option().flag("--no-toc")
override fun run() {
outputDirectory.mkdirs()
imageDirectory?.mkdirs()
var visitor = listOf(
SpecTodoFilterVisitor(format, disableTODOS),
SpecSentencesFilterVisitor,
SpecCopyPasteFilterVisitor,
SpecInlineDiagramFilterVisitor(defaultFormat, format, imageDirectory, embed),
MathInCode,
BrokenReferencesReporter
).reduce { a, b -> a + b }
if (!disableStaticMath && format.isHTML())
visitor += InlineKatex(katex)
val om = constructObjectMapper()
val ii = om.readValue<Pandoc>(System.`in`)
if (split) {
visitor += Splitter(outputDirectory, format.format, generateTOC)
visitor.visit(ii)
} else {
om.writeValue(System.out, visitor.visit(ii))
}
}
}
fun main(args: Array<String>) = CompoundFilter.main(args)
<|start_filename|>web/src/main/kotlin/org/jetbrains/kotlin/spec/viewer/links/SentenceFinder.kt<|end_filename|>
package org.jetbrains.kotlin.spec.viewer.links
import js.externals.jquery.`$`
import org.jetbrains.kotlin.spec.viewer.links.SpecPlaceHighlighter.getSentenceInfoFromSearchField
import org.jetbrains.kotlin.spec.viewer.links.SpecPlaceHighlighter.highlightParagraph
import org.jetbrains.kotlin.spec.viewer.links.SpecPlaceHighlighter.highlightSentence
object SentenceFinder {
const val FINDER_BAR_HTML = """<div class="sentence-finder-bar">
<div class="spec-location-search">
<input type="text" name="spec-sentence-location" />
<button class="spec-sentence-find">Find sentence</button>
<div class="sentence-finder-settings">
<a href="#" class="{1}-markup-link">{2} markup</a>
<a href="#" class="disable-dev-mode">Disable dev mode</a>
</div>
</div>
<div class="spec-location-format">Format: sections-hierarchy{,} -> paragraph {n} -> sentence {m}</div>
</div>"""
fun findSentence() =
findSentence(`$`(".spec-location-search input[name=\"spec-sentence-location\"]").`val`().toString())
private fun findSentence(place: String) {
val specPlaceComponents = getSentenceInfoFromSearchField(place.trimEnd())
if (specPlaceComponents.sentenceNumber != null) {
highlightSentence(specPlaceComponents)
} else {
highlightParagraph(specPlaceComponents)
}
}
}
<|start_filename|>web/src/main/kotlin/org/jetbrains/kotlin/spec/utils/helpers.kt<|end_filename|>
package org.jetbrains.kotlin.spec.utils
import org.w3c.dom.Location
import org.w3c.dom.get
import kotlinx.browser.window
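// Replaces 1-based positional placeholders of the form {1}, {2}, ... with the corresponding argument; placeholders without a matching argument become empty strings.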
fun String.format(vararg args: Any): String {
return this.replace(Regex("""\{(\d+)}""", RegexOption.MULTILINE)) {
val number = it.groupValues[1].toInt()
if (args.size >= number) args[number - 1].toString() else ""
}
}
fun String.escapeHtml(): String {
return this.replace(Regex("&", RegexOption.MULTILINE), "&amp;")
.replace(Regex("<", RegexOption.MULTILINE), "&lt;")
.replace(Regex(">"), "&gt;")
.replace(Regex("\"", RegexOption.MULTILINE), "&quot;")
.replace(Regex("'", RegexOption.MULTILINE), "&#39;")
}
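// Parses the URL query string into a key/value map; parameters that are not of the form key=value are skipped.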
val Location.searchMap: MutableMap<String, String>
get() {
val rawSearch = search.substring(1).split("&")
val objURL = mutableMapOf<String, String>()
rawSearch.forEach { param ->
val paramComponents = param.split("=")
if (paramComponents.size != 2) return@forEach
objURL[paramComponents[0]] = paramComponents[1]
}
return objURL
}
val isDevMode = window.localStorage["isDevMode"] != null || window.location.searchMap["mode"] == "dev"
val shouldBeShowedMarkup = window.localStorage["showMarkup"] != null
val sentenceToBeHighlighted = window.location.searchMap["sentence"]
val paragraphToBeHighlighted = window.location.searchMap["paragraph"]
val sectionToBeHighlighted = window.location.hash.takeIf { it.isNotEmpty() }?.substring(1)
<|start_filename|>docs/pdfSections/build.gradle.kts<|end_filename|>
import java.nio.file.Paths
val pdfBuildDir = "${project.parent?.buildDir}/spec/pdf"
val scriptsDir = "${project.parent?.projectDir}/scripts/build"
tasks.create<Exec>("build") {
group = "internal"
inputs.dir("${project.parent?.projectDir}/src/md/kotlin.core")
dependsOn(":docs:prepareShell")
dependsOn(":docs:convertGrammar")
dependsOn(":docs:filtersJar")
environment["PROJECT_DIR"] = project.parent?.projectDir!!
doFirst {
workingDir = File(scriptsDir)
commandLine = listOf("bash", "$scriptsDir/buildPdfBySections.sh")
Paths.get("$pdfBuildDir/sections").toFile().apply { deleteRecursively(); mkdirs() }
}
}
<|start_filename|>grammar/src/test/org/jetbrains/kotlin/spec/grammar/util/TestUtil.kt<|end_filename|>
package org.jetbrains.kotlin.spec.grammar.util
import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.util.text.StringUtil
import com.intellij.openapi.vfs.CharsetToolkit
import com.intellij.rt.execution.junit.FileComparisonFailure
import org.apache.commons.codec.digest.DigestUtils
import org.junit.Assume.assumeTrue
import java.io.File
import java.nio.file.Files
data class TestDataFileHeader(
val marker: String?,
val hash: String
)
sealed class TestData(
val sourceCode: String,
val sourceCodeHash: String,
val antlrParseTreeText: File
)
class PsiTestData(
sourceCode: String,
sourceCodeHash: String,
antlrParseTreeText: File,
val psiParseTreeText: String
) : TestData(sourceCode, sourceCodeHash, antlrParseTreeText)
class DiagnosticTestData(
sourceCode: String,
sourceCodeHash: String,
antlrParseTreeText: File
) : TestData(sourceCode, sourceCodeHash, antlrParseTreeText)
object TestUtil {
private const val TESTS_DIR = "./testData"
val ls: String = System.lineSeparator()
private fun String.trimTrailingWhitespacesAndAddNewlineAtEOF() =
split(ls).joinToString(ls, transform = String::trimEnd).let { result ->
if (result.endsWith(ls)) result else result + ls
}
fun assertEqualsToFile(message: String, expectedFile: File, actual: String,
forceApplyChanges: Boolean,
dumpErroneousData: Boolean) {
val actualText = StringUtil.convertLineSeparators(actual.trim { it <= ' ' }).trimTrailingWhitespacesAndAddNewlineAtEOF()
if (!expectedFile.exists()) {
FileUtil.writeToFile(expectedFile, actualText)
println("Expected data file did not exist. Generating: $expectedFile")
assumeTrue(false)
}
val expected = FileUtil.loadFile(expectedFile, CharsetToolkit.UTF8, true)
val expectedText = StringUtil.convertLineSeparators(expected.trim { it <= ' ' }).trimTrailingWhitespacesAndAddNewlineAtEOF()
if (expectedText != actualText) {
if (forceApplyChanges) {
FileUtil.writeToFile(expectedFile, actualText)
println("Changes are applied forcibly for $expectedFile")
assumeTrue(false)
} else {
if (dumpErroneousData) {
val dumpFile = File(expectedFile.parent, expectedFile.name + ".actual")
FileUtil.writeToFile(dumpFile, actualText)
println("Actual file dumped for $expectedFile")
}
throw FileComparisonFailure(message + ": " + expectedFile.name, expected, actual, expectedFile.absolutePath)
}
}
}
fun getTestData(testFile: File): TestData {
val relativePath = "${testFile.parent}/${testFile.nameWithoutExtension}"
val testPathPrefix = "$TESTS_DIR/$relativePath"
val sourceCodeFile = File("$testPathPrefix.kt")
val sourceCode = sourceCodeFile.readText()
val antlrTreeFile = File("$testPathPrefix.antlrtree.txt")
val isPsiTest = File("$testPathPrefix.txt").exists()
val fileContentHash = Files.newInputStream(sourceCodeFile.toPath()).use { DigestUtils.md5Hex(it) }
println("Source code path: ${sourceCodeFile.absolutePath}")
println("ANTLR tree path: ${antlrTreeFile.absolutePath}")
return if (isPsiTest)
PsiTestData(sourceCode, fileContentHash, antlrTreeFile, File("$testPathPrefix.txt").readText())
else
DiagnosticTestData(sourceCode, fileContentHash, antlrTreeFile)
}
}
<|start_filename|>docs/src/main/kotlin/org/jetbrains/kotlin/spec/convertGrammar.kt<|end_filename|>
package org.jetbrains.kotlin.spec
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.options.required
import org.antlr.v4.Tool
import org.antlr.v4.parse.ANTLRParser
import org.antlr.v4.tool.Grammar
import org.antlr.v4.tool.LexerGrammar
import org.antlr.v4.tool.ast.*
import ru.spbstu.grammarConverter.parseRules
import ru.spbstu.grammarConverter.toMarkdown
import java.io.File
import java.util.*
fun inlineGrammar(grammarDir: String, lexerGrammar: String, parserGrammar: String): String {
val tool = Tool().apply { libDirectory = grammarDir }
val lexerGrammarText = File("$grammarDir/$lexerGrammar").readText()
val parserGrammarSourceLines = File("$grammarDir/$parserGrammar").readLines()
val lexer = LexerGrammar(tool, tool.parseGrammarFromString(lexerGrammarText)).apply {
fileName = lexerGrammar
tool.process(this, false)
}
val parser = Grammar(tool, tool.parseGrammarFromString(parserGrammarSourceLines.joinToString(System.lineSeparator()))).apply {
fileName = parserGrammar
tool.process(this, false)
}
val visitor = GrammarVisitor(lexer, parserGrammarSourceLines).also { parser.ast.visit(it) }
return visitor.processedParserGrammarSourceLines.joinToString(System.lineSeparator())
}
private fun convertGrammar(outputMarkdownFilePath: String, grammarDir: String, lexerGrammar: String, parserGrammar: String) {
val parserGrammarWithInlinedLexerRules = inlineGrammar(grammarDir, lexerGrammar, parserGrammar)
val outputMarkdownFile = File(outputMarkdownFilePath).outputStream()
outputMarkdownFile.bufferedWriter().use { writer ->
val rules = parseRules(parserGrammarWithInlinedLexerRules.byteInputStream())
writer.append(rules.toMarkdown(true)).appendLine()
}
}
private object Driver : CliktCommand() {
val outputFilePath by option("-o", "--output", help="path to output file in markdown format (.md)").required()
val grammarDir by option("-d", "--grammar_dir", help="path to dir with lexer and parser grammars").required()
val lexerGrammar by option("-l", "--lexer_grammar", help="lexer grammar filename").required()
val parserGrammar by option("-p", "--parser_grammar", help="parser grammar filename").required()
override fun run() = convertGrammar(outputFilePath, grammarDir, lexerGrammar, parserGrammar)
}
class GrammarVisitor(private val lexer: LexerGrammar, parserGrammarSourceLines: List<String>): GrammarASTVisitor {
val processedParserGrammarSourceLines: MutableList<String> = parserGrammarSourceLines.toMutableList()
private val offsets = mutableMapOf<Int, TreeMap<Int, Int>>()
override fun visit(node: GrammarAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: GrammarRootAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: RuleAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: BlockAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: OptionalBlockAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: PlusBlockAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: StarBlockAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: AltAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: NotAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: PredAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: RangeAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: SetAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
override fun visit(node: RuleRefAST) { if (node.children != null) { node.childrenAsArray.map { it.visit(this) } } }
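// Inlines simple lexer rules: where the parser grammar references a lexer token that resolves to a single value, the token name in the source line is replaced by that value, with per-line offsets tracking how earlier replacements shifted positions.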
override fun visit(node: TerminalAST) {
val lexerRule = lexer.rules[node.token.text]
if (lexerRule == null || !isSimpleLexerRule(lexerRule.ast.childrenAsArray[1]) || node.token.text in excludedNodes)
return
val tokenValue = lexerRule.tokenRefs.singleOrNull() ?: return
val tokenNameLength = node.token.text.length
val startPositionInLine = node.charPositionInLine
val endPositionInLine = startPositionInLine + tokenNameLength
val lineIndex = node.line - 1
val offsetForLine = offsets[lineIndex]
val offset = offsetForLine?.subMap(0, startPositionInLine)?.values?.sum() ?: 0
processedParserGrammarSourceLines[lineIndex] = processedParserGrammarSourceLines[lineIndex].replaceRange(
range = (startPositionInLine - offset) until (endPositionInLine - offset),
replacement = tokenValue
)
if (lineIndex !in offsets) {
offsets[lineIndex] = TreeMap()
}
val offsetsTree = offsets[lineIndex]
if (offsetsTree != null) {
offsetsTree[startPositionInLine] = node.token.text.length - tokenValue.length
}
}
private fun isSimpleLexerRule(rule: GrammarAST): Boolean {
if (rule.children != null && rule.childrenAsArray.count { supportedNodes.contains(it.type) } > 1)
return false
if (rule.children == null || rule.children.size == 0)
return rule.type == ANTLRParser.STRING_LITERAL
return isSimpleLexerRule(rule.childrenAsArray[0])
}
companion object {
private val supportedNodes = setOf(
ANTLRParser.TOKEN_REF, ANTLRParser.LEXER_CHAR_SET, ANTLRParser.RULE_REF, ANTLRParser.BLOCK,
ANTLRParser.ALT, ANTLRParser.SET, ANTLRParser.RULE, ANTLRParser.STRING_LITERAL, ANTLRParser.RULEMODIFIERS,
ANTLRParser.POSITIVE_CLOSURE, ANTLRParser.CLOSURE, ANTLRParser.OPTIONAL
)
private val excludedNodes = setOf(
"AT_NO_WS", "AT_PRE_WS", "AT_POST_WS", "QUEST_NO_WS", "QUEST_WS", "NL", "EOF"
)
}
}
fun main(args: Array<String>) = Driver.main(args)
<|start_filename|>web/build.gradle.kts<|end_filename|>
import org.jetbrains.kotlin.gradle.targets.js.webpack.KotlinWebpackConfig.Mode
import org.jetbrains.kotlin.gradle.targets.js.webpack.KotlinWebpackOutput
plugins {
kotlin("js")
}
group = "org.jetbrains.kotlin.spec"
version = "0.1"
val buildMode = findProperty("mode")?.toString() ?: "production" // production | development
repositories {
mavenCentral()
jcenter()
}
tasks.create<Copy>("copyKatex") {
dependsOn(":kotlinNpmInstall")
group = "internal"
from("$rootDir/build/js/node_modules/katex/dist")
into("$buildDir/js/katex".also { File(it).mkdirs() })
}
kotlin {
js {
moduleName = "main"
compilations.all {
packageJson {
dependencies["jquery"] = "2.2.4"
}
kotlinOptions {
moduleKind = "amd"
}
}
browser {
webpackTask {
dependsOn("copyKatex")
output.apply {
libraryTarget = KotlinWebpackOutput.Target.AMD_REQUIRE
library = "main"
}
destinationDirectory = file("${buildDir}/js")
outputFileName = "main.js"
mode = Mode.valueOf(buildMode.toUpperCase())
sourceMaps = (mode == Mode.DEVELOPMENT)
}
}
}
sourceSets {
main {
dependencies {
compileOnly("kotlin.js.externals:kotlin-js-jquery:2.0.0-0")
implementation(npm("katex", "0.11.1"))
implementation(npm("jquery", "2.2.4"))
implementation(npm("kotlin-playground", "1.24.2"))
implementation("org.jetbrains.kotlinx:kotlinx-serialization-runtime-js:0.12.0")
}
}
}
}
<|start_filename|>docs/pdf/build.gradle.kts<|end_filename|>
val pdfBuildDir = "${project.parent?.buildDir}/spec/pdf"
val scriptsDir = "${project.parent?.projectDir}/scripts/build"
tasks.create<Exec>("build") {
group = "internal"
inputs.dir("${project.parent?.projectDir}/src/md/kotlin.core")
outputs.files("${pdfBuildDir}/kotlin-spec.pdf")
dependsOn(":docs:prepareShell")
dependsOn(":docs:convertGrammar")
dependsOn(":docs:filtersJar")
environment["PROJECT_DIR"] = project.parent?.projectDir!!
doFirst {
workingDir = File(scriptsDir)
commandLine = listOf("bash", "$scriptsDir/buildPdf.sh")
File(pdfBuildDir).mkdirs()
}
}
<|start_filename|>web/src/main/kotlin/org/jetbrains/kotlin/spec/loader/LoaderByTestsMapFile.kt<|end_filename|>
package org.jetbrains.kotlin.spec.loader
import org.jetbrains.kotlin.spec.entity.SpecSection
import org.jetbrains.kotlin.spec.entity.TestsLoadingInfo
import org.jetbrains.kotlin.spec.entity.test.SpecTest
import org.jetbrains.kotlin.spec.entity.test.TestPlace
import org.jetbrains.kotlin.spec.entity.test.parameters.TestInfo
import org.jetbrains.kotlin.spec.entity.test.parameters.TestType
import org.jetbrains.kotlin.spec.entity.test.parameters.testArea.TestArea
import org.jetbrains.kotlin.spec.loader.GithubTestsLoader.Companion.loadSectionsMapFileFromRawGithub
import org.jetbrains.kotlin.spec.loader.GithubTestsLoader.Companion.loadTestFileFromRawGithub
import org.jetbrains.kotlin.spec.loader.GithubTestsLoader.Companion.loadTestMapFileFromRawGithub
import org.jetbrains.kotlin.spec.loader.GithubTestsLoader.Companion.testAreasToLoad
import kotlin.js.Promise
class LoaderByTestsMapFile : GithubTestsLoader {
private fun loadTestsMapFile(mainSectionName: String, sectionsPath: String, sectionsMapByTestArea: Map<TestArea, TestsLoadingInfo.Sections>
): Promise<Map<TestArea, TestsLoadingInfo.Tests>> {
return loadTestMapFileFromRawGithub(
mainSectionName = mainSectionName,
path = sectionsPath,
testType = GithubTestsLoader.TestOrigin.SPEC_TEST,
sectionsMapByTestArea = sectionsMapByTestArea
)
}
private fun loadSectionsMapFile() = loadSectionsMapFileFromRawGithub()
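// Walks the tests map JSON (paragraph -> test type -> sentence -> tests) and creates one load promise per linked test file.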
private fun getPromisesForTestFilesFromTestMap(testsMap_: TestsLoadingInfo.Tests?, testArea: TestArea): Array<Promise<SpecTest>> {
val promises = mutableListOf<Promise<SpecTest>>()
val testsMap = testsMap_?.json ?: return promises.toTypedArray()
for ((paragraph, testsByParagraphs) in testsMap.jsonObject) {
for ((testType, testsByTypes) in testsByParagraphs.jsonObject) {
for ((testSentence, testsBySentences) in testsByTypes.jsonObject) {
testsBySentences.jsonArray.forEachIndexed { i, testInfo ->
val testFilePath = testInfo.jsonObject["path"]?.primitive?.content ?: return@forEachIndexed
val testElementInfo = TestInfo(testInfo.jsonObject, i + 1)
val testPath = TestPlace(paragraph.toInt(), TestType.getByShortName(testType), testSentence.toInt())
promises.add(loadTestFileFromRawGithub(testFilePath, testElementInfo, testPath, testArea))
}
}
}
}
return promises.toTypedArray()
}
override fun loadTestFiles(sectionToLoadName: String,
mainSectionPath: String,
sectionsPath: List<String>,
sectionsMapsByTestArea: Map<TestArea, TestsLoadingInfo.Sections>
) = loadTestsMapFile(mainSectionName = mainSectionPath,
sectionsPath = when {
mainSectionPath == sectionToLoadName && sectionsPath.isEmpty() -> "";
sectionsPath.isNotEmpty() -> sectionsPath.joinToString("/") + "/" + sectionToLoadName;
else -> sectionToLoadName
},
sectionsMapByTestArea = sectionsMapsByTestArea)
.then { testsMapsByTestArea ->
val resultMap = mutableMapOf<TestArea, List<SpecTest>>()
Promise.all(testAreasToLoad.asList()
.associateWith { getPromiseForTests(it, testsMapsByTestArea, resultMap) }
.values.toTypedArray()
).then { SpecSection(resultMap) }
}
fun loadSectionsMapFiles() = loadSectionsMapFile()
private fun getPromiseForTests(
testArea: TestArea,
testMaps: Map<TestArea, TestsLoadingInfo.Tests>,
mapOfTests: MutableMap<TestArea, List<SpecTest>>
) = Promise.all(
getPromisesForTestFilesFromTestMap(testMaps[testArea], testArea))
.then { mapOfTests[testArea] = it.toList() }
}
<|start_filename|>settings.gradle.kts<|end_filename|>
rootProject.name = "kotlin-spec"
pluginManagement {
repositories {
gradlePluginPortal()
mavenCentral()
}
resolutionStrategy {
eachPlugin {
when (requested.id.id) {
"at.phatbl.shellexec" -> useModule("gradle.plugin.at.phatbl:shellexec:${requested.version}")
}
}
}
}
val withGrammarProject: String? by settings
include("docs")
include("docs:pdf")
include("docs:pdfSections")
include("docs:html")
include("docs:htmlSections")
include("web")
if (withGrammarProject?.toBoolean() != false) {
include("grammar")
}
<|start_filename|>web/src/main/kotlin/org/jetbrains/kotlin/spec/entity/TestsLoadingInfo.kt<|end_filename|>
package org.jetbrains.kotlin.spec.entity
import kotlinx.serialization.json.JsonElement
sealed class TestsLoadingInfo() {
class Tests(val json: JsonElement)
class Sections(val json: JsonElement)
}
<|start_filename|>web/src/main/kotlin/org/jetbrains/kotlin/spec/viewer/Header.kt<|end_filename|>
package org.jetbrains.kotlin.spec.viewer
import js.externals.jquery.`$`
import org.jetbrains.kotlin.spec.utils.format
import org.jetbrains.kotlin.spec.utils.isDevMode
import org.jetbrains.kotlin.spec.utils.shouldBeShowedMarkup
import org.jetbrains.kotlin.spec.viewer.links.SentenceFinder
object Header {
private const val HEADER_BAR = ".header-bar"
fun init() {
if (isDevMode)
`$`(SentenceFinder.FINDER_BAR_HTML.format(*(if (shouldBeShowedMarkup) arrayOf("hide", "Hide") else arrayOf("show", "Show")))).appendTo(HEADER_BAR)
}
}
<|start_filename|>grammar/src/test/org/jetbrains/kotlin/spec/grammar/parsetree/ComparisonTree.kt<|end_filename|>
package org.jetbrains.kotlin.spec.grammar.parsetree
import com.intellij.openapi.util.io.FileUtil
import java.io.File
import java.util.*
data class ComparisonTree(
val text: String,
val children: MutableList<ComparisonTree> = mutableListOf()
) {
fun dump(indent: Int = 0, sb: Appendable = StringBuilder()) {
sb.append(" ".repeat(indent)).appendln(text)
for (child in children) child.dump(indent + 2, sb)
}
fun dumpChildren(sb: Appendable = StringBuilder()) {
for (child in children) child.dump(0, sb)
}
fun firstDifference(that: ComparisonTree): Pair<ComparisonTree, ComparisonTree>? {
if (text != that.text || children.size != that.children.size) return this to that
children.zip(that.children) { here, there ->
val diff = here.firstDifference(there)
if (diff != null) return diff
}
return null
}
fun allDifferences(that: ComparisonTree, collector: MutableList<Pair<ComparisonTree, ComparisonTree>> = mutableListOf()): List<Pair<ComparisonTree, ComparisonTree>> {
if (text != that.text || children.size != that.children.size) collector.add(this to that)
else children.zip(that.children) { here, there ->
here.allDifferences(there, collector)
}
return collector
}
companion object {
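// Rebuilds a tree from an indentation-based dump: each line's leading-whitespace depth determines its parent via a stack of open nodes.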
fun parse(s: List<String>): ComparisonTree {
data class StackElement(val tree: ComparisonTree, val depth: Int)
val root = ComparisonTree("<root>")
val stack = Stack<StackElement>()
stack.push(StackElement(root, -1))
for (line in s) {
val depth = line.takeWhile { it.isWhitespace() }.length
while(depth <= stack.peek().depth) {
stack.pop()
}
val newTree = ComparisonTree(line.trim())
stack.peek().tree.children.add(newTree)
stack.push(StackElement(newTree, depth))
}
return root
}
fun parse(f: File) = parse(f.readLines())
}
}
fun isDiffCompatible_28_1_21(left: ComparisonTree, right: ComparisonTree): Boolean {
if (left.text == "postfixUnaryExpression" &&
left.children.firstOrNull()?.children?.firstOrNull()?.text == "callableReference")
return true
if (left.text == "genericCallLikeComparison" &&
left.children.getOrNull(1)?.text == "callSuffix")
return true
if (left.text == "parameterWithOptionalType" && right.text == "functionValueParameterWithOptionalType")
return true
return false
}
fun regression_28_1_21() {
File("/home/belyaev/kotlin-spec/grammar/testData")
.walkTopDown()
.filter { it.name.endsWith(".actual") }
.forEach { file ->
println("Processing: $file")
val original = File(file.parent, file.name.removeSuffix(".actual"))
check(original.exists())
println("Original: $original")
val source = File(file.parent, original.name.removeSuffix(".antlrtree.txt") + ".kt")
println("Source: $source")
val ct1 = ComparisonTree.parse(original)
val ct2 = ComparisonTree.parse(file)
val diffs = ct1.allDifferences(ct2)
print("Diffs compatible?")
val diffsCompatible = diffs.all { (l, r) -> isDiffCompatible_28_1_21(l, r) }
println(diffsCompatible)
if (diffsCompatible) {
FileUtil.writeToFile(original, file.readBytes())
}
}
}
fun isDiffCompatible_22_3_21(left: ComparisonTree, right: ComparisonTree): Boolean {
if (left.text == """Identifier("value")""" && right.text == """VALUE("value")""") return true
return false
}
fun main() {
File("/home/belyaev/kotlin-spec/grammar/testData")
.walkTopDown()
.filter { it.name.endsWith(".actual") }
.forEach { file ->
println("Processing: $file")
val original = File(file.parent, file.name.removeSuffix(".actual"))
check(original.exists())
println("Original: $original")
val source = File(file.parent, original.name.removeSuffix(".antlrtree.txt") + ".kt")
println("Source: $source")
val ct1 = ComparisonTree.parse(original)
val ct2 = ComparisonTree.parse(file)
val diffs = ct1.allDifferences(ct2)
print("Diffs compatible?")
val diffsCompatible = diffs.all { (l, r) -> isDiffCompatible_22_3_21(l, r) }
println(diffsCompatible)
if (diffsCompatible) {
FileUtil.writeToFile(original, file.readBytes())
}
}
}
<|start_filename|>docs/html/build.gradle.kts<|end_filename|>
val htmlBuildDir = "${project.parent?.buildDir}/spec/html"
val scriptsDir = "${project.parent?.projectDir}/scripts/build"
tasks.create<Exec>("build") {
group = "internal"
inputs.dir("${project.parent?.projectDir}/src/md/kotlin.core")
dependsOn(":docs:prepareShell")
dependsOn(":docs:convertGrammar")
dependsOn(":docs:filtersJar")
environment["PROJECT_DIR"] = project.parent?.projectDir!!
doFirst {
workingDir = File(scriptsDir)
commandLine = listOf("bash", "$scriptsDir/buildHtml.sh")
File(htmlBuildDir).mkdirs()
}
}
<|start_filename|>docs/src/main/kotlin/org/jetbrains/kotlin/spec/mathInCodeFilter.kt<|end_filename|>
package org.jetbrains.kotlin.spec
import ru.spbstu.pandoc.*
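// Pandoc filter: in code blocks whose class ends with "+math", text between $$$ delimiters is rendered as inline math while the rest of each line stays inline code.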
object MathInCode : PandocVisitor() {
override fun visit(b: Block.CodeBlock): Block {
b.attr.classes.any { it.endsWith("+math") } || return super.visit(b)
val newAttr = b.attr.copy(classes = b.attr.classes.map { it.replace("+math", "") })
val codeLines = b.text.lines()
val res = mutableListOf<Inline>()
for(line in codeLines) {
if("$$$" !in line) res += Inline.Code(attr = newAttr, text = line)
else {
for(chunk in line.split("$$$").chunked(2)) {
res += Inline.Code(attr = newAttr, text = chunk[0])
if(chunk.size > 1) {
res += Inline.Math(MathType.InlineMath, text = chunk[1].trimEnd().removeSuffix("$$$"))
}
}
}
res += Inline.LineBreak
}
return Block.Plain(res)
}
}
fun main() = makeFilter(MathInCode)
<|start_filename|>web/src/main/kotlin/org/jetbrains/kotlin/spec/viewer/Sidebar.kt<|end_filename|>
package org.jetbrains.kotlin.spec.viewer
import js.externals.jquery.JQuery
import js.externals.jquery.`$`
import org.w3c.dom.HTMLInputElement
import org.w3c.dom.events.KeyboardEvent
import kotlinx.browser.document
import kotlinx.browser.window
object Sidebar {
private const val LOAD_PDF_ICON = "./resources/images/pdf.png"
private const val TOC = "#TOC"
private const val TOC_BAR = ".toc-bar"
private const val TOC_BODY = ".toc-body"
private const val OFFSET_BEFORE_SCROLLED_ELEMENT = 100
private fun expandItemsHierarchy(sectionMenuItem: JQuery) {
if (sectionMenuItem.length == 0) return
// Clear previously set classes on other elements
`$`(".toc-element").removeClass("active")
sectionMenuItem.addClass("active").addClass("toggled")
sectionMenuItem.next("ul").find("> li").show()
sectionMenuItem.parents("$TOC ul").find("> li").show()
sectionMenuItem.parents("li").find(" > a").addClass("toggled")
}
private fun expandItemsHierarchy(sectionId: String) {
val escapedSectionId = sectionId.replace(".", "\\.")
val sectionMenuItem = `$`("#toc-element-$escapedSectionId")
expandItemsHierarchy(sectionMenuItem)
}
private fun expandItemsHierarchyByUrl(shouldScrollToItem: Boolean = true) {
val sectionIdFromHash = window.location.hash.removePrefix("#")
val sectionIdFromPath = window.location.pathname.split("/").last().removeSuffix(".html")
expandItemsHierarchy(if (sectionIdFromHash.isNotBlank()) sectionIdFromHash else sectionIdFromPath)
if (shouldScrollToItem) {
scrollToActiveItem()
}
}
private fun scrollToActiveItem() {
if (`$`("$TOC_BAR .active").length != 0) {
val tocSelector = `$`(TOC_BODY)
tocSelector.scrollTop(
tocSelector.scrollTop().toInt() - tocSelector.offset().top.toInt() + `$`("$TOC_BAR .active").offset().top.toInt() - OFFSET_BEFORE_SCROLLED_ELEMENT
)
}
}
private fun toggleItem(element: JQuery) {
element.find("> li").toggle()
element.parent("li").find("> a").toggleClass("toggled")
}
private fun toggleSidebar() {
val `$html` = `$`("html")
if (`$html`.hasClass("with-toggled-toc-if-needed")) {
`$html`.removeClass("with-toggled-toc-if-needed")
toggleSidebar()
}
`$`(TOC_BAR).toggleClass("active")
`$html`.toggleClass("with-toggled-toc")
`$`(".icon-menu").toggleClass("active")
}
private fun hideSideBarIfNeeded() {
`$`("html").addClass("with-toggled-toc-if-needed")
}
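// As the page scrolls, shifts the TOC bar up by at most the 61px header height and recomputes the TOC body height accordingly.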
private fun setScrollSettings() {
`$`(window).scroll {
val x = (`$`(document).scrollTop().toDouble())
val y = maxOf(-61.0, -x)
`$`(TOC_BAR).css("margin-top", y)
`$`(".toc-body").css("height", "-moz-calc(100% - ${161 + y}px)")
.css("height", "-webkit-calc(100% - ${161 + y}px)")
.css("height", "calc(100% - ${161 + y}px)")
}
}
fun init() {
// wrap the pandoc-generated TOC into the .toc-bar .toc-body element
`$`(TOC_BODY).removeClass("empty")
`$`(TOC).appendTo(TOC_BODY)
expandItemsHierarchyByUrl(shouldScrollToItem = true)
`$`(window).on("hashchange") { _, _ ->
expandItemsHierarchyByUrl()
hideSideBarIfNeeded()
}
`$`("$TOC > ul > li, #TOC > ul > li > ul > li").show() // show "Kotlin core" subsections by default
`$`(TOC).addClass("loaded")
`$`("$TOC > ul > li ul").on("click") { e, _ ->
e.stopPropagation()
val target = `$`(e.target)
if (e.offsetY.toInt() < 0 && !target.`is`("#TOC > ul > li > ul")) {
toggleItem(`$`(e.target))
}
}
document.body?.let { `$`(it) }?.run {
on("click", ".icon-menu") { _, _ -> toggleSidebar() }
}
installSearchBar()
setScrollSettings()
addPdfLinks()
}
private var currSearchString = ""
private var currResultIdx = 0
private fun installSearchBar() {
val searchBar = `$`("#toc-search-bar")[0] ?: return
searchBar.addEventListener("keyup", callback = cb@{ e ->
// If key isn't "Enter" then return
if ((e as? KeyboardEvent)?.which != 13) return@cb
val searchString = (e.target as? HTMLInputElement)?.value ?: return@cb
if (searchString.isBlank()) return@cb
val foundItem = `$`("$TOC .toc-element:contains($searchString)")
if (foundItem.length == 0) return@cb
if (currSearchString != searchString) {
currSearchString = searchString
currResultIdx = -1
}
currResultIdx += 1
currResultIdx %= foundItem.length.toInt()
val currItem = foundItem.eq(currResultIdx)
expandItemsHierarchy(currItem)
scrollToActiveItem()
})
}
private fun addPdfLinks() {
`$`("$TOC > ul > li > ul > li").each { _, el ->
val sectionName = `$`(el).find("> a").attr("href").substringBefore(".html")
`$`(el).prepend("<a href=\"./pdf/sections/$sectionName.pdf\" target=\"_blank\" class=\"download-section-as-pdf\" title=\"Download section as pdf\"></a>")
}
`$`("h2").each { _, el ->
val sectionName = `$`(el).attr("id")
`$`(el).append("<a href=\"./pdf/sections/$sectionName.pdf\" target=\"_blank\" onclick=\"event.stopPropagation();\" class=\"download-section-as-pdf-text-link\" title=\"Download section as pdf\">"
+ """<img src="${LOAD_PDF_ICON}" />""".trimIndent() + "</a>")
}
}
}
<|start_filename|>web/src/main/kotlin/org/jetbrains/kotlin/spec/main.kt<|end_filename|>
package org.jetbrains.kotlin.spec
import js.externals.jquery.`$`
import org.jetbrains.kotlin.spec.loader.SpecTestsLoader
import org.jetbrains.kotlin.spec.utils.*
import org.jetbrains.kotlin.spec.viewer.Header
import org.jetbrains.kotlin.spec.viewer.NavigationType
import org.jetbrains.kotlin.spec.viewer.Sidebar
import org.jetbrains.kotlin.spec.viewer.SpecTestsViewer
import org.jetbrains.kotlin.spec.viewer.links.SentenceFinder
import org.jetbrains.kotlin.spec.viewer.links.SpecPlaceHighlighter
import org.w3c.dom.set
import kotlinx.browser.document
import kotlinx.browser.window
fun turnOnPermanentDevModeIfNeeded() {
if (window.location.searchMap["mode"] == "dev") {
window.localStorage["isDevMode"] = "true"
}
}
fun init() {
val specTestsLoader = SpecTestsLoader()
val specTestsViewer = SpecTestsViewer()
turnOnPermanentDevModeIfNeeded()
Sidebar.init()
Header.init()
`$`("h2, h3, h4, h5").each { _, el ->
val idValue = `$`(el).attr("id")
if (idValue !in SpecTestsViewer.excludedSectionsToLoadTests) {
SpecTestsLoader.insertLoadIcon(`$`(el))
}
}
if (shouldBeShowedMarkup && isDevMode) {
SpecTestsLoader.showMarkup()
}
if (sentenceToBeHighlighted != null) {
SpecPlaceHighlighter.highlightSentence(sentenceToBeHighlighted)
}
if (paragraphToBeHighlighted != null) {
SpecPlaceHighlighter.highlightParagraph(paragraphToBeHighlighted)
}
if (sectionToBeHighlighted != null) {
SpecPlaceHighlighter.highlightSection(sectionToBeHighlighted)
}
document.body?.let { `$`(it) }?.run {
on("click", ".sentence.covered") { e, _ ->
specTestsViewer.showViewer(`$`(e.currentTarget))
}
on("change", ".test-coverage-view select[name='test-area']") { _, _ ->
specTestsViewer.onTestAreaChange()
}
on("change", ".test-coverage-view select[name='test-type']") { _, _ ->
specTestsViewer.onTestTypeChange()
}
on("change", ".test-coverage-view select[name='test-link-type']") { _, _ ->
specTestsViewer.onTestPriorityChange()
}
on("change", ".test-coverage-view select[name='test-number']") { _, _ ->
specTestsViewer.onTestNumberChange()
}
on("click", ".prev-testcase") { e, _ ->
specTestsViewer.navigateTestCase(`$`(e.currentTarget), NavigationType.PREV)
false
}
on("click", ".next-testcase") { e, _ ->
specTestsViewer.navigateTestCase(`$`(e.currentTarget), NavigationType.NEXT)
false
}
on("click", ".load-tests") { e, _ ->
specTestsLoader.onTestsLoadingLinkClick(`$`(e.currentTarget))
false
}
on("click", ".set-branch") { _, _ ->
SpecTestsLoader.onSetBranchIconClick()
false
}
on("click", ".number-info[data-path]") { e, _ ->
SpecPlaceHighlighter.onSentenceGetLinkClick(`$`(e.currentTarget))
e.stopPropagation()
false
}
on("click", ".paragraph-link") { e, _ ->
SpecPlaceHighlighter.onParagraphGetLinkClick(`$`(e.currentTarget))
false
}
on("click", ".loaded-tests") { _, _ -> false }
on("click", ".spec-sentence-find") { _, _ -> SentenceFinder.findSentence() }
on("keyup", ".spec-location-search input[name=\"spec-sentence-location\"]") { e, _ ->
if (e.keyCode == 13) {
SentenceFinder.findSentence()
}
}
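// Alt+E (key codes 18 and 69 held together) focuses and selects the sentence-location search field.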
mutableMapOf(18 to false, 69 to false).let { keys ->
`$`(document).keydown { e ->
keys[e.keyCode.toInt()] = true
true
}.keyup { e ->
if (keys[18] == true && keys[69] == true) {
`$`(".spec-location-search input[name=\"spec-sentence-location\"]").focus().select()
}
keys[e.keyCode.toInt()] = false
true
}
}
on("click", ".show-markup-link") { _, _ ->
window.localStorage["showMarkup"] = "true"
SpecTestsLoader.showMarkup()
false
}
on("click", ".disable-dev-mode") { _, _ ->
window.localStorage.removeItem("isDevMode")
window.location.reload()
false
}
on("click", ".hide-markup-link") { _, _ ->
window.localStorage.removeItem("showMarkup")
window.location.reload()
false
}
on("click", "h2, h3, h4, h5") { e, _ ->
SpecPlaceHighlighter.onHeaderGetLinkClick(`$`(e.currentTarget))
}
}
`$`(document).ready {
val firstH2Heading = `$`("h2").first()
if (firstH2Heading.length != 0) {
val headingValue = firstH2Heading.contents()[0]?.nodeValue
if (headingValue != null) {
document.title = "$headingValue - ${document.title}"
}
}
true
}
}
fun main() = init()
<|start_filename|>web/src/main/kotlin/org/jetbrains/kotlin/spec/loader/GithubTestsLoader.kt<|end_filename|>
package org.jetbrains.kotlin.spec.loader
import js.externals.jquery.JQueryAjaxSettings
import js.externals.jquery.JQueryXHR
import js.externals.jquery.`$`
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.JsonConfiguration
import org.jetbrains.kotlin.spec.entity.TestsLoadingInfo
import org.jetbrains.kotlin.spec.entity.SpecSection
import org.jetbrains.kotlin.spec.entity.test.SpecTest
import org.jetbrains.kotlin.spec.entity.test.TestPlace
import org.jetbrains.kotlin.spec.entity.test.parameters.TestInfo
import org.jetbrains.kotlin.spec.entity.test.parameters.testArea.TestArea
import org.jetbrains.kotlin.spec.utils.format
import kotlinx.browser.window
import kotlin.js.Promise
interface GithubTestsLoader {
enum class TestOrigin { SPEC_TEST, IMPLEMENTATION_TEST }
companion object {
private const val RAW_GITHUB_URL = "https://raw.githubusercontent.com/JetBrains/kotlin"
private const val SPEC_TEST_DATA_PATH = "compiler/tests-spec/testData"
private const val LINKED_SPEC_TESTS_FOLDER = "linked"
private const val HELPERS_FOLDER = "helpers"
private const val SECTIONS_MAP_FILENAME = "sectionsMap.json"
private const val TESTS_MAP_FILENAME = "testsMap.json"
const val DEFAULT_BRANCH = "spec-tests"
protected val testAreasToLoad = TestArea.values()
fun getBranch() = window.localStorage.getItem("spec-tests-branch") ?: DEFAULT_BRANCH
fun loadHelperFromRawGithub(fileName: String, testArea: TestArea): Promise<String> {
return Promise { requestResolve, requestReject ->
`$`.ajax(getFullHelperPath(testArea, fileName),
jQueryAjaxSettings { requestReject(Exception()) }
).then({ response: Any?, _: Any ->
requestResolve(response.toString())
})
}
}
fun loadTestFileFromRawGithub(
path: String,
testInfo: TestInfo,
testPlace: TestPlace,
testArea: TestArea
): Promise<SpecTest> = Promise { requestResolve, requestReject ->
`$`.ajax(getFullTestPath(path),
jQueryAjaxSettings { requestReject(Exception()) }
).then({ response: Any?, _: Any ->
requestResolve(SpecTest(testInfo, testPlace, response.toString(), testArea))
})
}
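// Loads testsMap.json for every test area that, according to the sections map, has tests for the requested section; areas that fail to load are simply left out of the resulting map.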
fun loadTestMapFileFromRawGithub(
mainSectionName: String,
path: String,
testType: TestOrigin,
sectionsMapByTestArea: Map<TestArea, TestsLoadingInfo.Sections>
): Promise<Map<TestArea, TestsLoadingInfo.Tests>> = Promise { resolve, _ ->
val resultMap = mutableMapOf<TestArea, TestsLoadingInfo.Tests>()
val loadableTestAreas: MutableSet<TestArea> = mutableSetOf()
testAreasToLoad.forEach {
if (sectionsMapByTestArea.isTestsMapExists(testArea = it, requestedMainSection = mainSectionName, requestedSubsectionPath = path)) {
loadableTestAreas.add(it)
}
}
`$`.`when`(
*(loadableTestAreas.associateWith {
`$`.ajax(getFullTestMapPath(testType, it, mainSectionName, path), jQueryAjaxSettings { })
.then({ response: Any?, _: Any ->
resultMap[it] = TestsLoadingInfo.Tests(parseJsonText(response.toString()))
})
}.values.toTypedArray())
).then({ _: Any?, _: Any -> resolve(resultMap) }, { resolve(resultMap) })
}
private fun Map<TestArea, TestsLoadingInfo.Sections>.isTestsMapExists(testArea: TestArea, requestedMainSection: String, requestedSubsectionPath: String): Boolean {
val subsectionsArray = this[testArea]?.json?.jsonObject?.get(requestedMainSection) ?: return false
subsectionsArray.jsonArray.forEach { jsonElement ->
if (jsonElement.primitive.content == requestedSubsectionPath)
return true
}
return false
}
private fun getFullTestMapPath(testOrigin: TestOrigin, testArea: TestArea, mainSectionName: String, path: String) =
when (testOrigin) {
TestOrigin.SPEC_TEST -> "{1}/{2}/{3}/{4}/{5}/{6}/{7}/{8}"
.format(RAW_GITHUB_URL, getBranch(), SPEC_TEST_DATA_PATH, testArea.path, LINKED_SPEC_TESTS_FOLDER, mainSectionName, path, TESTS_MAP_FILENAME)
TestOrigin.IMPLEMENTATION_TEST -> "{1}/{2}/{3}/{4}/{5}".format(RAW_GITHUB_URL, getBranch(), mainSectionName, path, TESTS_MAP_FILENAME)
}
fun loadSectionsMapFileFromRawGithub(): Promise<Map<TestArea, TestsLoadingInfo.Sections>> = Promise { resolve, _ ->
val resultMap = mutableMapOf<TestArea, TestsLoadingInfo.Sections>()
`$`.`when`(
*(testAreasToLoad.asList().associateWith {
`$`.ajax(getFullSectionsMapPath(it), jQueryAjaxSettings { })
.then({ response: Any?, _: Any ->
resultMap[it] = TestsLoadingInfo.Sections(parseJsonText(response.toString()))
})
}.values.toTypedArray())
).then({ _: Any?, _: Any -> resolve(resultMap) }, { resolve(resultMap) })
}
private fun getFullSectionsMapPath(testArea: TestArea) = "{1}/{2}/{3}/{4}/{5}/{6}"
.format(RAW_GITHUB_URL, getBranch(), SPEC_TEST_DATA_PATH, testArea.path, LINKED_SPEC_TESTS_FOLDER, SECTIONS_MAP_FILENAME)
private fun getFullHelperPath(testArea: TestArea, helperFile: String) =
"{1}/{2}/{3}/{4}/{5}/{6}"
.format(RAW_GITHUB_URL, getBranch(), SPEC_TEST_DATA_PATH, testArea.path, HELPERS_FOLDER, helperFile)
private fun getFullTestPath(path: String) = "{1}/{2}/{3}".format(RAW_GITHUB_URL, getBranch(), path)
private fun parseJsonText(text: String) = Json(JsonConfiguration.Stable).parseJson(text)
private fun jQueryAjaxSettings(requestReject: (Throwable) -> Unit) = object : JQueryAjaxSettings {
override var cache: Boolean?
get() = false
set(_) {}
override var type: String?
get() = "GET"
set(_) {}
override val error: ((jqXHR: JQueryXHR, textStatus: String, errorThrown: String) -> Any)?
get() = { _, _, _ -> requestReject(Exception()) }
}
}
fun loadTestFiles(sectionToLoadName: String, mainSectionPath: String, sectionsPath: List<String>, sectionsMapsByTestArea: Map<TestArea, TestsLoadingInfo.Sections>): Promise<Promise<SpecSection>>
}
| greck2908/kotlin-spec |
<|start_filename|>com.mushoo.RunAppleScript.sdPlugin/Index.js<|end_filename|>
var websocket = null,
uuid = null,
actionInfo = {},
inInfo = {},
runningApps = [],
isQT = navigator.appVersion.includes('QtWebEngine');
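// Opens the WebSocket connection to the Stream Deck software, registers this property inspector and notifies the plugin once connected.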
function connectSocket(inPort, inUUID, inRegisterEvent, inInfo, inActionInfo) {
uuid = inUUID;
actionInfo = JSON.parse(inActionInfo); // cache the info
inInfo = JSON.parse(inInfo);
websocket = new WebSocket('ws://localhost:' + inPort);
addDynamicStyles(inInfo.colors);
websocket.onopen = function () {
var json = {
event: inRegisterEvent,
uuid: inUUID
};
websocket.send(JSON.stringify(json));
// Notify the plugin that we are connected
sendValueToPlugin('propertyInspectorConnected', 'property_inspector');
};
websocket.onmessage = function (evt) {
// Received message from Stream Deck
var jsonObj = JSON.parse(evt.data);
if (jsonObj.event === 'sendToPropertyInspector') {
var payload = jsonObj.payload;
if (payload.error) {
// Show Error
// You can use this to show any errors and short circuit the rest of the refresh code
return;
}
var select_single = document.getElementById('select_single');
select_single.value = payload.selectedValue;
var text_demo = document.getElementById('text_demo');
text_demo.value = payload.textDemoValue;
select_single.disabled = false;
text_demo.disabled = false;
}
};
}
function updateSettings() {
var select_single = document.getElementById('select_single');
var text_demo = document.getElementById('text_demo');
var payload = {};
payload.property_inspector = 'updateSettings';
payload.selectedValue = select_single.value;
payload.textDemoValue = text_demo.value;
sendPayloadToPlugin(payload);
}
// our method to pass values to the plugin
function sendPayloadToPlugin(payload) {
if (websocket && (websocket.readyState === 1)) {
const json = {
'action': actionInfo['action'],
'event': 'sendToPlugin',
'context': uuid,
'payload': payload
};
websocket.send(JSON.stringify(json));
}
}
function sendValueToPlugin(value, param) {
if (websocket && (websocket.readyState === 1)) {
const json = {
'action': actionInfo['action'],
'event': 'sendToPlugin',
'context': uuid,
'payload': {
[param]: value
}
};
websocket.send(JSON.stringify(json));
}
}
if (!isQT) {
document.addEventListener('DOMContentLoaded', function () {
initPropertyInspector();
});
}
window.addEventListener('beforeunload', function (e) {
e.preventDefault();
// Notify the plugin we are about to leave
sendValueToPlugin('propertyInspectorWillDisappear', 'property_inspector');
    // Don't set a returnValue on the event, otherwise Chromium will throw an error.
});
function addDynamicStyles(clrs) {
    const node = document.getElementById('sdpi-dynamic-styles') || document.createElement('style');
if (!clrs.mouseDownColor) clrs.mouseDownColor = fadeColor(clrs.highlightColor, -100);
const clr = clrs.highlightColor.slice(0, 7);
const clr1 = fadeColor(clr, 100);
const clr2 = fadeColor(clr, 60);
const metersActiveColor = fadeColor(clr, -60);
node.setAttribute('id', 'sdpi-dynamic-styles');
node.innerHTML = `
input[type="radio"]:checked + label span,
input[type="checkbox"]:checked + label span {
background-color: ${clrs.highlightColor};
}
input[type="radio"]:active:checked + label span,
input[type="radio"]:active + label span,
input[type="checkbox"]:active:checked + label span,
input[type="checkbox"]:active + label span {
background-color: ${clrs.mouseDownColor};
}
input[type="radio"]:active + label span,
input[type="checkbox"]:active + label span {
background-color: ${clrs.buttonPressedBorderColor};
}
td.selected,
td.selected:hover,
li.selected:hover,
li.selected {
color: white;
background-color: ${clrs.highlightColor};
}
.sdpi-file-label > label:active,
.sdpi-file-label.file:active,
label.sdpi-file-label:active,
label.sdpi-file-info:active,
input[type="file"]::-webkit-file-upload-button:active,
button:active {
background-color: ${clrs.buttonPressedBackgroundColor};
color: ${clrs.buttonPressedTextColor};
border-color: ${clrs.buttonPressedBorderColor};
}
::-webkit-progress-value,
meter::-webkit-meter-optimum-value {
background: linear-gradient(${clr2}, ${clr1} 20%, ${clr} 45%, ${clr} 55%, ${clr2})
}
::-webkit-progress-value:active,
meter::-webkit-meter-optimum-value:active {
background: linear-gradient(${clr}, ${clr2} 20%, ${metersActiveColor} 45%, ${metersActiveColor} 55%, ${clr})
}
`;
document.body.appendChild(node);
};
/** UTILITIES */
/*
Quick utility to lighten or darken a color (doesn't take color-drifting, etc. into account)
Usage:
fadeColor('#061261', 100); // will lighten the color
    fadeColor('#200867', -100); // will darken the color
*/
function fadeColor(col, amt) {
    const min = Math.min, max = Math.max;
    const num = parseInt(col.replace(/#/g, ''), 16);
    // decompose #RRGGBB into its channels and clamp each to 0..255 after shifting by amt
    const r = min(255, max((num >> 16) + amt, 0));
    const g = min(255, max(((num >> 8) & 0x00FF) + amt, 0));
    const b = min(255, max((num & 0x0000FF) + amt, 0));
    // reassemble the channels and pad to 6 hex digits
    return '#' + (b | (g << 8) | (r << 16)).toString(16).padStart(6, '0');
}
<|start_filename|>com.mushoo.RunAppleScript.sdPlugin/en.json<|end_filename|>
{
"Description": "Run an arbitrary chunk of Applescript code, or a .scpt file",
"Name": "Run AppleScript",
"com.mushoo.runapplescript.action": {
"Name": "Run AppleScript",
"Tooltip": "Run an arbitrary chunk of Applescript code, or a .scpt file"
}
}
<|start_filename|>com.mushoo.RunAppleScript.sdPlugin/app.js<|end_filename|>
var websocket = null,
piContext = 0,
MActions = {},
runningApps = [],
contextArray = [],
DestinationEnum = Object.freeze({ 'HARDWARE_AND_SOFTWARE': 0, 'HARDWARE_ONLY': 1, 'SOFTWARE_ONLY': 2 });
function connectSocket (
inPort,
inUUID,
inMessageType,
inApplicationInfo,
inActionInfo
) {
if (websocket) {
websocket.close();
websocket = null;
};
var appInfo = JSON.parse(inApplicationInfo);
var isMac = appInfo.application.platform === 'mac';
var getApplicationName = function (jsn) {
if (jsn && jsn['payload'] && jsn.payload['application']) {
return isMac ? jsn.payload.application.split('.').pop() : jsn.payload.application.split('.')[0];
}
return '';
};
websocket = new WebSocket('ws://localhost:' + inPort);
websocket.onopen = function () {
var json = {
event: inMessageType,
uuid: inUUID
};
websocket.send(JSON.stringify(json));
};
websocket.onclose = function (evt) {
        console.log('[STREAMDECK]***** WEBSOCKET CLOSED **** reason:', evt);
};
websocket.onerror = function (evt) {
        console.warn('WEBSOCKET ERROR', evt, evt.data);
};
websocket.onmessage = function (evt) {
try {
var jsonObj = JSON.parse(evt.data);
var event = jsonObj['event'];
if (~['applicationDidLaunch', 'applicationDidTerminate'].indexOf(event)) {
const app = capitalize(getApplicationName(jsonObj));
const img = `images/${jsonObj.payload.application}.png`;
const arrImages = event === 'applicationDidTerminate' ? [img, 'images/terminated.png'] : img;
contextArray.forEach(a => {
loadAndSetImage(a, arrImages);
});
if (event === 'applicationDidLaunch') {
if (!runningApps.includes(app)) { runningApps.push(app); };
} else if (event === 'applicationDidTerminate') {
runningApps = runningApps.filter(item => item !== app);
}
if (piContext && piContext !== 0) { // there's a property inspector
sendToPropertyInspector(piContext, { runningApps });
}
} else {
/** dispatch message for V1 */
const aEvt = !jsonObj.hasOwnProperty('action') ? jsonObj.event : jsonObj['action'] + '.' + jsonObj['event'];
if (MActions.hasOwnProperty(aEvt)) {
MActions[aEvt](jsonObj);
}
/** dispatch message for V2 */
/* we could also use this (although a bit hacky), to keep the original plugin-structure */
if (jsonObj.hasOwnProperty('action')) {
var actn = jsonObj.action.split('.').splice(-1)[0];
console.log('actn:', actn);
if (window[actn].hasOwnProperty(jsonObj.event)) {
window[actn][jsonObj.event](jsonObj);
}
}
/** dispatch message for V3 */
/* even more hacky, but works for multi-actions */
let bEvt;
if (jsonObj['event'] && jsonObj['event'] === 'willAppear') {
bEvt = jsonObj['event'];
} else {
bEvt = !jsonObj.hasOwnProperty('action') ? jsonObj.event : jsonObj.event + jsonObj['context'];
}
if (actionV3.hasOwnProperty(bEvt)) {
actionV3[bEvt](jsonObj);
}
}
} catch (error) {
console.trace('Could not parse incoming message', error, evt.data);
}
};
}
/** Here are a bunch of experiments how to even more streamline communication with the PI */
/**
* V1 define the actions in an object
* Will not work in multiactions, but can be extended as in V3
* */
MActions['com.elgato.pisamples.action.willAppear'] = function (jsn) {
console.log('**V1** MActions.WILLAPPEAR', jsn.context);
};
MActions['com.elgato.pisamples.action.willDisappear'] = function (jsn) {
console.log('**V1** MActions.WILLDISAPPEAR ', jsn.context);
};
MActions['com.elgato.pisamples.action.keyUp'] = function (jsn) {
console.log('**V1** MActions.KEYUP ', jsn.context);
};
MActions['com.elgato.pisamples.action.keyDown'] = function (jsn) {
console.log('**V1** MActions.KEYDOWN ', jsn.context);
};
MActions['com.elgato.pisamples.action.sendToPlugin'] = function (jsn) {
console.log('**V1** MActions.SENDTOPLUGIN:', jsn.context, jsn);
console.log('%c%s', 'color: white; background: darkgreen; font-size: 12px;', `**V1** PI SENDTOPLUGIN for ${jsn.context}`);
};
/**
* V2 keep old plugin structure
* Will not work in multi-actions (without caching the context)
* */
var action = {
willAppear: function (jsn) {
console.log('**V2** action.WILLAPPEAR', jsn.context);
},
willDisappear: function (jsn) {
console.log('**V2** action.WILLDISAPPEAR', jsn.context);
},
keyDown: function (jsn) {
console.log('**V2** action.KEYDOWN', jsn.context);
},
keyUp: function (jsn) {
console.log('**V2** action.KEYUP', jsn.context);
},
sendToPlugin: function (jsn) {
console.log('**V2** action.SENDTOPLUGIN', jsn.context, jsn);
console.log('%c%s', 'color: white; background: pink; font-size: 12px;', `**V2** PI SENDTOPLUGIN for ${jsn.context}`);
}
};
/**
* V3 restrict to context
* This will also work with multi-actions stored in different contexts
*/
var actionV3 = {
willAppear: function (jsn) {
console.log('**V3** actionV3.WILLAPPEAR', jsn.context);
if (!contextArray.includes(jsn.context)) {
contextArray.push(jsn.context);
}
actionV3['keyDown' + jsn.context] = function (jsn) {
console.log('**V3** actionV3.KEYDOWN', jsn.context);
};
actionV3['keyUp' + jsn.context] = function (jsn) {
console.log('**V3** actionV3.KEYUP', jsn.context);
};
actionV3['sendToPlugin' + jsn.context] = function (jsn) {
console.log('**V3** actionV3.SENDTOPLUGIN', jsn.context, jsn);
if (jsn.hasOwnProperty('payload')) {
const pl = jsn.payload;
if (pl.hasOwnProperty('property_inspector')) {
const pi = pl.property_inspector;
console.log('%c%s', 'font-style: bold; color: white; background: blue; font-size: 15px;', `PI-event for ${jsn.context}:${pi}`);
switch (pl.property_inspector) {
case 'propertyInspectorWillDisappear':
loadAndSetImage(jsn.context, `images/piterminated.png`);
setTimeout(() => {
loadAndSetImage(jsn.context, `images/default.png`);
}, 500);
setContext(0); // set a flag, that our PI was removed
break;
case 'propertyInspectorConnected':
setContext(jsn.context);
sendToPropertyInspector(jsn.context, { runningApps });
break;
};
} else {
if (pl.hasOwnProperty('sdpi_collection')) {
console.log('%c%s', 'color: white; background: blue; font-size: 12px;', `PI SENDTOPLUGIN for ${jsn.context}`);
if (pl.sdpi_collection['key'] === 'your_canvas') {
setImage(jsn.context, pl.sdpi_collection['value']);
} else {
setTitle(jsn.context, pl.sdpi_collection['value']);
}
} else if (pl.hasOwnProperty('DOM')) {
} else {
console.log('%c%s', 'color: white; background: green; font-size: 12px;', `PI SENDTOPLUGIN for ${jsn.context}`);
}
}
}
};
actionV3['willDisappear' + jsn.context] = function (jsn) {
console.log('**V3** action.WILLDISAPPEAR', jsn.context, contextArray);
contextArray = contextArray.filter(item => item !== jsn.context);
console.log(contextArray);
};
}
};
/** STREAM DECK COMMUNICATION */
function sendToPropertyInspector (context, jsonData, xx) {
var json = {
'event': 'sendToPropertyInspector',
'context': context,
'payload': jsonData
};
console.log('-----');
console.log('sending to Property Inspector', xx, context, piContext, json, JSON.stringify(json));
websocket.send(JSON.stringify(json));
};
function setTitle (context, newTitle) {
// var apps = runningApps.join('\n');
var json = {
'event': 'setTitle',
'context': context,
'payload': {
// 'title': `${newTitle}\n${apps}`,
'title': `${newTitle}`,
'target': DestinationEnum.HARDWARE_AND_SOFTWARE
}
};
websocket.send(JSON.stringify(json));
};
function setImage (context, imgData) {
var json = {
'event': 'setImage',
'context': context,
'payload': {
'image': imgData,
'target': DestinationEnum.HARDWARE_AND_SOFTWARE
}
};
websocket.send(JSON.stringify(json));
};
function loadAndSetImage (context, imageNameOrArr) {
loadImage(imageNameOrArr, function (data) {
var json = {
'event': 'setImage',
'context': context,
'payload': {
'image': data,
'target': DestinationEnum.HARDWARE_AND_SOFTWARE
}
};
websocket.send(JSON.stringify(json));
});
};
/** UTILS */
function capitalize (str) {
return str.charAt(0).toUpperCase() + str.slice(1);
};
function equalArray (a, b) {
if (a.length != b.length) {
return false;
}
return a.filter(function (i) {
return !b.includes(i);
}).length === 0;
}
function setContext (ctx) {
console.log('%c%s', 'color: white; background: blue; font-size: 12px;', 'piContext', ctx, piContext);
piContext = ctx;
console.log('new context: ', piContext);
}
function loadImage (inUrl, callback, inCanvas, inFillcolor) {
/** Convert to array, so we may load multiple images at once */
const aUrl = !Array.isArray(inUrl) ? [inUrl] : inUrl;
const canvas = inCanvas && inCanvas instanceof HTMLCanvasElement
? inCanvas
: document.createElement('canvas');
var imgCount = aUrl.length - 1;
const imgCache = {};
var ctx = canvas.getContext('2d');
ctx.globalCompositeOperation = 'source-over';
for (let url of aUrl) {
let image = new Image();
let cnt = imgCount;
let w = 144, h = 144;
image.onload = function () {
imgCache[url] = this;
// look at the size of the first image
if (url === aUrl[0]) {
canvas.width = this.naturalWidth; // or 'width' if you want a special/scaled size
canvas.height = this.naturalHeight; // or 'height' if you want a special/scaled size
}
// if (Object.keys(imgCache).length == aUrl.length) {
if (cnt < 1) {
if (inFillcolor) {
ctx.fillStyle = inFillcolor;
ctx.fillRect(0, 0, canvas.width, canvas.height);
}
// draw in the proper sequence FIFO
aUrl.forEach(e => {
if (!imgCache[e]) {
console.warn(imgCache[e], imgCache);
}
if (imgCache[e]) {
ctx.drawImage(imgCache[e], 0, 0);
ctx.save();
}
});
callback(canvas.toDataURL('image/png'));
// or to get raw image data
// callback && callback(canvas.toDataURL('image/png').replace(/^data:image\/(png|jpg);base64,/, ''));
}
};
imgCount--;
image.src = url;
}
};
| jameswhite/streamdeck-applescript |
<|start_filename|>index.js<|end_filename|>
'use strict'
const got = require('got')
const cheerio = require('cheerio')
const splitMeta = (str) => str.substr(str.indexOf(':') + 1).trim()
module.exports = function (word, opts) {
if (typeof word !== 'string') {
return Promise.reject(new TypeError(`Expected word to be string, got ${typeof word}`))
}
opts = opts || {}
const token = opts.token || '<PASSWORD>'
return got
.post('http://www.markerapi.com/', {body: {token, search: word}})
.then(res => {
if (res.body.indexOf('no trademarks found') !== -1) {
return false
}
const $ = cheerio.load(res.body)
const result = []
$('.full').each(function () {
const el = $(this)
const meta = el.next().text().trim().split('\n\t\t\t\t\t\t\t')
if (meta[0]) {
const wordmark = el.find('.left').text()
const reg = new Date(el.find('.right').text().substr(4))
const description = splitMeta(meta[0])
const sn = splitMeta(meta[1])
const serviceCode = splitMeta(meta[2])
result.push({
wordmark,
reg,
description,
sn,
serviceCode
})
}
})
return result
})
}
<|start_filename|>test.js<|end_filename|>
import test from 'ava'
import fn from './'
test('wildcard', async t => {
const a = await fn('name*')
t.true(a.length > 2)
t.is(a[0].wordmark, 'NAME')
})
test('single', async t => {
const a = await fn('name')
t.is(a[0].wordmark, 'NAME')
t.is(a[0].description, 'General feature magazines')
})
| egoist/is-trademarked |
<|start_filename|>miniserver/forwardResponse.go<|end_filename|>
package miniserver
import (
"bytes"
"io"
"net/http"
)
type ForwardResponse struct {
u string
}
func NewForwardResponse(u string) *ForwardResponse {
return &ForwardResponse{u}
}
func (forwardResponse *ForwardResponse) write(writer http.ResponseWriter, client *Client) {
errWriter := func() {
writer.WriteHeader(http.StatusNotFound)
}
uClient := &http.Client{}
uRequest, err := http.NewRequest(client.Method, forwardResponse.u,
bytes.NewReader(client.Request))
if err != nil {
errWriter()
return
}
for key := range client.Header {
uRequest.Header.Set(key, client.Header.Get(key))
}
uResponse, err := uClient.Do(uRequest)
if err != nil {
errWriter()
return
}
defer uResponse.Body.Close()
for key := range uResponse.Header {
writer.Header().Set(key, uResponse.Header.Get(key))
}
writer.WriteHeader(uResponse.StatusCode)
io.Copy(writer, uResponse.Body)
}
<|start_filename|>main.go<|end_filename|>
package main
import (
"flag"
"fmt"
"net/http"
"os"
"os/exec"
"os/signal"
"strconv"
"strings"
"github.com/Grarak/GoYTFetcher/api"
"github.com/Grarak/GoYTFetcher/database"
"github.com/Grarak/GoYTFetcher/logger"
"github.com/Grarak/GoYTFetcher/miniserver"
"github.com/Grarak/GoYTFetcher/utils"
)
var indexDir string
func clientHandler(client *miniserver.Client) miniserver.Response {
args := strings.Split(client.Url, "/")[1:]
if len(args) >= 3 && args[0] == "api" {
return api.GetResponse(args[1], args[2], args[3:], client)
}
if !utils.StringIsEmpty(indexDir) {
if client.Url != "/" && utils.FileExists(indexDir+client.Url) {
return client.ResponseFile(indexDir + client.Url)
}
if utils.FileExists(indexDir + "/index.html") {
return client.ResponseFile(indexDir + "/index.html")
}
}
response := client.ResponseBody("Not found")
response.SetStatusCode(http.StatusNotFound)
return response
}
func main() {
logger.Init()
if _, err := exec.LookPath(utils.YOUTUBE_DL); err != nil {
logger.E(utils.YOUTUBE_DL + " is not installed!")
return
}
ffmpeg, err := exec.LookPath(utils.FFMPEG)
if err != nil {
logger.E(utils.FFMPEG + " is not installed!")
return
}
codecs, err := utils.ExecuteCmd(ffmpeg, "-codecs")
if err != nil || !strings.Contains(codecs, "libvorbis") {
logger.E(utils.FFMPEG + " vorbis is not enabled")
return
}
var port int
var ytKey string
flag.IntVar(&port, "p", 6713, "Which port to use")
flag.StringVar(&ytKey, "yt", "", "Youtube Api key")
flag.StringVar(&indexDir, "i", "", "Directory with index.html")
flag.Parse()
utils.Panic(utils.MkDir(utils.DATABASE))
utils.Panic(utils.MkDir(utils.YOUTUBE_DIR))
databaseInstance := database.GetDatabase(utils.GenerateRandom(16), ytKey)
server := miniserver.NewServer(port)
c := make(chan os.Signal, 1)
cleanup := make(chan bool)
signal.Notify(c, os.Interrupt)
go func() {
for sig := range c {
logger.I(fmt.Sprintf("Captured %s, killing...", sig))
server.StopListening()
if err := databaseInstance.Close(); err != nil {
logger.E(fmt.Sprintf("Failed to close database %s", err))
}
cleanup <- true
}
}()
logger.I("Starting server on port " + strconv.Itoa(port))
go server.StartListening(clientHandler)
<-cleanup
}
<|start_filename|>miniserver/simpleResponse.go<|end_filename|>
package miniserver
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"strconv"
"strings"
"github.com/Grarak/GoYTFetcher/utils"
)
type SimpleResponse struct {
contentType, serverDescription string
headers http.Header
statusCode int
readHolder rangeReadHolder
}
type rangeReadHolder interface {
Size() int64
Close() error
io.ReaderAt
}
type rangeReadHolderBytes struct {
bytesReader *bytes.Reader
}
type rangeReadHolderFile struct {
file *os.File
}
type rangeReader struct {
start, end, size int64
holder rangeReadHolder
}
func newResponse() *SimpleResponse {
return &SimpleResponse{
contentType: ContentText,
serverDescription: "Go MiniServer",
headers: make(map[string][]string),
statusCode: http.StatusOK,
}
}
func (client *Client) ResponseBody(body string) *SimpleResponse {
return client.ResponseBodyBytes([]byte(body))
}
func (client *Client) ResponseBodyBytes(body []byte) *SimpleResponse {
return client.ResponseReader(&rangeReadHolderBytes{bytes.NewReader(body)})
}
func (client *Client) ResponseFile(file string) *SimpleResponse {
reader, _ := os.Open(file)
response := client.ResponseReader(&rangeReadHolderFile{reader})
response.contentType = getContentTypeForFile(file)
return response
}
func (client *Client) ResponseReader(readHolder rangeReadHolder) *SimpleResponse {
response := newResponse()
response.readHolder = readHolder
return response
}
func (client *Client) CreateJsonResponse(data interface{}) *SimpleResponse {
b, err := json.Marshal(data)
utils.Panic(err)
response := client.ResponseBody(string(b))
response.SetContentType(ContentJson)
return response
}
func (client *Client) CreateResponse(statusCode int) *SimpleResponse {
type ResponseStruct struct {
StatusCode int `json:"statuscode"`
Path string `json:"path"`
}
b, err := json.Marshal(ResponseStruct{statusCode,
client.Url})
utils.Panic(err)
response := client.ResponseBody(string(b))
if statusCode == utils.StatusNoError {
response.SetStatusCode(http.StatusOK)
} else {
response.SetStatusCode(http.StatusNotFound)
}
response.SetContentType(ContentJson)
return response
}
func (response *SimpleResponse) SetContentType(contentType string) {
response.contentType = contentType
}
func (response *SimpleResponse) SetStatusCode(code int) {
response.statusCode = code
}
func (response *SimpleResponse) SetHeader(key, value string) {
response.headers.Set(key, value)
}
func (response *SimpleResponse) write(writer http.ResponseWriter, client *Client) {
if !utils.StringIsEmpty(response.contentType) {
writer.Header().Set("Content-Type", response.contentType)
}
writer.Header().Set("Server", response.serverDescription)
for key := range response.headers {
writer.Header().Set(key, response.headers.Get(key))
}
readerSize := response.readHolder.Size()
contentLength := readerSize
start, end := int64(0), readerSize-1
ranges := client.Header.Get("Range")
statusCode := response.statusCode
if statusCode == http.StatusOK &&
strings.HasPrefix(ranges, "bytes=") &&
strings.Contains(ranges, "-") {
start, end = rangeParser(ranges)
if end < 0 {
end = readerSize - 1
}
if start >= readerSize-1 {
start = readerSize - 1
}
if end >= readerSize-1 {
end = readerSize - 1
}
if end < start {
end = start
}
writer.Header().Set("Content-Range",
fmt.Sprintf("bytes %d-%d/%d", start, end, readerSize))
statusCode = http.StatusPartialContent
contentLength = end - start + 1
}
reader := &rangeReader{
start, end, readerSize,
response.readHolder,
}
defer reader.Close()
writer.Header().Set("Accept-Ranges", "bytes")
writer.Header().Set("Content-Length", fmt.Sprint(contentLength))
writer.WriteHeader(statusCode)
io.Copy(writer, reader)
}
func rangeParser(ranges string) (int64, int64) {
ranges = strings.Replace(ranges, "bytes=", "", 1)
middleIndex := strings.Index(ranges, "-")
start, err := strconv.ParseInt(ranges[:middleIndex], 10, 64)
if err != nil {
return 0, 0
}
end := int64(-1)
if middleIndex < len(ranges)-1 {
end, err = strconv.ParseInt(ranges[middleIndex+1:], 10, 64)
if err != nil {
return start, 0
}
}
return start, end
}
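// Illustrative sketch, not part of the original source: exampleRangeParsing is a
// hypothetical helper showing how rangeParser is expected to interpret common
// HTTP Range headers. An end value of -1 means "until the last byte" and is
// resolved against the reader size in SimpleResponse.write above.
func exampleRangeParsing() {
	start, end := rangeParser("bytes=0-499") // first 500 bytes: start == 0, end == 499
	_, _ = start, end
	start, end = rangeParser("bytes=500-") // open-ended suffix: start == 500, end == -1
	_, _ = start, end
}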
func (rangeReadHolderBytes *rangeReadHolderBytes) Size() int64 {
return rangeReadHolderBytes.bytesReader.Size()
}
func (rangeReadHolderBytes *rangeReadHolderBytes) ReadAt(p []byte, off int64) (n int, err error) {
return rangeReadHolderBytes.bytesReader.ReadAt(p, off)
}
func (rangeReadHolderBytes *rangeReadHolderBytes) Close() error {
return nil
}
func (rangeReadHolderFile *rangeReadHolderFile) Size() int64 {
info, err := rangeReadHolderFile.file.Stat()
if err != nil {
return 0
}
return info.Size()
}
func (rangeReadHolderFile *rangeReadHolderFile) ReadAt(p []byte, off int64) (n int, err error) {
return rangeReadHolderFile.file.ReadAt(p, off)
}
func (rangeReadHolderFile *rangeReadHolderFile) Close() error {
return rangeReadHolderFile.file.Close()
}
func (rangeReader *rangeReader) Read(b []byte) (n int, err error) {
	if rangeReader.start >= rangeReader.size {
		return 0, io.EOF
	}
	read, _ := rangeReader.holder.ReadAt(b, rangeReader.start)
	newStart := rangeReader.start + int64(read)
	if newStart > rangeReader.end {
		// clamp to the requested range and mark the reader as exhausted,
		// so no more than Content-Length bytes are served
		read = int(rangeReader.end-rangeReader.start) + 1
		rangeReader.start = rangeReader.size
	} else {
		rangeReader.start = newStart
	}
	return read, nil
}
func (rangeReader *rangeReader) Close() error {
return rangeReader.holder.Close()
}
<|start_filename|>testing/youtube.go<|end_filename|>
package main
type Youtube struct {
Apikey string `json:"apikey"`
Id string `json:"id"`
}
type YoutubeSearch struct {
Apikey string `json:"apikey"`
Searchquery string `json:"searchquery"`
}
<|start_filename|>database/users.go<|end_filename|>
package database
import (
"crypto/sha256"
"database/sql"
"encoding/json"
"fmt"
"reflect"
"regexp"
"sync"
"github.com/Grarak/GoYTFetcher/utils"
"golang.org/x/crypto/pbkdf2"
)
const TableUsers = "users"
type User struct {
ApiKey string `json:"apikey,omitempty"`
Name string `json:"name,omitempty"`
Password string `json:"password,omitempty"`
PasswordSalt string `json:"-"`
PasswordHash string `json:"-"`
Admin *bool `json:"admin,omitempty"`
Verified *bool `json:"verified,omitempty"`
}
func NewUser(data []byte) (User, error) {
var user User
err := json.Unmarshal(data, &user)
if err == nil {
if user.Admin == nil {
admin := false
user.Admin = &admin
}
if user.Verified == nil {
verified := false
user.Verified = &verified
}
}
return user, err
}
func (user User) ToJson() (string, error) {
b, err := json.Marshal(user)
if err != nil {
return "", err
}
return string(b), nil
}
func hashPassword(password, salt []byte) []byte {
return pbkdf2.Key(password, salt, 4096, sha256.Size, sha256.New)
}
func generatePassword(password []byte) (string, string) {
salt := utils.GenerateRandom(32)
hash := hashPassword(password, salt)
return utils.ToURLBase64(hash), utils.ToURLBase64(salt)
}
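// Illustrative sketch, not part of the original source: examplePasswordRoundTrip
// is a hypothetical helper showing how the (hash, salt) pair produced by
// generatePassword is verified later by re-deriving the hash with the stored
// salt and comparing, which is what GetUserWithPassword does further down.
func examplePasswordRoundTrip(plaintext []byte) bool {
	hash, salt := generatePassword(plaintext)
	storedSalt, _ := utils.FromURLBase64(salt)
	storedHash, _ := utils.FromURLBase64(hash)
	return reflect.DeepEqual(storedHash, hashPassword(plaintext, storedSalt))
}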
type UsersDB struct {
db *sql.DB
rwLock *sync.RWMutex
namePattern *regexp.Regexp
}
func newUsersDB(db *sql.DB, rwLock *sync.RWMutex) (*UsersDB, error) {
cmd := newTableBuilder(TableUsers).
addUniqueKeyPair(ColumnApikey).
addUniqueKeyPair(ColumnName).
addColumn(ColumnPasswordSalt).
addColumn(ColumnPasswordHash).
addColumn(ColumnAdmin).
addColumn(ColumnVerified).build()
_, err := db.Exec(cmd)
if err != nil {
return nil, err
}
regex, err := regexp.Compile("^[a-zA-Z0-9_]*$")
if err != nil {
return nil, err
}
return &UsersDB{db, rwLock, regex}, nil
}
func (usersDB *UsersDB) AddUser(user User) (User, int) {
if len(user.Name) <= 3 {
return user, utils.StatusNameShort
}
if len(user.Name) > 50 {
return user, utils.StatusNameLong
}
if !usersDB.namePattern.MatchString(user.Name) {
return user, utils.StatusNameInvalid
}
password, err := utils.Decode(user.Password)
if err != nil {
return user, utils.StatusPasswordInvalid
}
if len(password) <= 4 {
return user, utils.StatusPasswordShort
}
if len(password) > 50 {
return user, utils.StatusPasswordLong
}
usersDB.rwLock.Lock()
defer usersDB.rwLock.Unlock()
if _, err := usersDB.findUserByName(user.Name); err == nil {
return user, utils.StatusUserAlreadyExists
}
// Hash password
hash, salt := generatePassword(password)
// Generate api token
user.ApiKey = usersDB.generateApiToken()
user.Password = ""
// If this is the first user
// Make him admin
count, _ := rowCountInTable(usersDB.db, TableUsers)
var admin bool
var verified bool
if count == 0 {
admin = true
verified = true
}
user.Admin = &admin
user.Verified = &verified
_, err = usersDB.db.Exec(fmt.Sprintf(
"INSERT INTO %s "+
"(%s, %s, %s, %s, %s, %s) "+
"VALUES (?, ?, ?, ?, ?, ?)",
TableUsers,
ColumnApikey.name, ColumnName.name,
ColumnPasswordSalt.name, ColumnPasswordHash.name,
ColumnAdmin.name, ColumnVerified.name),
user.ApiKey, user.Name, salt, hash,
*user.Admin, *user.Verified)
if err != nil {
return user, utils.StatusAddUserFailed
}
return user, utils.StatusNoError
}
func (usersDB *UsersDB) GetUserWithPassword(name, password string) (User, int) {
user, err := usersDB.FindUserByName(name)
if err == nil {
password, err := utils.Decode(password)
if err == nil {
salt, err := utils.FromURLBase64(user.PasswordSalt)
if err == nil {
newHash := hashPassword(password, salt)
oldHash, err := utils.FromURLBase64(user.PasswordHash)
if err == nil && reflect.DeepEqual(oldHash, newHash) {
user.Password = ""
return user, utils.StatusNoError
}
}
}
}
return User{}, utils.StatusInvalidPassword
}
func (usersDB *UsersDB) generateApiToken() string {
token := utils.ToURLBase64(utils.GenerateRandom(32))
if _, err := usersDB.findUserByApiKey(token); err == nil {
return usersDB.generateApiToken()
}
return token
}
func (usersDB *UsersDB) FindUserByApiKey(apiKey string) (User, error) {
usersDB.rwLock.RLock()
defer usersDB.rwLock.RUnlock()
return usersDB.findUserByApiKey(apiKey)
}
func (usersDB *UsersDB) findUserByApiKey(apiKey string) (User, error) {
users, err := usersDB.createUserWithWhere(
ColumnApikey.name+" = ?", apiKey)
if len(users) > 0 {
return users[0], err
}
return User{}, fmt.Errorf("no users found")
}
func (usersDB *UsersDB) FindUserByName(name string) (User, error) {
usersDB.rwLock.RLock()
defer usersDB.rwLock.RUnlock()
return usersDB.findUserByName(name)
}
func (usersDB *UsersDB) findUserByName(name string) (User, error) {
users, err := usersDB.createUserWithWhere(
ColumnName.name+" = ? COLLATE NOCASE", name)
if len(users) > 0 {
return users[0], err
}
return User{}, fmt.Errorf("no users found")
}
func (usersDB *UsersDB) ListUsers(page int) ([]User, error) {
usersDB.rwLock.RLock()
defer usersDB.rwLock.RUnlock()
if page < 1 {
page = 1
}
users, err := usersDB.createUsers(fmt.Sprintf(
"LIMIT 10 OFFSET %d", 10*(page-1)))
if err != nil {
return nil, err
}
usersNoApiKey := make([]User, len(users))
for i := range users {
usersNoApiKey[i] = users[i]
usersNoApiKey[i].ApiKey = ""
}
return usersNoApiKey, nil
}
func (usersDB *UsersDB) SetVerificationUser(request User) error {
usersDB.rwLock.Lock()
defer usersDB.rwLock.Unlock()
_, err := usersDB.db.Exec(fmt.Sprintf(
"UPDATE %s SET %s = ? WHERE %s = ?",
TableUsers, ColumnVerified.name, ColumnName.name), *request.Verified, request.Name)
return err
}
func (usersDB *UsersDB) DeleteUser(request User) error {
usersDB.rwLock.Lock()
defer usersDB.rwLock.Unlock()
_, err := usersDB.db.Exec(fmt.Sprintf(
"DELETE FROM %s WHERE %s = ?",
TableUsers, ColumnName.name), request.Name)
return err
}
func (usersDB *UsersDB) DeleteAllNonVerifiedUsers(request User) error {
usersDB.rwLock.Lock()
defer usersDB.rwLock.Unlock()
_, err := usersDB.db.Exec(fmt.Sprintf(
"DELETE FROM %s WHERE %s = %d OR %s = null",
TableUsers, ColumnVerified.name, 0, ColumnVerified.name))
return err
}
func (usersDB *UsersDB) ResetPasswordUser(request User) error {
usersDB.rwLock.Lock()
defer usersDB.rwLock.Unlock()
	password, err := utils.Decode(request.Password)
	if err != nil {
		return err
	}
	if len(password) <= 4 {
		return fmt.Errorf("password too short")
	}
	hash, salt := generatePassword(password)
	_, err = usersDB.db.Exec(fmt.Sprintf(
		"UPDATE %s SET %s = ?, %s = ? WHERE %s = ?",
		TableUsers, ColumnPasswordHash.name,
		ColumnPasswordSalt.name,
		ColumnName.name), hash, salt, request.Name)
return err
}
func (usersDB *UsersDB) createUserWithWhere(where string, args ...interface{}) ([]User, error) {
return usersDB.createUsers("WHERE "+where, args...)
}
func (usersDB *UsersDB) createUsers(condition string, args ...interface{}) ([]User, error) {
stmt, err := usersDB.db.Prepare(fmt.Sprintf(
"SELECT %s,%s,%s,%s,%s,%s FROM %s %s",
ColumnApikey.name, ColumnName.name, ColumnPasswordSalt.name,
ColumnPasswordHash.name, ColumnAdmin.name,
ColumnVerified.name, TableUsers, condition))
if err != nil {
return nil, err
}
defer stmt.Close()
rows, err := stmt.Query(args...)
if err != nil {
return nil, err
}
defer rows.Close()
var users []User
for rows.Next() {
admin := false
verified := false
user := User{Admin: &admin, Verified: &verified}
err := rows.Scan(&user.ApiKey, &user.Name, &user.PasswordSalt,
&user.PasswordHash, user.Admin, user.Verified)
if err != nil {
return nil, err
}
if utils.StringIsEmpty(user.Name) {
return nil, fmt.Errorf("couldn't find user with %s", condition)
}
users = append(users, user)
}
return users, nil
}
<|start_filename|>database/youtubesong.go<|end_filename|>
package database
import (
"crypto/aes"
"net/url"
"os"
"sync"
"time"
"io"
"github.com/Grarak/GoYTFetcher/logger"
"github.com/Grarak/GoYTFetcher/utils"
"github.com/Grarak/GoYTFetcher/ytdl"
)
type YoutubeSong struct {
id string
downloadUrl string
downloadUrlTime time.Time
count int
downloaded bool
downloading bool
filePath string
deleted bool
encryptedId string
songLock sync.Mutex
stateLock sync.RWMutex
readLock sync.RWMutex
}
type YoutubeSongReader struct {
song *YoutubeSong
file *os.File
io.ReaderAt
io.Closer
}
func newYoutubeSong(id string) *YoutubeSong {
return &YoutubeSong{id: id, count: 1}
}
func (youtubeSong *YoutubeSong) isDownloaded() bool {
youtubeSong.stateLock.RLock()
defer youtubeSong.stateLock.RUnlock()
return youtubeSong.downloaded
}
func (youtubeSong *YoutubeSong) setDownloaded(downloaded bool) {
youtubeSong.stateLock.Lock()
defer youtubeSong.stateLock.Unlock()
youtubeSong.downloaded = downloaded
}
func (youtubeSong *YoutubeSong) IsDownloading() bool {
youtubeSong.stateLock.RLock()
defer youtubeSong.stateLock.RUnlock()
return youtubeSong.downloading
}
func (youtubeSong *YoutubeSong) setDownloading(downloading bool) {
youtubeSong.stateLock.Lock()
defer youtubeSong.stateLock.Unlock()
youtubeSong.downloading = downloading
}
func (youtubeSong *YoutubeSong) Reader() (*YoutubeSongReader, error) {
youtubeSong.readLock.RLock()
defer youtubeSong.readLock.RUnlock()
file, err := os.Open(youtubeSong.filePath)
if err != nil {
return nil, err
}
return &YoutubeSongReader{song: youtubeSong, file: file}, nil
}
func (youtubeSong *YoutubeSong) getDownloadUrl() (string, error) {
currentTime := time.Now()
if currentTime.Sub(youtubeSong.downloadUrlTime).Hours() < 1 &&
!utils.StringIsEmpty(youtubeSong.downloadUrl) {
return youtubeSong.downloadUrl, nil
}
info, err := ytdl.GetVideoDownloadInfo(youtubeSong.id)
if err != nil {
defer youtubeSong.setDownloading(false)
return "", err
}
var link *url.URL
if info.VideoInfo.Duration.Minutes() <= 20 {
link, err = info.GetDownloadURL()
} else {
link, err = info.GetDownloadURLWorst()
}
if err != nil {
return "", err
}
youtubeSong.downloadUrl = link.String()
youtubeSong.downloadUrlTime = currentTime
return youtubeSong.downloadUrl, nil
}
func (youtubeSong *YoutubeSong) download(youtubeDB *youtubeDBImpl) error {
youtubeSong.setDownloading(true)
info, err := ytdl.GetVideoDownloadInfo(youtubeSong.id)
if err != nil {
youtubeSong.setDownloading(false)
return err
}
if info.VideoInfo.Duration.Minutes() <= 20 {
logger.I("Downloading " + info.VideoInfo.Title)
defer logger.I("Finished downloading " + info.VideoInfo.Title)
defer youtubeSong.setDownloading(false)
path, err := info.VideoInfo.Download(utils.YOUTUBE_DIR, youtubeDB.youtubeDL)
if err != nil {
return err
}
youtubeSong.filePath = path
if youtubeSong.deleted {
os.Remove(youtubeSong.filePath)
} else {
defer youtubeSong.setDownloaded(true)
}
return nil
}
logger.I(info.VideoInfo.Title + " is too long, skipping download")
return nil
}
func (youtubeSong *YoutubeSong) delete() {
youtubeSong.readLock.Lock()
defer youtubeSong.readLock.Unlock()
youtubeSong.deleted = true
if youtubeSong.isDownloaded() {
os.Remove(youtubeSong.filePath)
}
}
func (youtubeSong *YoutubeSong) getEncryptedId(key []byte) string {
if utils.StringIsEmpty(youtubeSong.encryptedId) {
id := youtubeSong.id
for i := len(id); i < aes.BlockSize; i++ {
id += " "
}
youtubeSong.encryptedId = utils.Encrypt(key, id)
}
return youtubeSong.encryptedId
}
func (youtubeSong *YoutubeSong) increaseCount() {
youtubeSong.stateLock.Lock()
defer youtubeSong.stateLock.Unlock()
youtubeSong.count++
}
func (youtubeSong YoutubeSong) GetUniqueId() string {
return youtubeSong.id
}
func (youtubeSong YoutubeSong) GetCount() int {
return youtubeSong.count
}
func (youtubeSongReader *YoutubeSongReader) Size() int64 {
youtubeSongReader.song.readLock.RLock()
defer youtubeSongReader.song.readLock.RUnlock()
info, err := youtubeSongReader.file.Stat()
if err != nil {
return 0
}
return info.Size()
}
func (youtubeSongReader *YoutubeSongReader) ReadAt(p []byte, off int64) (n int, err error) {
youtubeSongReader.song.readLock.RLock()
defer youtubeSongReader.song.readLock.RUnlock()
return youtubeSongReader.file.ReadAt(p, off)
}
func (youtubeSongReader *YoutubeSongReader) Close() error {
youtubeSongReader.song.readLock.RLock()
defer youtubeSongReader.song.readLock.RUnlock()
return youtubeSongReader.file.Close()
}
<|start_filename|>utils/codes.go<|end_filename|>
package utils
const (
StatusNoError = 0
StatusInvalid = 1
StatusNameShort = 2
StatusPasswordShort = 3
StatusPasswordInvalid = 4
StatusNameInvalid = 5
StatusAddUserFailed = 6
StatusUserAlreadyExists = 7
StatusInvalidPassword = 8
StatusPasswordLong = 9
StatusNameLong = 10
StatusYoutubeFetchFailure = 11
StatusYoutubeSearchFailure = 12
StatusYoutubeGetFailure = 13
StatusYoutubeGetInfoFailure = 14
StatusYoutubeGetChartsFailure = 15
StatusPlaylistIdAlreadyExists = 16
StatusAddHistoryFailed = 17
)
<|start_filename|>database/const.go<|end_filename|>
package database
type dataType string
type column struct {
name string
dataType dataType
}
type foreignKey struct {
name string
dataType dataType
referenceTable string
referenceKey string
primaryKey bool
}
func text() dataType {
return "text"
}
func boolean() dataType {
return "boolean"
}
func datetime() dataType {
return "datetime"
}
var ColumnApikey = column{"api_key", text()}
var ColumnName = column{"name", text()}
var ColumnPasswordSalt = column{"password_salt", text()}
var ColumnPasswordHash = column{"password_hash", text()}
var ColumnAdmin = column{"admin", boolean()}
var ColumnVerified = column{"verified", boolean()}
var ColumnPublic = column{"public", boolean()}
var ColumnId = column{"id", text()}
var ColumnIds = column{"ids", text()}
var ColumnDate = column{"date", datetime()}
var ForeignKeyApikey = foreignKey{ColumnApikey.name, text(), TableUsers,
ColumnApikey.name, true}
<|start_filename|>miniserver/response.go<|end_filename|>
package miniserver
import (
"net/http"
)
type Response interface {
write(writer http.ResponseWriter, client *Client)
}
<|start_filename|>api/v1/info.go<|end_filename|>
package v1
import "github.com/Grarak/GoYTFetcher/miniserver"
func HandleInfoV1(_ string, client *miniserver.Client) miniserver.Response {
return client.ResponseBody("Welcome to V1 API!")
}
<|start_filename|>utils/paths.go<|end_filename|>
package utils
const (
LOG_DIR = "log"
LOG_PREFIX = "log"
LOG_EXTENSION = ".txt"
FILES = "files"
DATABASE = FILES + "/sqldata"
DATADB = DATABASE + "/data.db"
YOUTUBE_DIR = FILES + "/youtube"
YOUTUBE_DL = "youtube-dl"
FFMPEG = "ffmpeg"
)
<|start_filename|>database/database.go<|end_filename|>
package database
import (
"database/sql"
"sync"
"github.com/Grarak/GoYTFetcher/utils"
_ "github.com/mattn/go-sqlite3"
)
var singletonLock sync.Mutex
var databaseInstance *Database
type Database struct {
db *sql.DB
UsersDB *UsersDB
PlaylistsDB *PlaylistsDB
HistoriesDB *HistoriesDB
YoutubeDB YouTubeDB
}
func GetDefaultDatabase() *Database {
return GetDatabase(nil, "")
}
func GetDatabase(key []byte, ytKey string) *Database {
singletonLock.Lock()
defer singletonLock.Unlock()
if databaseInstance != nil {
return databaseInstance
}
db, err := sql.Open("sqlite3", utils.DATADB)
utils.Panic(err)
_, err = db.Exec("PRAGMA foreign_keys = ON")
utils.Panic(err)
rwLock := &sync.RWMutex{}
usersDB, err := newUsersDB(db, rwLock)
utils.Panic(err)
playlistsDB, err := newPlaylistsDB(db, rwLock)
utils.Panic(err)
historiesDB, err := newHistoriesDB(db, rwLock)
utils.Panic(err)
youtubeDB, err := newYoutubeDB(key, ytKey)
utils.Panic(err)
databaseInstance = &Database{
db,
usersDB,
playlistsDB,
historiesDB,
youtubeDB,
}
return databaseInstance
}
func (database *Database) Close() error {
return database.db.Close()
}
<|start_filename|>Makefile<|end_filename|>
.PHONY: all install test clean
all:
go build -ldflags "-s -w" -i -o GoYTFetcher main.go
install:
go install -ldflags "-s" -i
test:
go build -i -o ytfetcher_test testing/*.go
clean:
	rm -f GoYTFetcher
rm -f ytfetcher_test
<|start_filename|>database/youtubesearch.go<|end_filename|>
package database
import (
"bufio"
"fmt"
"net/http"
"net/url"
"os/exec"
"regexp"
"sort"
"strconv"
"strings"
"sync"
"unicode"
"github.com/Grarak/GoYTFetcher/logger"
"github.com/Grarak/GoYTFetcher/utils"
"github.com/Grarak/GoYTFetcher/ytdl"
)
var searchApiRegex = regexp.MustCompile("\"videoId\":\\s+\"([a-z_A-Z0-9\\-]{11})\"")
var youtubeApiKeyRegex = regexp.MustCompile("\"INNERTUBE_API_KEY\"[\\s]*:[\\s]*\"([^\"]+)\"")
type YoutubeSearch struct {
query string
results []YoutubeSearchResult
count int
valuesLock sync.RWMutex
rwLock sync.RWMutex
}
type querySort []string
func (query querySort) Len() int {
return len(query)
}
func (query querySort) Less(i, j int) bool {
return query[i] < query[j]
}
func (query querySort) Swap(i, j int) {
query[i], query[j] = query[j], query[i]
}
func newYoutubeSearch(searchQuery string) *YoutubeSearch {
searchQuery = strings.ToLower(searchQuery)
searchQuery = regexp.MustCompile("\\s+").ReplaceAllString(searchQuery, " ")
words := querySort(strings.Split(searchQuery, " "))
sort.Sort(words)
searchQuery = strings.Join(words, " ")
return &YoutubeSearch{query: searchQuery, count: 1}
}
type YoutubeSearchResult struct {
Title string `json:"title"`
Id string `json:"id"`
Thumbnail string `json:"thumbnail"`
Duration string `json:"duration"`
}
func (youtubeSearch *YoutubeSearch) search(youtubeDB *youtubeDBImpl) ([]YoutubeSearchResult, error) {
youtubeSearch.rwLock.Lock()
defer youtubeSearch.rwLock.Unlock()
results, err := youtubeSearch.getSearchFromWebsite(youtubeDB)
if err != nil && !utils.StringIsEmpty(youtubeDB.ytKey) {
results, err = youtubeSearch.getSearchFromApi(youtubeDB)
}
if err != nil {
results, err = youtubeSearch.getSearchFromYoutubeDL(youtubeDB)
}
if err != nil {
return nil, err
}
youtubeSearch.results = results
return results, err
}
func (youtubeSearch *YoutubeSearch) getSearchFromWebsite(youtubeDB *youtubeDBImpl) ([]YoutubeSearchResult, error) {
infos, err := ytdl.GetVideosFromSearch(youtubeSearch.query)
if err != nil {
return nil, err
}
results := make([]YoutubeSearchResult, len(infos))
for i, info := range infos {
if utils.StringIsEmpty(info.Title) || info.Duration == 0 {
result, err := youtubeDB.GetYoutubeInfo(info.ID)
if err != nil {
continue
}
results[i] = result
} else {
seconds := int(info.Duration.Seconds()) % 60
minutes := int(info.Duration.Minutes())
results[i] = YoutubeSearchResult{info.Title, info.ID,
info.GetThumbnailURL(ytdl.ThumbnailQualityMedium).String(),
utils.FormatMinutesSeconds(minutes, seconds)}
if youtubeDB.idRanking.getSize() < 1000 {
youtubeId := newYoutubeId(info.ID)
youtubeId.result = results[i]
youtubeDB.ids.LoadOrStore(info.ID, youtubeId)
}
}
}
return results, nil
}
func (youtubeSearch *YoutubeSearch) getSearchFromApi(youtubeDB *youtubeDBImpl) ([]YoutubeSearchResult, error) {
searchUrl := "https://www.googleapis.com/youtube/v3/search?"
query := url.Values{}
query.Set("q", youtubeSearch.query)
query.Set("type", "video")
query.Set("maxResults", "10")
query.Set("part", "snippet")
query.Set("key", youtubeDB.ytKey)
res, err := http.Get(searchUrl + query.Encode())
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode != 200 {
return nil, fmt.Errorf("couldn't get website")
}
ids := make([]string, 0)
reader := bufio.NewReader(res.Body)
for {
line, err := reader.ReadString('\n')
if err != nil {
break
}
matches := searchApiRegex.FindAllStringSubmatch(line, 1)
if len(matches) > 0 && len(matches[0]) > 1 {
id := matches[0][1]
if !utils.StringArrayContains(ids, id) {
ids = append(ids, id)
if len(ids) >= 10 {
break
}
}
}
}
results := make([]YoutubeSearchResult, 0)
for _, id := range ids {
result, err := youtubeDB.GetYoutubeInfo(id)
if err == nil {
results = append(results, result)
}
}
if len(results) == 0 {
return nil, fmt.Errorf("no results found")
}
return results, nil
}
func (youtubeSearch *YoutubeSearch) getSearchFromYoutubeDL(youtubeDB *youtubeDBImpl) ([]YoutubeSearchResult, error) {
cmd := exec.Command(youtubeDB.youtubeDL, "-e", "--get-id", "--get-thumbnail", "--get-duration",
fmt.Sprintf("ytsearch10:%s", youtubeSearch.query))
	reader, err := cmd.StdoutPipe()
	if err != nil {
		return nil, err
	}
	defer reader.Close()
err = cmd.Start()
if err != nil {
return nil, err
}
results := make([]string, 0)
bufReader := bufio.NewReader(reader)
for i := 0; ; i++ {
line, err := bufReader.ReadString('\n')
if err != nil {
break
}
results = append(results, line)
}
youtubeResults := make([]YoutubeSearchResult, 0)
for _, result := range results {
if youtubeResult, err := youtubeDB.GetYoutubeInfo(result); err == nil {
youtubeResults = append(youtubeResults, youtubeResult)
}
}
return youtubeResults, nil
}
func (youtubeDB *youtubeDBImpl) getYoutubeVideoInfoFromYtdl(id string) (YoutubeSearchResult, error) {
info, err := ytdl.GetVideoInfoFromID(id)
if err != nil {
logger.E(fmt.Sprintf("Couldn't get %s, %v", id, err))
return YoutubeSearchResult{}, err
}
seconds := int(info.Duration.Seconds()) % 60
minutes := int(info.Duration.Minutes())
return YoutubeSearchResult{info.Title, id,
info.GetThumbnailURL(ytdl.ThumbnailQualityMedium).String(),
utils.FormatMinutesSeconds(minutes, seconds)}, nil
}
func (youtubeDB *youtubeDBImpl) getYoutubeVideoInfoFromApi(id string) (YoutubeSearchResult, error) {
infoUrl := "https://www.googleapis.com/youtube/v3/videos?"
query := url.Values{}
query.Set("id", id)
query.Set("part", "snippet,contentDetails")
query.Set("key", youtubeDB.ytKey)
response, err := getYoutubeApiResponseItems(infoUrl + query.Encode())
if err != nil {
return YoutubeSearchResult{}, err
}
if len(response.Items) == 0 {
return YoutubeSearchResult{}, fmt.Errorf("no results")
}
item := response.Items[0]
return YoutubeSearchResult{item.Snippet.Title, id,
item.Snippet.Thumbnails.Medium.Url,
utils.FormatMinutesSeconds(
parseYoutubeApiTime(item.ContentDetails.Duration))}, nil
}
func getYoutubeCharts() ([]YoutubeSearchResult, error) {
trendingUrl := "https://charts.youtube.com/us"
res, err := http.Get(trendingUrl)
if err != nil {
return nil, err
}
defer res.Body.Close()
reader := bufio.NewReader(res.Body)
apiKey := ""
for {
line, err := reader.ReadString('\n')
if err != nil {
break
}
if strings.Contains(line, "INNERTUBE_API_KEY") {
matches := youtubeApiKeyRegex.FindAllStringSubmatch(line, 1)
if len(matches) > 0 && len(matches[0]) > 1 {
apiKey = matches[0][1]
break
}
}
}
if utils.StringIsEmpty(apiKey) {
return nil, fmt.Errorf("couldn't retrieve api key")
}
return getYoutubeChartsFromApi(apiKey)
}
func (youtubeSearch *YoutubeSearch) getResults() []YoutubeSearchResult {
youtubeSearch.rwLock.RLock()
defer youtubeSearch.rwLock.RUnlock()
return youtubeSearch.results
}
func (youtubeSearch *YoutubeSearch) increaseCount() {
youtubeSearch.valuesLock.Lock()
defer youtubeSearch.valuesLock.Unlock()
youtubeSearch.count++
}
func (youtubeSearch YoutubeSearch) GetUniqueId() string {
return youtubeSearch.query
}
func (youtubeSearch YoutubeSearch) GetCount() int {
return youtubeSearch.count
}
func parseYoutubeApiTime(duration string) (int, int) {
hours := 0
minutes := 0
seconds := 0
var numbers []rune
for _, c := range duration {
if unicode.IsDigit(c) {
numbers = append(numbers, c)
}
num, err := strconv.Atoi(string(numbers))
if err != nil {
num = 0
}
switch c {
case 'H':
hours = num
numbers = numbers[:0]
break
case 'M':
minutes = num
numbers = numbers[:0]
break
case 'S':
seconds = num
numbers = numbers[:0]
break
}
}
minutes += hours * 60
return minutes, seconds
}
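// Illustrative sketch, not part of the original source: exampleDurationParsing is
// a hypothetical helper showing how parseYoutubeApiTime converts the ISO 8601
// durations returned by the YouTube Data API into (minutes, seconds), with hours
// folded into minutes.
func exampleDurationParsing() {
	minutes, seconds := parseYoutubeApiTime("PT1H2M10S") // 62 minutes, 10 seconds
	_, _ = minutes, seconds
	minutes, seconds = parseYoutubeApiTime("PT3M24S") // 3 minutes, 24 seconds
	_, _ = minutes, seconds
}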
<|start_filename|>api/v1/users.go<|end_filename|>
package v1
import (
"fmt"
"net/http"
"strconv"
"github.com/Grarak/GoYTFetcher/database"
"github.com/Grarak/GoYTFetcher/logger"
"github.com/Grarak/GoYTFetcher/miniserver"
"github.com/Grarak/GoYTFetcher/utils"
)
func usersSignUp(client *miniserver.Client) miniserver.Response {
request, err := database.NewUser(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
user, code := usersDB.AddUser(request)
if code == utils.StatusNoError {
logger.I(client.IPAddr + ": " + "Created new user " + user.Name)
return client.CreateJsonResponse(user)
}
return client.CreateResponse(code)
}
func usersLogin(client *miniserver.Client) miniserver.Response {
request, err := database.NewUser(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
user, code := usersDB.GetUserWithPassword(request.Name, request.Password)
if code == utils.StatusNoError {
logger.I(client.IPAddr + ": " + user.Name + " logged in")
return client.CreateJsonResponse(user)
}
return client.CreateResponse(code)
}
func usersList(client *miniserver.Client) miniserver.Response {
request, err := database.NewUser(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
if user, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *user.Verified {
page, err := strconv.Atoi(client.Queries.Get("page"))
if err != nil {
page = 1
}
users, err := usersDB.ListUsers(page)
if err == nil {
return client.CreateJsonResponse(users)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func usersSetVerification(client *miniserver.Client) miniserver.Response {
request, err := database.NewUser(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Admin {
logger.I(fmt.Sprintf("%s setting verification of %s to %v", requester.Name,
request.Name, *request.Verified))
err = usersDB.SetVerificationUser(request)
if err == nil {
return client.CreateResponse(utils.StatusNoError)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func usersDelete(client *miniserver.Client) miniserver.Response {
request, err := database.NewUser(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Admin {
err = usersDB.DeleteUser(request)
		if err == nil {
return client.CreateResponse(utils.StatusNoError)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func usersDeleteAll(client *miniserver.Client) miniserver.Response {
request, err := database.NewUser(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Admin {
err = usersDB.DeleteAllNonVerifiedUsers(request)
if err == nil {
return client.CreateResponse(utils.StatusNoError)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func usersResetPassword(client *miniserver.Client) miniserver.Response {
request, err := database.NewUser(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Admin {
err = usersDB.ResetPasswordUser(request)
if err == nil {
return client.CreateResponse(utils.StatusNoError)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistList(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylist(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
playlists, err := playlistsDB.GetPlaylists(request.ApiKey, false)
if err == nil {
return client.CreateJsonResponse(playlists)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistListPublic(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylist(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
user, err := usersDB.FindUserByName(request.Name)
if err == nil {
playlists, err := playlistsDB.GetPlaylists(user.ApiKey, true)
if err == nil {
return client.CreateJsonResponse(playlists)
}
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistCreate(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylist(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
err := playlistsDB.CreatePlaylist(request)
if err == nil {
return client.CreateResponse(utils.StatusNoError)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistDelete(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylist(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
err = playlistsDB.DeletePlaylist(request)
if err == nil {
return client.CreateResponse(utils.StatusNoError)
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistSetPublic(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylist(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
err = playlistsDB.SetPublic(request)
if err == nil {
return client.CreateResponse(utils.StatusNoError)
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistListIds(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylist(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
ids, err := playlistsDB.GetPlaylistIds(request)
if err == nil {
return client.CreateJsonResponse(ids)
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistListIdsPublic(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylistPublic(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
user, err := usersDB.FindUserByName(request.Name)
if err == nil {
playlist := database.Playlist{ApiKey: user.ApiKey, Name: request.Playlist}
if playlistsDB.IsPlaylistPublic(playlist) {
ids, err := playlistsDB.GetPlaylistIds(playlist)
if err == nil {
return client.CreateJsonResponse(ids)
}
}
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistAddId(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylistId(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
err = playlistsDB.AddIdToPlaylist(request)
if err != nil {
return client.CreateResponse(utils.StatusPlaylistIdAlreadyExists)
}
logger.I(client.IPAddr + ": " + requester.Name + " adding " +
request.Id + " to playlist " + request.Name)
return client.CreateResponse(utils.StatusNoError)
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistDeleteId(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylistId(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
err := playlistsDB.DeleteIdFromPlaylist(request)
if err == nil {
return client.CreateResponse(utils.StatusNoError)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func playlistSetIds(client *miniserver.Client) miniserver.Response {
request, err := database.NewPlaylistIds(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
playlistsDB := database.GetDefaultDatabase().PlaylistsDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
err := playlistsDB.SetPlaylistIds(request)
if err == nil {
return client.CreateResponse(utils.StatusNoError)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func historyAdd(client *miniserver.Client) miniserver.Response {
request, err := database.NewHistory(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
userDB := database.GetDefaultDatabase().UsersDB
historiesDB := database.GetDefaultDatabase().HistoriesDB
if requester, err := userDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
err = historiesDB.AddHistory(request.ApiKey, request.Id)
if err == nil {
logger.I(client.IPAddr + ": " + requester.Name +
" adding " + request.Id + " to history")
return client.CreateResponse(utils.StatusNoError)
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func historyList(client *miniserver.Client) miniserver.Response {
request, err := database.NewHistory(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
historiesDB := database.GetDefaultDatabase().HistoriesDB
histories, err := historiesDB.GetHistory(request.ApiKey)
if err == nil {
return client.CreateJsonResponse(histories)
}
return client.CreateResponse(utils.StatusInvalid)
}
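// HandleUsersV1 dispatches the /users endpoints of the v1 API. Only POST
// requests with a JSON body are accepted; anything else returns nil and falls
// through to the server's generic 404 handling.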
func HandleUsersV1(path string, client *miniserver.Client) miniserver.Response {
if client.Method != http.MethodPost || !client.IsContentJson() {
return nil
}
switch path {
// user database
case "signup":
return usersSignUp(client)
case "login":
return usersLogin(client)
case "list":
return usersList(client)
case "setverification":
return usersSetVerification(client)
case "delete":
return usersDelete(client)
case "deleteall":
return usersDeleteAll(client)
case "resetpassword":
return usersResetPassword(client)
// playlist database
case "playlist/list":
return playlistList(client)
case "playlist/listpublic":
return playlistListPublic(client)
case "playlist/create":
return playlistCreate(client)
case "playlist/delete":
return playlistDelete(client)
case "playlist/setpublic":
return playlistSetPublic(client)
case "playlist/listids":
return playlistListIds(client)
case "playlist/listidspublic":
return playlistListIdsPublic(client)
case "playlist/addid":
return playlistAddId(client)
case "playlist/deleteid":
return playlistDeleteId(client)
case "playlist/setids":
return playlistSetIds(client)
// history database
case "history/add":
return historyAdd(client)
case "history/list":
return historyList(client)
}
return nil
}
<|start_filename|>database/table.go<|end_filename|>
package database
import "strings"
type tableBuilder struct {
name string
primaryKeys []column
uniqueKeysPairs [][]column
foreignKeys []foreignKey
columns []column
}
func newTableBuilder(name string) *tableBuilder {
return &tableBuilder{name: name}
}
func (tableBuilder *tableBuilder) addPrimaryKey(key column) *tableBuilder {
tableBuilder.primaryKeys = append(tableBuilder.primaryKeys, key)
return tableBuilder
}
func (tableBuilder *tableBuilder) addUniqueKeyPair(key ...column) *tableBuilder {
tableBuilder.uniqueKeysPairs = append(tableBuilder.uniqueKeysPairs, key)
return tableBuilder
}
func (tableBuilder *tableBuilder) addForeignKey(key foreignKey) *tableBuilder {
tableBuilder.foreignKeys = append(tableBuilder.foreignKeys, key)
return tableBuilder
}
func (tableBuilder *tableBuilder) addColumn(column column) *tableBuilder {
tableBuilder.columns = append(tableBuilder.columns, column)
return tableBuilder
}
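// build assembles the collected keys and columns into a single
// "CREATE TABLE IF NOT EXISTS <name> (...)" statement: foreign key columns
// first, then primary keys, unique key columns and plain columns, followed by
// one FOREIGN KEY clause per referenced table, the PRIMARY KEY list (which
// also picks up foreign keys flagged as primary) and the UNIQUE constraints.
// Duplicate column definitions are skipped. As a rough, hypothetical sketch
// (names and types below are illustrative, not taken from this package), a
// builder with foreign key "apikey", primary key "name" and column "ids"
// would produce something like:
//
//   CREATE TABLE IF NOT EXISTS playlists (apikey TEXT NOT NULL,name TEXT NOT NULL,ids TEXT,
//   FOREIGN KEY (apikey) REFERENCES users (apikey) ON UPDATE CASCADE,PRIMARY KEY (name))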
func (tableBuilder *tableBuilder) build() string {
cmd := "CREATE TABLE IF NOT EXISTS " + tableBuilder.name + " ("
for _, foreignKey := range tableBuilder.foreignKeys {
cmd += foreignKey.name + " " + string(foreignKey.dataType) + " NOT NULL,"
}
for _, primaryKey := range tableBuilder.primaryKeys {
line := primaryKey.name + " " + string(primaryKey.dataType)
if !strings.Contains(cmd, line) {
cmd += line + " NOT NULL,"
}
}
for _, uniqueKeyPair := range tableBuilder.uniqueKeysPairs {
for _, uniqueKey := range uniqueKeyPair {
line := uniqueKey.name + " " + string(uniqueKey.dataType)
if !strings.Contains(cmd, line) {
cmd += line + ","
}
}
}
for _, column := range tableBuilder.columns {
line := column.name + " " + string(column.dataType)
if !strings.Contains(cmd, line) {
cmd += line + ","
}
}
referenceTables := make(map[string][]foreignKey)
for _, foreignKey := range tableBuilder.foreignKeys {
referenceKeys := referenceTables[foreignKey.referenceTable]
referenceTables[foreignKey.referenceTable] = append(referenceKeys, foreignKey)
}
for table, keys := range referenceTables {
cmd += "FOREIGN KEY ("
for _, key := range keys {
cmd += key.name + ","
}
cmd = cmd[:len(cmd)-1] + ") REFERENCES " + table + " ("
for _, key := range keys {
cmd += key.referenceKey + ","
}
cmd = cmd[:len(cmd)-1] + ") ON UPDATE CASCADE,"
}
var primaryKeys []string
for _, primaryKey := range tableBuilder.primaryKeys {
primaryKeys = append(primaryKeys, primaryKey.name)
}
for _, foreignKey := range tableBuilder.foreignKeys {
if foreignKey.primaryKey {
primaryKeys = append(primaryKeys, foreignKey.name)
}
}
if len(primaryKeys) > 0 {
cmd += "PRIMARY KEY ("
for _, key := range primaryKeys {
cmd += key + ","
}
cmd = cmd[:len(cmd)-1] + "),"
}
for _, uniqueKeyPair := range tableBuilder.uniqueKeysPairs {
cmd += "UNIQUE ("
for _, uniqueKey := range uniqueKeyPair {
cmd += uniqueKey.name + ","
}
cmd = cmd[:len(cmd)-1] + "),"
}
cmd = cmd[:len(cmd)-1] + ")"
return cmd
}
<|start_filename|>miniserver/client.go<|end_filename|>
package miniserver
import (
"io/ioutil"
"net/http"
"net/url"
"strings"
"github.com/Grarak/GoYTFetcher/utils"
)
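// Client bundles the data extracted from an incoming HTTP request: host, path,
// method, resolved client IP, raw body, headers and parsed query values.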
type Client struct {
Host, Url, Method, IPAddr string
Request []byte
Header http.Header
Queries url.Values
}
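// newClient reads the whole request body and derives the client IP from
// RemoteAddr, preferring the Cf-Connecting-Ip header when the server is
// running behind Cloudflare.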
func newClient(request *http.Request) *Client {
defer request.Body.Close()
body, _ := ioutil.ReadAll(request.Body)
ipAddr := request.RemoteAddr[:strings.LastIndex(request.RemoteAddr, ":")]
if cfConnectionIP := request.Header.Get("Cf-Connecting-Ip"); !utils.StringIsEmpty(cfConnectionIP) {
ipAddr = cfConnectionIP
}
return &Client{
request.Host,
request.URL.Path,
request.Method,
ipAddr,
body,
request.Header,
request.Form,
}
}
func (client *Client) IsContentJson() bool {
return strings.HasPrefix(client.Header.Get("Content-Type"), ContentJson)
}
<|start_filename|>database/playlists.go<|end_filename|>
package database
import (
"database/sql"
"encoding/json"
"fmt"
"strings"
"sync"
"github.com/Grarak/GoYTFetcher/utils"
)
const TablePlaylists = "playlists"
type Playlist struct {
ApiKey string `json:"apikey,omitempty"`
Name string `json:"name"`
Public bool `json:"public"`
}
type PlaylistId struct {
ApiKey string `json:"apikey,omitempty"`
Name string `json:"name"`
Id string `json:"id"`
}
type PlaylistIds struct {
ApiKey string `json:"apikey,omitempty"`
Name string `json:"name"`
Ids []string `json:"ids"`
}
type PlaylistLinkPublic struct {
ApiKey string `json:"apikey,omitempty"`
Name string `json:"name"`
Playlist string `json:"playlist"`
}
func NewPlaylist(data []byte) (Playlist, error) {
var name Playlist
err := json.Unmarshal(data, &name)
return name, err
}
func NewPlaylistId(data []byte) (PlaylistId, error) {
var name PlaylistId
err := json.Unmarshal(data, &name)
return name, err
}
func NewPlaylistIds(data []byte) (PlaylistIds, error) {
var name PlaylistIds
err := json.Unmarshal(data, &name)
return name, err
}
func NewPlaylistPublic(data []byte) (PlaylistLinkPublic, error) {
var name PlaylistLinkPublic
err := json.Unmarshal(data, &name)
return name, err
}
type PlaylistsDB struct {
db *sql.DB
rwLock *sync.RWMutex
}
func newPlaylistsDB(db *sql.DB, rwLock *sync.RWMutex) (*PlaylistsDB, error) {
cmd := newTableBuilder(TablePlaylists).
addForeignKey(ForeignKeyApikey).
addPrimaryKey(ColumnName).
addColumn(ColumnPublic).
addColumn(ColumnIds).build()
_, err := db.Exec(cmd)
if err != nil {
return nil, err
}
return &PlaylistsDB{db, rwLock}, nil
}
func (playlistsDB *PlaylistsDB) GetPlaylists(apiKey string, publicOnly bool) ([]Playlist, error) {
playlistsDB.rwLock.RLock()
defer playlistsDB.rwLock.RUnlock()
cmd := fmt.Sprintf(
"SELECT %s,%s FROM %s WHERE %s = ?",
ColumnName.name, ColumnPublic.name, TablePlaylists,
ColumnApikey.name)
if publicOnly {
cmd += fmt.Sprintf(" AND %s = 1", ColumnPublic.name)
}
stmt, err := playlistsDB.db.Prepare(cmd)
if err != nil {
return nil, err
}
defer stmt.Close()
rows, err := stmt.Query(apiKey)
if err != nil {
return nil, err
}
defer rows.Close()
playlists := make([]Playlist, 0)
for rows.Next() {
var name string
var public bool
err := rows.Scan(&name, &public)
if err != nil {
return nil, err
}
playlists = append(playlists, Playlist{Name: name, Public: public})
}
return playlists, nil
}
func (playlistsDB *PlaylistsDB) CreatePlaylist(playlist Playlist) error {
playlistsDB.rwLock.Lock()
defer playlistsDB.rwLock.Unlock()
_, err := playlistsDB.db.Exec(fmt.Sprintf(
"INSERT INTO %s (%s,%s,%s,%s) VALUES (?,?,?,?)",
TablePlaylists,
ColumnApikey.name, ColumnName.name, ColumnPublic.name, ColumnIds.name),
playlist.ApiKey, playlist.Name, playlist.Public, "")
return err
}
func (playlistsDB *PlaylistsDB) DeletePlaylist(playlist Playlist) error {
playlistsDB.rwLock.Lock()
defer playlistsDB.rwLock.Unlock()
_, err := playlistsDB.db.Exec(fmt.Sprintf(
"DELETE FROM %s WHERE %s = ? AND %s = ?",
TablePlaylists, ColumnApikey.name, ColumnName.name),
playlist.ApiKey, playlist.Name)
return err
}
func (playlistsDB *PlaylistsDB) SetPublic(playlist Playlist) error {
playlistsDB.rwLock.Lock()
defer playlistsDB.rwLock.Unlock()
_, err := playlistsDB.db.Exec(fmt.Sprintf(
"UPDATE %s SET %s = ? WHERE %s = ? AND %s = ?",
TablePlaylists, ColumnPublic.name, ColumnApikey.name, ColumnName.name),
playlist.Public, playlist.ApiKey, playlist.Name)
return err
}
func (playlistsDB *PlaylistsDB) GetPlaylistIds(playlist Playlist) ([]string, error) {
playlistsDB.rwLock.RLock()
defer playlistsDB.rwLock.RUnlock()
return playlistsDB.getPlaylistIds(playlist)
}
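// getPlaylistIds expects the caller to hold the lock. Playlist ids are stored
// as one comma-separated string, so an empty column is normalised to an empty
// slice instead of [""].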
func (playlistsDB *PlaylistsDB) getPlaylistIds(playlist Playlist) ([]string, error) {
stmt, err := playlistsDB.db.Prepare(fmt.Sprintf(
"SELECT %s FROM %s WHERE %s = ? AND %s = ?",
ColumnIds.name, TablePlaylists, ColumnApikey.name, ColumnName.name))
if err != nil {
return nil, err
}
defer stmt.Close()
row := stmt.QueryRow(playlist.ApiKey, playlist.Name)
var ids string
err = row.Scan(&ids)
if err != nil {
return nil, err
}
list := strings.Split(ids, ",")
if len(list) == 1 && utils.StringIsEmpty(list[0]) {
list = make([]string, 0)
}
return list, nil
}
func (playlistsDB *PlaylistsDB) IsPlaylistPublic(playlist Playlist) bool {
playlistsDB.rwLock.RLock()
defer playlistsDB.rwLock.RUnlock()
row := playlistsDB.db.QueryRow(fmt.Sprintf(
"SELECT 1 FROM %s WHERE %s = ? AND %s = ? AND %s = ?",
TablePlaylists, ColumnApikey.name, ColumnName.name, ColumnPublic.name),
playlist.ApiKey, playlist.Name, true)
var public bool
err := row.Scan(&public)
return err == nil && public
}
func (playlistsDB *PlaylistsDB) AddIdToPlaylist(playlistId PlaylistId) error {
playlistsDB.rwLock.Lock()
defer playlistsDB.rwLock.Unlock()
ids, err := playlistsDB.getPlaylistIds(Playlist{
ApiKey: playlistId.ApiKey, Name: playlistId.Name})
if err != nil {
return err
}
ids = append(ids, playlistId.Id)
return playlistsDB.setPlaylistIds(PlaylistIds{
playlistId.ApiKey, playlistId.Name, ids})
}
func (playlistsDB *PlaylistsDB) DeleteIdFromPlaylist(playlistId PlaylistId) error {
playlistsDB.rwLock.Lock()
defer playlistsDB.rwLock.Unlock()
ids, err := playlistsDB.getPlaylistIds(Playlist{
ApiKey: playlistId.ApiKey, Name: playlistId.Name})
if err != nil {
return err
}
index := -1
for i, id := range ids {
if id == playlistId.Id {
index = i
break
}
}
if index < 0 {
return fmt.Errorf("id to delete not found")
}
newIds := ids[:index]
newIds = append(newIds, ids[index+1:]...)
return playlistsDB.setPlaylistIds(PlaylistIds{
playlistId.ApiKey, playlistId.Name, newIds})
}
func (playlistsDB *PlaylistsDB) SetPlaylistIds(playlistIds PlaylistIds) error {
playlistsDB.rwLock.Lock()
defer playlistsDB.rwLock.Unlock()
return playlistsDB.setPlaylistIds(playlistIds)
}
func (playlistsDB *PlaylistsDB) setPlaylistIds(playlistIds PlaylistIds) error {
set := make(map[string]struct{})
for _, id := range playlistIds.Ids {
if _, ok := set[id]; ok {
return fmt.Errorf("duplicate in ids")
}
set[id] = struct{}{}
}
_, err := playlistsDB.db.Exec(fmt.Sprintf(
"UPDATE %s SET %s = ? WHERE %s = ? AND %s = ?",
TablePlaylists, ColumnIds.name, ColumnApikey.name, ColumnName.name),
strings.Join(playlistIds.Ids, ","), playlistIds.ApiKey, playlistIds.Name)
return err
}
<|start_filename|>ytdl/format_list.go<|end_filename|>
package ytdl
import (
"sort"
"github.com/Grarak/GoYTFetcher/utils"
)
// FormatList is a slice of formats with filtering functionality
type FormatList []Format
func (formats FormatList) Filter(key FormatKey, values []interface{}) FormatList {
var dst FormatList
for _, v := range values {
for _, f := range formats {
if utils.InterfaceToString(f.ValueForKey(key)) == utils.InterfaceToString(v) {
dst = append(dst, f)
}
}
}
return dst
}
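// Extremes sorts a copy of the list by the given key and keeps every format
// that ties with the best (or worst) value, so callers can choose among
// equally ranked formats.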
func (formats FormatList) Extremes(key FormatKey, best bool) FormatList {
dst := formats.Copy()
if len(dst) > 1 {
dst.Sort(key, best)
first := dst[0]
var i int
for i = 0; i < len(dst)-1; i++ {
if first.CompareKey(dst[i+1], key) != 0 {
break
}
}
i++
dst = dst[0:i]
}
return dst
}
func (formats FormatList) Best(key FormatKey) FormatList {
return formats.Extremes(key, true)
}
func (formats FormatList) Worst(key FormatKey) FormatList {
return formats.Extremes(key, false)
}
func (formats FormatList) Sort(key FormatKey, reverse bool) {
wrapper := formatsSortWrapper{formats, key}
if !reverse {
sort.Stable(wrapper)
} else {
sort.Stable(sort.Reverse(wrapper))
}
}
func (formats FormatList) Subtract(other FormatList) FormatList {
var dst FormatList
for _, f := range formats {
include := true
for _, f2 := range other {
if f2.Itag == f.Itag {
include = false
break
}
}
if include {
dst = append(dst, f)
}
}
return dst
}
func (formats FormatList) Copy() FormatList {
dst := make(FormatList, len(formats))
copy(dst, formats)
return dst
}
type formatsSortWrapper struct {
formats FormatList
key FormatKey
}
func (s formatsSortWrapper) Len() int {
return len(s.formats)
}
func (s formatsSortWrapper) Less(i, j int) bool {
return s.formats[i].CompareKey(s.formats[j], s.key) < 0
}
func (s formatsSortWrapper) Swap(i, j int) {
s.formats[i], s.formats[j] = s.formats[j], s.formats[i]
}
<|start_filename|>database/histories.go<|end_filename|>
package database
import (
"database/sql"
"encoding/json"
"fmt"
"strings"
"sync"
"time"
)
const TableHistories = "histories"
type History struct {
ApiKey string `json:"apikey"`
Id string `json:"id"`
Date time.Time `json:"-"`
}
func NewHistory(data []byte) (History, error) {
var history History
err := json.Unmarshal(data, &history)
return history, err
}
type HistoriesDB struct {
db *sql.DB
rwLock *sync.RWMutex
}
func newHistoriesDB(db *sql.DB, rwLock *sync.RWMutex) (*HistoriesDB, error) {
cmd := newTableBuilder(TableHistories).
addForeignKey(ForeignKeyApikey).
addPrimaryKey(ColumnId).
addColumn(ColumnDate).build()
_, err := db.Exec(cmd)
if err != nil {
return nil, err
}
return &HistoriesDB{db, rwLock}, nil
}
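// AddHistory stores a watched id for the given API key. The history is capped
// at 50 entries: anything beyond that is deleted before the new id is written
// with INSERT OR REPLACE, which also refreshes the date of an id that was
// already present.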
func (historiesDB *HistoriesDB) AddHistory(apiKey, id string) error {
id = strings.TrimSpace(id)
historiesDB.rwLock.Lock()
defer historiesDB.rwLock.Unlock()
history, err := historiesDB.getHistory(apiKey)
if err != nil {
return err
}
for i := 50; i < len(history); i++ {
_, err := historiesDB.db.Exec(fmt.Sprintf(
"DELETE FROM %s WHERE %s = ? AND %s = ?",
TableHistories, ColumnApikey.name, ColumnId.name), apiKey, history[i])
if err != nil {
return err
}
}
_, err = historiesDB.db.Exec(fmt.Sprintf(
"INSERT OR REPLACE INTO %s (%s, %s, %s) VALUES (?, ?, ?)",
TableHistories, ColumnApikey.name, ColumnId.name,
ColumnDate.name),
apiKey, id, time.Now().Format(dateTimeFormat))
return err
}
func (historiesDB *HistoriesDB) GetHistory(apiKey string) ([]string, error) {
historiesDB.rwLock.RLock()
defer historiesDB.rwLock.RUnlock()
return historiesDB.getHistory(apiKey)
}
func (historiesDB *HistoriesDB) getHistory(apiKey string) ([]string, error) {
stmt, err := historiesDB.db.Prepare(fmt.Sprintf(
"SELECT %s FROM %s WHERE %s = ? "+
"ORDER BY %s DESC",
ColumnId.name, TableHistories, ColumnApikey.name,
ColumnDate.name))
if err != nil {
return nil, err
}
defer stmt.Close()
rows, err := stmt.Query(apiKey)
if err != nil {
return nil, err
}
defer rows.Close()
links := make([]string, 0)
for rows.Next() {
var link string
err = rows.Scan(&link)
if err != nil {
return nil, err
}
links = append(links, link)
}
return links, nil
}
<|start_filename|>logger/log.go<|end_filename|>
package logger
import (
"fmt"
"io/ioutil"
"os"
"regexp"
"strconv"
"sync"
"github.com/Grarak/GoYTFetcher/utils"
"github.com/op/go-logging"
)
const logFileName = "log.txt" // must match the file Init creates and opens
var logFileRegex = regexp.MustCompile(utils.LOG_PREFIX + "(\\d+)\\" + utils.LOG_EXTENSION)
const LogFilesLimit = 20
const LogFileSize = 100 * 1024
var log = logging.MustGetLogger("example")
var format = logging.MustStringFormatter("%{color}%{time:Jan 2 15:04:05.000}: %{message}%{color:reset}")
var lock sync.Mutex
var logFile *os.File
func Init() {
utils.MkDir(utils.LOG_DIR)
if !utils.FileExists(utils.LOG_DIR + "/log.txt") {
_, err := os.Create(utils.LOG_DIR + "/log.txt")
utils.Panic(err)
}
file, err := os.OpenFile(utils.LOG_DIR+"/log.txt",
os.O_APPEND|os.O_WRONLY, 0600)
utils.Panic(err)
logFile = file
consoleBackend := logging.NewLogBackend(os.Stderr, "", 0)
fileBackend := logging.NewLogBackend(logFile, "", 0)
logging.SetBackend(logging.NewBackendFormatter(consoleBackend, format),
logging.NewBackendFormatter(fileBackend, format))
}
func I(message interface{}) {
lock.Lock()
defer lock.Unlock()
text := fmt.Sprintf("%v", message)
log.Info(text)
checkLogSize()
}
func E(message interface{}) {
lock.Lock()
defer lock.Unlock()
text := fmt.Sprintf("%v", message)
log.Error(text)
checkLogSize()
}
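// checkLogSize rotates the log once it grows past LogFileSize: the active file
// is moved into the numbered rotation, existing rotated files are shifted up
// by one index, files at or beyond LogFilesLimit are removed, and Init is
// called again to reopen a fresh active log.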
func checkLogSize() {
info, err := logFile.Stat()
utils.Panic(err)
if info.Size() >= LogFileSize {
utils.Panic(os.Rename(utils.LOG_DIR+"/"+logFileName,
utils.LOG_DIR+"/"+newLogFile(0)))
files, err := ioutil.ReadDir(utils.LOG_DIR)
utils.Panic(err)
highestCount := 0
for _, fileInfo := range files {
if logFileRegex.MatchString(fileInfo.Name()) {
count, err := strconv.Atoi(logFileRegex.
FindAllStringSubmatch(fileInfo.Name(), 1)[0][1])
utils.Panic(err)
if count > highestCount {
highestCount = count
}
}
}
for i := LogFilesLimit; i <= highestCount; i++ {
filePath := utils.LOG_DIR + "/" + newLogFile(i)
utils.Panic(os.Remove(filePath))
}
for ; highestCount >= 0; highestCount-- {
filePath := utils.LOG_DIR + "/" + newLogFile(highestCount)
if utils.FileExists(filePath) {
newFilePath := utils.LOG_DIR + "/" + newLogFile(highestCount+1)
utils.Panic(os.Rename(filePath, newFilePath))
}
}
logFile.Close()
Init()
}
}
func newLogFile(count int) string {
return fmt.Sprintf(utils.LOG_PREFIX+"%d"+utils.LOG_EXTENSION, count)
}
<|start_filename|>utils/crypto.go<|end_filename|>
package utils
import (
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"encoding/base64"
"fmt"
"io"
)
func Encode(text string) string {
return base64.StdEncoding.EncodeToString([]byte(text))
}
func Decode(text string) ([]byte, error) {
return base64.StdEncoding.DecodeString(text)
}
func ToURLBase64(buf []byte) string {
return base64.URLEncoding.EncodeToString(buf)
}
func FromURLBase64(text string) ([]byte, error) {
return base64.URLEncoding.DecodeString(text)
}
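// Encrypt encrypts text with AES in CFB mode: a random IV is generated,
// prepended to the ciphertext, and the whole buffer is returned URL-safe
// base64 encoded so Decrypt can recover the IV from the first aes.BlockSize
// bytes. Texts shorter than aes.BlockSize are rejected with a panic.
// A minimal round-trip sketch (the 32-byte key is just an illustration):
//
//   key := GenerateRandom(32)
//   token := Encrypt(key, "an example plaintext")
//   plain, _ := Decrypt(key, token) // "an example plaintext"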
func Encrypt(key []byte, text string) string {
plaintext := []byte(text)
if len(plaintext) < aes.BlockSize {
panic("Text too short")
}
block, err := aes.NewCipher(key)
if err != nil {
panic(err)
}
ciphertext := make([]byte, aes.BlockSize+len(plaintext))
iv := ciphertext[:aes.BlockSize]
if _, err := io.ReadFull(rand.Reader, iv); err != nil {
panic(err)
}
stream := cipher.NewCFBEncrypter(block, iv)
stream.XORKeyStream(ciphertext[aes.BlockSize:], plaintext)
return ToURLBase64(ciphertext)
}
func Decrypt(key []byte, cryptoText string) (string, error) {
ciphertext, err := FromURLBase64(cryptoText)
if err != nil {
return "", err
}
block, err := aes.NewCipher(key)
if err != nil {
return "", err
}
if len(ciphertext) < aes.BlockSize {
return "", fmt.Errorf("text too short")
}
iv := ciphertext[:aes.BlockSize]
ciphertext = ciphertext[aes.BlockSize:]
stream := cipher.NewCFBDecrypter(block, iv)
stream.XORKeyStream(ciphertext, ciphertext)
return string(ciphertext), nil
}
func GenerateRandom(length int) []byte {
buf := make([]byte, length)
_, err := rand.Read(buf)
Panic(err)
return buf
}
<|start_filename|>testing/testing.go<|end_filename|>
package main
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"math/rand"
"net/http"
"net/url"
"sync"
)
var port int
func main() {
flag.IntVar(&port, "p", 6713, "Which port to use")
flag.Parse()
}
func createUsers() {
var wait sync.WaitGroup
signup := func(i int) {
signupUser(fmt.Sprintf("someUser%d", i),
"12345")
wait.Done()
}
for i := 0; i < 1000; i++ {
wait.Add(1)
go signup(i)
}
wait.Wait()
}
func signupUser(name, password string) error {
user := User{
Name: name,
Password: Encode(password),
}
res, err := http.Post(
getUrl("v1", "users/signup"),
"application/json",
bytes.NewBuffer(user.ToJson()))
if err != nil {
return err
}
b, err := ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return err
}
fmt.Println("signUp: " + string(b))
return nil
}
func loginUser(name, password string) error {
user := User{
Name: name,
Password: Encode(password),
}
res, err := http.Post(getUrl(
"v1", "users/login?"),
"application/json",
bytes.NewBuffer(user.ToJson()))
if err != nil {
return err
}
b, err := ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return err
}
fmt.Println("login: " + string(b))
return nil
}
func listUsers(apiKey string) error {
user := User{ApiKey: apiKey}
queries := url.Values{}
queries.Set("page", "2")
res, err := http.Post(
getUrl("v1", "users/list?")+queries.Encode(),
"application/json",
bytes.NewBuffer(user.ToJson()))
if err != nil {
return err
}
b, err := ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return err
}
fmt.Println("list users: " + string(b))
return nil
}
func createPlaylist(apiKey, name string) error {
playlist := PlaylistName{
apiKey, name,
}
b, err := json.Marshal(playlist)
if err != nil {
return err
}
res, err := http.Post(
getUrl("v1", "users/playlist/create"),
"application/json",
bytes.NewBuffer(b))
if err != nil {
return err
}
b, err = ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return err
}
fmt.Println("create playlist: " + string(b))
return nil
}
func searchYoutube(apiKey, searchQuery string) error {
youtubeSearch := YoutubeSearch{
apiKey, searchQuery,
}
b, err := json.Marshal(youtubeSearch)
if err != nil {
return err
}
res, err := http.Post(
getUrl("v1", "youtube/search"),
"application/json",
bytes.NewBuffer(b))
if err != nil {
return err
}
b, err = ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return err
}
fmt.Println("search youtube: " + string(b))
return nil
}
func getChartsYoutube(apiKey string) error {
youtubeSearch := YoutubeSearch{
Apikey: apiKey,
}
b, err := json.Marshal(youtubeSearch)
if err != nil {
return err
}
res, err := http.Post(
getUrl("v1", "youtube/getcharts"),
"application/json",
bytes.NewBuffer(b))
if err != nil {
return err
}
b, err = ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return err
}
fmt.Println("charts youtube: " + string(b))
return nil
}
func fetchYoutube(apiKey, id string) error {
youtubeSearch := Youtube{
Apikey: apiKey,
Id: id,
}
b, err := json.Marshal(youtubeSearch)
if err != nil {
return err
}
res, err := http.Post(
getUrl("v1", "youtube/fetch"),
"application/json",
bytes.NewBuffer(b))
if err != nil {
return err
}
b, err = ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return err
}
fmt.Println("fetch youtube: " + string(b))
return nil
}
func testDatastructures() {
ranking := &rankingTree{}
var datas []YoutubeSong
for i := 0; i < 100; i++ {
datas = append(datas, YoutubeSong{
id: fmt.Sprintf("someid%d", i),
count: rand.Intn(100)}) // varied counts so the ranking tree gets a non-trivial shape
}
var wait sync.WaitGroup
for _, youtube := range datas {
wait.Add(1)
go func(youtube YoutubeSong) {
ranking.insert(youtube)
wait.Done()
}(youtube)
}
wait.Wait()
ranking.delete(datas[9])
fmt.Println(fmt.Sprintf("size: %d", ranking.getSize()))
startNode := ranking.start
startNode.print("", true, "root")
}
func getUrl(apiVersion, path string) string {
return fmt.Sprintf("http://127.0.0.1:%d/api/%s/%s", port, apiVersion, path)
}
<|start_filename|>ytdl/ytdl.go<|end_filename|>
package ytdl
import (
"bufio"
"bytes"
"encoding/json"
"encoding/xml"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"regexp"
"strconv"
"strings"
"time"
"github.com/golang-collections/collections/stack"
"golang.org/x/net/html"
"github.com/Grarak/GoYTFetcher/logger"
"github.com/Grarak/GoYTFetcher/utils"
"github.com/PuerkitoBio/goquery"
)
const youtubeBaseURL = "https://www.youtube.com/watch"
const youtubeInfoURL = "https://www.youtube.com/get_video_info"
var searchWebSiteRegex = regexp.MustCompile("href=\"/watch\\?v=([a-z_A-Z0-9\\-]{11})\"")
var jsonRegex = regexp.MustCompile("ytplayer.config = (.*?);ytplayer.load")
var sigRegex = regexp.MustCompile("\\/s\\/([a-fA-F0-9\\.]+)")
var sigSubRegex = regexp.MustCompile("([a-fA-F0-9\\.]+)")
// VideoInfo contains the basic information about a YouTube video
type VideoInfo struct {
ID string `json:"id"`
Title string `json:"title"`
Duration time.Duration
}
type VideoDownloadInfo struct {
VideoInfo *VideoInfo
Formats FormatList `json:"formats"`
htmlPlayerFile string
}
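// GetVideoInfoFromID asks the get_video_info endpoint for title and length and
// falls back to scraping the watch page whenever the endpoint fails or returns
// an incomplete response.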
func GetVideoInfoFromID(id string) (*VideoInfo, error) {
u, _ := url.ParseRequestURI(youtubeInfoURL)
values := u.Query()
values.Set("video_id", id)
u.RawQuery = values.Encode()
resp, err := http.Get(u.String())
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return getVideoInfoFromHTML(id)
}
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
values, err = url.ParseQuery(string(body))
if err != nil {
return getVideoInfoFromHTML(id)
}
if status := values.Get("status"); utils.StringIsEmpty(status) || status != "ok" {
return getVideoInfoFromHTML(id)
}
title := values.Get("title")
length := values.Get("length_seconds")
if utils.StringIsEmpty(title) || utils.StringIsEmpty(length) {
return getVideoInfoFromHTML(id)
}
duration, err := time.ParseDuration(length + "s")
if err != nil {
return getVideoInfoFromHTML(id)
}
return &VideoInfo{ID: id, Title: title, Duration: duration}, nil
}
func getVideoInfoFromHTML(id string) (*VideoInfo, error) {
downloadInfo, err := GetVideoDownloadInfo(id)
if err != nil {
return nil, err
}
return downloadInfo.VideoInfo, nil
}
func GetVideoDownloadInfo(id string) (*VideoDownloadInfo, error) {
u, _ := url.ParseRequestURI(youtubeBaseURL)
values := u.Query()
values.Set("v", id)
u.RawQuery = values.Encode()
resp, err := http.Get(u.String())
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return nil, fmt.Errorf("invalid status code: %d", resp.StatusCode)
}
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
return parseVideoInfoFromHTML(id, body)
}
func parseVideoInfoFromHTML(id string, html []byte) (*VideoDownloadInfo, error) {
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(html))
if err != nil {
return nil, err
}
info := &VideoDownloadInfo{VideoInfo: &VideoInfo{}}
// extract the title from the watch page and record the video id
info.VideoInfo.Title = strings.TrimSpace(doc.Find("#eow-title").Text())
info.VideoInfo.ID = id
// match json in javascript
matches := jsonRegex.FindSubmatch(html)
var jsonConfig map[string]interface{}
if len(matches) > 1 {
err = json.Unmarshal(matches[1], &jsonConfig)
if err != nil {
return nil, err
}
}
inf, ok := jsonConfig["args"].(map[string]interface{})
if !ok {
return nil, fmt.Errorf("%s: error no args in json", id)
}
if status, ok := inf["status"].(string); ok && status == "fail" {
return nil, fmt.Errorf("%s: error %d:%s", id, inf["errorcode"], inf["reason"])
}
if length, ok := inf["length_seconds"].(string); ok {
if duration, err := strconv.ParseInt(length, 10, 64); err == nil {
info.VideoInfo.Duration = time.Second * time.Duration(duration)
} else {
logger.I(fmt.Sprintf(id+": Unable to parse duration string: %s", length))
}
} else {
logger.E(id + ": Unable to extract duration")
}
info.htmlPlayerFile = jsonConfig["assets"].(map[string]interface{})["js"].(string)
var formatStrings []string
if fmtStreamMap, ok := inf["url_encoded_fmt_stream_map"].(string); ok {
formatStrings = append(formatStrings, strings.Split(fmtStreamMap, ",")...)
}
if adaptiveFormats, ok := inf["adaptive_fmts"].(string); ok {
formatStrings = append(formatStrings, strings.Split(adaptiveFormats, ",")...)
}
var formats FormatList
for _, v := range formatStrings {
query, err := url.ParseQuery(v)
if err == nil {
itag, _ := strconv.Atoi(query.Get("itag"))
if format, ok := newFormat(itag); ok {
if strings.HasPrefix(query.Get("conn"), "rtmp") {
format.meta["rtmp"] = true
}
for k, v := range query {
if len(v) == 1 {
format.meta[k] = v[0]
} else {
format.meta[k] = v
}
}
formats = append(formats, format)
} else {
logger.I(fmt.Sprintf(id+": No metadata found for itag: %d, skipping...", itag))
}
} else {
logger.I(fmt.Sprintf(id+": Unable to format string %s", err.Error()))
}
}
if dashManifestURL, ok := inf["dashmpd"].(string); ok {
tokens, err := getSigTokens(info.htmlPlayerFile)
if err != nil {
return nil, fmt.Errorf("unable to extract signature tokens: %s", err.Error())
}
dashManifestURL = sigRegex.ReplaceAllStringFunc(dashManifestURL, func(str string) string {
return "/signature/" + decipherTokens(tokens, sigSubRegex.FindString(str))
})
dashFormats, err := getDashManifest(dashManifestURL)
if err != nil {
return nil, fmt.Errorf("unable to extract dash manifest: %s", err.Error())
}
for _, dashFormat := range dashFormats {
added := false
for j, format := range formats {
if dashFormat.Itag == format.Itag {
formats[j] = dashFormat
added = true
break
}
}
if !added {
formats = append(formats, dashFormat)
}
}
}
info.Formats = formats
return info, nil
}
type representation struct {
Itag int `xml:"id,attr"`
Height int `xml:"height,attr"`
URL string `xml:"BaseURL"`
}
func getDashManifest(urlString string) (formats []Format, err error) {
resp, err := http.Get(urlString)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return nil, fmt.Errorf("invalid status code %d", resp.StatusCode)
}
dec := xml.NewDecoder(resp.Body)
var token xml.Token
for ; err == nil; token, err = dec.Token() {
if el, ok := token.(xml.StartElement); ok && el.Name.Local == "Representation" {
var rep representation
err = dec.DecodeElement(&rep, &el)
if err != nil {
break
}
if format, ok := newFormat(rep.Itag); ok {
format.meta["url"] = rep.URL
if rep.Height != 0 {
format.Resolution = strconv.Itoa(rep.Height) + "p"
} else {
format.Resolution = ""
}
formats = append(formats, format)
} else {
logger.I(fmt.Sprintf("No metadata found for itag: %d, skipping...", rep.Itag))
}
}
}
if err != io.EOF {
return nil, err
}
return formats, nil
}
func getDownloadFormat(audioEncoding string, formats FormatList) Format {
var downloadFormat Format
for _, format := range formats {
if format.AudioEncoding == audioEncoding && format.Resolution == "" {
downloadFormat = format
break
}
}
if downloadFormat.AudioBitrate == 0 {
for _, format := range formats {
if format.Resolution == "" {
downloadFormat = format
break
}
}
}
return downloadFormat
}
func (info *VideoDownloadInfo) GetDownloadURL() (*url.URL, error) {
vorbisFormat := getDownloadFormat("vorbis", info.Formats.Best(FormatAudioEncodingKey))
vorbisUrl, err := getDownloadURL(vorbisFormat, info.htmlPlayerFile)
if err != nil {
logger.E(info.VideoInfo.ID + ": Failed to get vorbis url")
return nil, err
}
return vorbisUrl, nil
}
func (info *VideoDownloadInfo) GetDownloadURLWorst() (*url.URL, error) {
opusFormat := getDownloadFormat("opus", info.Formats.Worst(FormatAudioEncodingKey))
opusUrl, err := getDownloadURL(opusFormat, info.htmlPlayerFile)
if err != nil {
logger.E(info.VideoInfo.ID + ": Failed to get opus url")
return nil, err
}
return opusUrl, nil
}
func (info *VideoInfo) GetThumbnailURL(quality ThumbnailQuality) *url.URL {
u, _ := url.Parse(fmt.Sprintf("http://img.youtube.com/vi/%s/%s.jpg",
info.ID, quality))
// http.Get can fail with a nil response, so only close the body when the
// request actually succeeded.
resp, err := http.Get(u.String())
if err == nil {
defer resp.Body.Close()
}
if err != nil || resp.StatusCode != http.StatusOK {
u, _ = url.Parse(fmt.Sprintf("https://i.ytimg.com/vi/%s/%s.jpg",
info.ID, quality))
}
return u
}
func GetVideosFromSearch(searchQuery string) ([]*VideoInfo, error) {
searchUrl := "https://www.youtube.com/results?"
query := url.Values{}
query.Set("search_query", searchQuery)
searchUrl += query.Encode()
res, err := http.Get(searchUrl)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode != 200 {
return nil, fmt.Errorf("couldn't get website")
}
infos := make([]*VideoInfo, 0)
previousLines := make([]string, 3)
reader := bufio.NewReader(res.Body)
for {
line, err := reader.ReadString('\n')
if err != nil {
break
}
if len(previousLines) >= 3 {
previousLines = previousLines[1:]
}
previousLines = append(previousLines, line)
matches := searchWebSiteRegex.FindAllStringSubmatch(line, 1)
if len(matches) > 0 && len(matches[0]) > 1 {
id := matches[0][1]
contains := false
for _, info := range infos {
if info.ID == id {
contains = true
break
}
}
if !contains {
snippet := strings.Join(previousLines, "")
lookupStart := strings.Index(snippet, "<div class=\"yt-lockup-content\">")
previousLines = make([]string, 3)
if lookupStart >= 0 {
start := snippet[lookupStart:]
matches := searchWebSiteRegex.FindAllStringSubmatch(start, 1)
if len(matches) > 0 && len(matches[0]) > 1 {
snippetId := matches[0][1]
if snippetId == id {
xmlSnippet, err := readXmlUntilComplete(start, reader, 0, stack.New())
if err == nil {
node, err := html.Parse(bytes.NewBufferString(xmlSnippet))
if err == nil {
info, err := parseNodeToResult(snippetId, node.FirstChild.LastChild.FirstChild.FirstChild)
if err == nil {
infos = append(infos, info)
}
}
}
}
}
}
}
}
}
if len(infos) == 0 {
return nil, fmt.Errorf("no results found")
}
return infos, nil
}
func parseNodeToResult(id string, node *html.Node) (*VideoInfo, error) {
info := &VideoInfo{ID: id}
for ; node != nil; node = node.NextSibling {
for _, attr := range node.Attr {
if attr.Key == "class" && strings.Trim(attr.Val, " ") == "yt-lockup-title" {
titleNode := node.FirstChild
for ; titleNode != nil; titleNode = titleNode.NextSibling {
switch titleNode.Data {
case "a":
for _, titleAttr := range titleNode.Attr {
if titleAttr.Key == "title" {
info.Title = titleAttr.Val
break
}
}
break
case "span":
times := strings.Split(titleNode.FirstChild.Data, ":")
sum := int64(0)
if len(times) >= 3 && len(times) <= 4 {
for i := 1; i < len(times); i++ {
timeUnit := strings.Trim(times[i], " ")
if len(timeUnit) >= 3 {
timeUnit = timeUnit[:2]
}
convertedTime, err := strconv.Atoi(timeUnit)
if err != nil {
sum = 0
break
}
sum *= 60
sum += int64(convertedTime)
}
info.Duration = time.Duration(sum * 1000 * 1000 * 1000)
}
break
}
}
}
}
}
if len(info.Title) > 0 && info.Duration > 0 {
return info, nil
}
return info, fmt.Errorf("couldn't parse xml")
}
func readXmlUntilComplete(start string, reader *bufio.Reader, position int, tags *stack.Stack) (string, error) {
next := func(position int) (string, error) {
line, err := reader.ReadString('\n')
if err == io.EOF {
return start, err
}
return readXmlUntilComplete(start+line, reader, position, tags)
}
for i := position; i < len(start); i++ {
if rune(start[i]) == rune('<') {
if i+1 == len(start) {
return next(i)
}
isClosing := rune(start[i+1]) == rune('/')
end := i + 1
if isClosing {
end++
}
name := make([]byte, 0)
stopNameAppending := false
for ; end < len(start); end++ {
if rune(start[end]) == rune('>') {
if isClosing {
previousName, ok := tags.Pop().(string)
if !ok || previousName != string(name) {
return start, fmt.Errorf("couldn't parse xml")
}
if tags.Len() == 0 {
return start[:end+1], nil
}
} else {
tags.Push(string(name))
}
name = nil
break
} else {
if rune(start[end]) == rune(' ') {
stopNameAppending = true
}
if !stopNameAppending {
name = append(name, byte(start[end]))
}
}
}
if name != nil {
return next(i)
}
}
}
return start, fmt.Errorf("couldn't parse xml")
}
func (info *VideoInfo) Download(path, youtubeDL string) (string, error) {
destination := path + "/" + info.ID + ".%(ext)s"
output, err := utils.ExecuteCmd(youtubeDL, "--extract-audio", "--audio-format",
"vorbis", "--output", destination, "--", info.ID)
if err != nil {
return "", err
}
lines := strings.Split(output, "\n")
for _, line := range lines {
if strings.HasPrefix(line, "[ffmpeg] Destination:") {
destination = line[strings.Index(line, path):]
break
}
}
if !utils.FileExists(destination) {
return "", fmt.Errorf(destination + " does not exists")
}
return destination, nil
}
<|start_filename|>miniserver/miniserver.go<|end_filename|>
package miniserver
import (
"net"
"net/http"
"strconv"
"strings"
"github.com/Grarak/GoYTFetcher/utils"
)
const (
ContentText = "text/plain"
ContentHtml = "text/html"
ContentJson = "application/json"
ContentJavascript = "text/javascript"
ContentCss = "text/css"
ContentXIcon = "image/x-icon"
ContentSVG = "image/svg+xml"
ContentOgg = "audio/ogg"
ContentOctetStream = "application/octet-stream"
ContentWasm = "application/wasm"
)
var FileExtensions = [][]string{
{"html", ContentHtml},
{"js", ContentJavascript},
{"css", ContentCss},
{"ico", ContentXIcon},
{"svg", ContentSVG},
{"ogg", ContentOgg},
{"wasm", ContentWasm},
}
func getContentTypeForFile(file string) string {
index := strings.LastIndex(file, ".")
if index >= 0 {
extension := file[index+1:]
for _, contentType := range FileExtensions {
if contentType[0] == extension {
return contentType[1]
}
}
}
return ContentOctetStream
}
type MiniServer struct {
port int
listener net.Listener
}
func NewServer(port int) *MiniServer {
return &MiniServer{
port: port,
}
}
func (miniserver *MiniServer) StartListening(callback func(client *Client) Response) {
http.HandleFunc("/", func(writer http.ResponseWriter, request *http.Request) {
defer request.Body.Close()
request.ParseForm()
client := newClient(request)
res := callback(client)
if res == nil {
writer.WriteHeader(http.StatusNotFound)
writer.Write([]byte("Not found"))
} else {
res.write(writer, client)
}
})
listener, err := net.Listen("tcp", ":"+strconv.Itoa(miniserver.port))
utils.Panic(err)
miniserver.listener = listener
http.Serve(listener, nil)
}
func (miniserver *MiniServer) StopListening() {
if miniserver.listener != nil {
miniserver.listener.Close()
}
}
<|start_filename|>database/utils.go<|end_filename|>
package database
import (
"database/sql"
)
const (
dateTimeFormat = "2006-01-02 15:04:05"
)
func rowCountInTable(db *sql.DB, table string) (int, error) {
row := db.QueryRow("SELECT Count(*) FROM " + table)
var count int
err := row.Scan(&count)
return count, err
}
<|start_filename|>database/youtubeid.go<|end_filename|>
package database
import (
"sync"
"github.com/Grarak/GoYTFetcher/utils"
)
type YoutubeId struct {
id string
result YoutubeSearchResult
count int
valuesLock sync.RWMutex
rwLock sync.RWMutex
}
func newYoutubeId(id string) *YoutubeId {
return &YoutubeId{id: id, count: 1}
}
func (youtubeId *YoutubeId) fetchId(youtubeDB *youtubeDBImpl) (YoutubeSearchResult, error) {
youtubeId.rwLock.Lock()
defer youtubeId.rwLock.Unlock()
result, err := youtubeDB.getYoutubeVideoInfoFromYtdl(youtubeId.id)
if err != nil && !utils.StringIsEmpty(youtubeDB.ytKey) {
result, err = youtubeDB.getYoutubeVideoInfoFromApi(youtubeId.id)
}
if err != nil {
return YoutubeSearchResult{}, err
}
youtubeId.result = result
return result, err
}
func (youtubeId *YoutubeId) getResult() YoutubeSearchResult {
youtubeId.rwLock.RLock()
defer youtubeId.rwLock.RUnlock()
return youtubeId.result
}
func (youtubeId *YoutubeId) increaseCount() {
youtubeId.valuesLock.Lock()
defer youtubeId.valuesLock.Unlock()
youtubeId.count++
}
func (youtubeId YoutubeId) GetUniqueId() string {
return youtubeId.id
}
func (youtubeId YoutubeId) GetCount() int {
return youtubeId.count
}
<|start_filename|>utils/utils.go<|end_filename|>
package utils
import (
"fmt"
"io/ioutil"
"log"
"net"
"os"
"os/exec"
"strconv"
)
func Panic(err error) {
if err != nil {
panic(err)
}
}
func MkDir(dir string) error {
return os.MkdirAll(dir, os.ModePerm)
}
func StringIsEmpty(data string) bool {
return len(data) == 0
}
func StringArrayContains(array []string, item string) bool {
for _, value := range array {
if value == item {
return true
}
}
return false
}
func FormatMinutesSeconds(minutes, seconds int) string {
m := strconv.Itoa(minutes)
if len(m) == 1 {
m = "0" + m
}
s := strconv.Itoa(seconds)
if len(s) == 1 {
s = "0" + s
}
return fmt.Sprintf("%s:%s", m, s)
}
func GetOutboundIP() net.IP {
conn, err := net.Dial("udp", "8.8.8.8:80")
if err != nil {
log.Fatal(err)
}
defer conn.Close()
localAddr := conn.LocalAddr().(*net.UDPAddr)
return localAddr.IP
}
func ExecuteCmd(name string, arg ...string) (string, error) {
cmd := exec.Command(name, arg...)
reader, err := cmd.StdoutPipe()
if err != nil {
return "", err
}
defer reader.Close()
err = cmd.Start()
if err != nil {
return "", err
}
buf, err := ioutil.ReadAll(reader)
if err != nil {
return "", err
}
// Reap the child process; the collected output is returned regardless of the
// command's exit status to keep the previous behaviour.
cmd.Wait()
return string(buf), nil
}
func FileExists(file string) bool {
_, err := os.Stat(file)
return err == nil
}
func ReverseStringSlice(s []string) {
for i, j := 0, len(s)-1; i < len(s)/2; i, j = i+1, j-1 {
s[i], s[j] = s[j], s[i]
}
}
func InterfaceToString(val interface{}) string {
return fmt.Sprintf("%v", val)
}
<|start_filename|>database/youtubecharts.go<|end_filename|>
package database
import (
"bytes"
"encoding/json"
"io/ioutil"
"net/http"
"net/url"
"github.com/Grarak/GoYTFetcher/utils"
)
type YoutubeChartThumbnailDetails struct {
Url string `json:"url"`
}
type YoutubeChartThumbnail struct {
Details []YoutubeChartThumbnailDetails `json:"thumbnails"`
}
type YoutubeChartVideoView struct {
Id string `json:"id"`
Title string `json:"title"`
Thumbnail YoutubeChartThumbnail `json:"thumbnail"`
Duration int `json:"videoDuration"`
}
type YoutubeChartVideo struct {
ListType string `json:"listType"`
VideoViews []YoutubeChartVideoView `json:"videoViews"`
}
type YoutubeChartMusicAnalyticsSectionRendererContent struct {
Videos []YoutubeChartVideo `json:"videos"`
}
type YoutubeChartMusicAnalyticsSectionRenderer struct {
Content YoutubeChartMusicAnalyticsSectionRendererContent `json:"content"`
}
type YoutubeChartSectionListRendererContent struct {
MusicAnalyticsSectionRenderer YoutubeChartMusicAnalyticsSectionRenderer `json:"musicAnalyticsSectionRenderer"`
}
type YoutubeChartSectionListRenderer struct {
Contents []YoutubeChartSectionListRendererContent `json:"contents"`
}
type YoutubeChartContents struct {
SectionListRenderer YoutubeChartSectionListRenderer `json:"sectionListRenderer"`
}
type YoutubeChart struct {
Contents YoutubeChartContents `json:"contents"`
}
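// getYoutubeChartsFromApi posts a fixed browse query to charts.youtube.com and
// maps the returned trending videos (preferring the TRENDING_CHART list) to
// YoutubeSearchResult values with a formatted duration.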
func getYoutubeChartsFromApi(apiKey string) ([]YoutubeSearchResult, error) {
chartsUrl := "https://charts.youtube.com/youtubei/v1/browse?"
query := url.Values{}
query.Add("alt", "json")
query.Add("maxResults", "30")
query.Add("key", apiKey)
payload := `{
"context": {
"client": {
"clientName": "WEB_MUSIC_ANALYTICS",
"clientVersion": "0.2",
"hl": "en",
"gl": "US",
"experimentIds": null,
"theme": "MUSIC"
},
"capabilities": {},
"request": {
"internalExperimentFlags": []
}
},
"query": "chart_params_type=WEEK&perspective=CHART&flags=viral_video_chart&selected_chart=TRACKS&chart_params_id=weekly%3A0%3A0%3Aus",
"browseId": "FEmusic_analytics"
}`
req, err := http.NewRequest("POST", chartsUrl+query.Encode(),
bytes.NewBuffer([]byte(payload)))
if err != nil {
return nil, err
}
defer req.Body.Close()
req.Header.Add("Content-Type", "application/json")
req.Header.Add("Referer", "https://charts.youtube.com/charts/TrendingVideos/us")
res, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
return nil, err
}
var chart YoutubeChart
err = json.Unmarshal(b, &chart)
if err != nil {
return nil, err
}
videoTypes := chart.Contents.SectionListRenderer.Contents[0].MusicAnalyticsSectionRenderer.Content.Videos
var videoChart *YoutubeChartVideo
// Point at the slice element itself; taking the address of the loop variable
// would leave videoChart referring to whatever value it held last.
for i := range videoTypes {
if videoTypes[i].ListType == "TRENDING_CHART" {
videoChart = &videoTypes[i]
break
}
}
if videoChart == nil {
videoChart = &videoTypes[0]
}
results := make([]YoutubeSearchResult, 0)
for _, video := range videoChart.VideoViews {
minutes := video.Duration / 60
seconds := video.Duration % 60
results = append(results, YoutubeSearchResult{video.Title, video.Id,
video.Thumbnail.Details[1].Url,
utils.FormatMinutesSeconds(minutes, seconds)})
}
return results, nil
}
<|start_filename|>testing/playlist.go<|end_filename|>
package main
type PlaylistName struct {
ApiKey string `json:"apikey"`
Name string `json:"name"`
}
<|start_filename|>database/youtube.go<|end_filename|>
package database
import (
"encoding/json"
"fmt"
"io/ioutil"
"os/exec"
"strings"
"sync"
"time"
"github.com/Grarak/GoYTFetcher/logger"
"github.com/Grarak/GoYTFetcher/utils"
)
type Youtube struct {
ApiKey string `json:"apikey"`
SearchQuery string `json:"searchquery"`
Id string `json:"id"`
AddHistory bool `json:"addhistory"`
}
func NewYoutube(data []byte) (Youtube, error) {
var youtube Youtube
err := json.Unmarshal(data, &youtube)
return youtube, err
}
type YouTubeDB interface {
GetYoutubeSong(id string) (*YoutubeSong, error)
FetchYoutubeSong(id string) (string, string, error)
GetYoutubeSearch(searchQuery string) ([]YoutubeSearchResult, error)
GetYoutubeInfo(id string) (YoutubeSearchResult, error)
GetYoutubeCharts() ([]YoutubeSearchResult, error)
}
type youtubeDBImpl struct {
randomKey []byte
ytKey string
youtubeDL string
songsRanking *rankingTree
songs sync.Map
searchesRanking *rankingTree
searches sync.Map
idRanking *rankingTree
ids sync.Map
deleteCacheLock sync.RWMutex
charts []YoutubeSearchResult
chartsLock sync.RWMutex
chartsLastFetched time.Time
}
func newYoutubeDB(key []byte, ytKey string) (YouTubeDB, error) {
youtubeDL, err := exec.LookPath(utils.YOUTUBE_DL)
if err != nil {
return nil, err
}
youtubeDB := &youtubeDBImpl{
youtubeDL: youtubeDL,
songsRanking: new(rankingTree),
searchesRanking: new(rankingTree),
idRanking: new(rankingTree),
randomKey: key,
ytKey: ytKey,
}
files, err := ioutil.ReadDir(utils.YOUTUBE_DIR)
if err != nil {
return nil, err
}
for _, file := range files {
if !file.IsDir() {
id := file.Name()
id = id[:strings.LastIndex(id, ".")]
youtubeSong := newYoutubeSong(id)
youtubeSong.setDownloaded(true)
youtubeSong.filePath = utils.YOUTUBE_DIR + "/" + file.Name()
youtubeDB.songsRanking.insert(*youtubeSong)
youtubeDB.songs.Store(id, youtubeSong)
}
}
return youtubeDB, nil
}
func (youtubeDB *youtubeDBImpl) GetYoutubeSong(id string) (*YoutubeSong, error) {
decryptedId, err := utils.Decrypt(youtubeDB.randomKey, id)
if err != nil {
return nil, err
}
loadedSong, ok := youtubeDB.songs.Load(decryptedId[:11])
if !ok {
return nil, fmt.Errorf("%s does not exist", id)
}
youtubeSong := loadedSong.(*YoutubeSong)
return youtubeSong, nil
}
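// FetchYoutubeSong returns a playable link plus the encrypted id for a video.
// Songs already downloaded are referenced by their encrypted id (served from
// local disk), otherwise the direct download URL is returned and, for new
// entries, a background download is started. The ranking tree keeps roughly
// the 1000 most requested songs; once that size is reached the least used
// entry is evicted and removed from disk.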
func (youtubeDB *youtubeDBImpl) FetchYoutubeSong(id string) (string, string, error) {
id = strings.TrimSpace(id)
youtubeSong := newYoutubeSong(id)
loadedSong, loaded := youtubeDB.songs.LoadOrStore(id, youtubeSong)
if loaded {
youtubeSong = loadedSong.(*YoutubeSong)
youtubeSong.increaseCount()
}
youtubeSong.songLock.Lock()
defer youtubeSong.songLock.Unlock()
encryptedId := youtubeSong.getEncryptedId(youtubeDB.randomKey)
var link string
if youtubeSong.isDownloaded() {
link = encryptedId
} else if youtubeSong.IsDownloading() {
link, _ = youtubeSong.getDownloadUrl()
} else if !loaded {
link, _ = youtubeSong.getDownloadUrl()
if !utils.StringIsEmpty(link) {
go func() {
youtubeDB.deleteCacheLock.RLock()
defer youtubeDB.deleteCacheLock.RUnlock()
if err := youtubeSong.download(youtubeDB); err != nil {
logger.E(fmt.Sprintf("Failed to download %s %s", youtubeSong.id, err))
}
}()
}
}
if utils.StringIsEmpty(link) {
youtubeDB.songs.Delete(youtubeSong.id)
return "", "", fmt.Errorf("%s: failed to get url", youtubeSong.id)
}
youtubeDB.songsRanking.delete(*youtubeSong)
youtubeDB.songsRanking.insert(*youtubeSong)
if youtubeDB.songsRanking.getSize() >= 1000 {
lowestSong := youtubeDB.songsRanking.getLowest()
youtubeDB.songsRanking.delete(lowestSong)
loadedSong, loaded = youtubeDB.songs.Load(lowestSong.GetUniqueId())
if loaded {
youtubeSong := loadedSong.(*YoutubeSong)
youtubeDB.songs.Delete(lowestSong.GetUniqueId())
youtubeDB.deleteCacheLock.Lock()
youtubeSong.delete()
youtubeDB.deleteCacheLock.Unlock()
}
}
return link, encryptedId, nil
}
func (youtubeDB *youtubeDBImpl) GetYoutubeSearch(searchQuery string) ([]YoutubeSearchResult, error) {
if utils.StringIsEmpty(searchQuery) {
return nil, fmt.Errorf("search query is empty")
}
youtubeSearch := newYoutubeSearch(searchQuery)
loadedSearch, loaded := youtubeDB.searches.LoadOrStore(youtubeSearch.query, youtubeSearch)
if loaded {
youtubeSearch = loadedSearch.(*YoutubeSearch)
youtubeSearch.increaseCount()
}
var results []YoutubeSearchResult
var err error
if loaded {
results = youtubeSearch.getResults()
} else {
results, err = youtubeSearch.search(youtubeDB)
}
if err == nil {
youtubeDB.searchesRanking.delete(*youtubeSearch)
// keep the search cache and its ranking in sync; the song caches have their
// own bookkeeping in FetchYoutubeSong
youtubeDB.searchesRanking.insert(*youtubeSearch)
if youtubeDB.searchesRanking.getSize() >= 1000 {
lowestSearch := youtubeDB.searchesRanking.getLowest()
youtubeDB.searchesRanking.delete(lowestSearch)
youtubeDB.searches.Delete(lowestSearch.GetUniqueId())
}
} else {
youtubeDB.searches.Delete(youtubeSearch.query)
}
return results, err
}
func (youtubeDB *youtubeDBImpl) GetYoutubeInfo(id string) (YoutubeSearchResult, error) {
id = strings.TrimSpace(id)
if utils.StringIsEmpty(id) {
return YoutubeSearchResult{}, fmt.Errorf("id is empty")
}
youtubeId := newYoutubeId(id)
loadedId, loaded := youtubeDB.ids.LoadOrStore(youtubeId.id, youtubeId)
if loaded {
youtubeId = loadedId.(*YoutubeId)
youtubeId.increaseCount()
}
var result YoutubeSearchResult
var err error
if loaded {
result = youtubeId.getResult()
} else {
result, err = youtubeId.fetchId(youtubeDB)
}
if err == nil {
youtubeDB.idRanking.delete(*youtubeId)
youtubeDB.idRanking.insert(*youtubeId)
if youtubeDB.idRanking.getSize() >= 1000 {
lowestId := youtubeDB.idRanking.getLowest()
youtubeDB.idRanking.delete(lowestId)
youtubeDB.ids.Delete(lowestId.GetUniqueId())
}
} else {
youtubeDB.ids.Delete(youtubeId.id)
}
return result, err
}
func (youtubeDB *youtubeDBImpl) GetYoutubeCharts() ([]YoutubeSearchResult, error) {
youtubeDB.chartsLock.RLock()
if len(youtubeDB.charts) == 0 || youtubeDB.chartsLastFetched.Day() != time.Now().Day() {
youtubeDB.chartsLock.RUnlock()
youtubeDB.chartsLock.Lock()
defer youtubeDB.chartsLock.Unlock()
// Re-check after upgrading to the write lock; another goroutine may have
// refreshed the charts while this one was waiting.
if len(youtubeDB.charts) > 0 && youtubeDB.chartsLastFetched.Day() == time.Now().Day() {
return youtubeDB.charts, nil
}
charts, err := getYoutubeCharts()
if err != nil {
return nil, err
}
youtubeDB.chartsLastFetched = time.Now()
youtubeDB.charts = charts
return charts, nil
}
defer youtubeDB.chartsLock.RUnlock()
return youtubeDB.charts, nil
}
<|start_filename|>api/v1/youtube.go<|end_filename|>
package v1
import (
"net/http"
"net/url"
"strings"
"github.com/Grarak/GoYTFetcher/database"
"github.com/Grarak/GoYTFetcher/logger"
"github.com/Grarak/GoYTFetcher/miniserver"
"github.com/Grarak/GoYTFetcher/utils"
)
func youtubeFetch(client *miniserver.Client) miniserver.Response {
request, err := database.NewYoutube(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
logger.I(client.IPAddr + ": " + requester.Name + " fetching " + request.Id)
youtubeDB := database.GetDefaultDatabase().YoutubeDB
u, id, err := youtubeDB.FetchYoutubeSong(request.Id)
if err != nil {
logger.E(err)
return client.CreateResponse(utils.StatusYoutubeFetchFailure)
}
if request.AddHistory {
err := database.GetDefaultDatabase().HistoriesDB.AddHistory(request.ApiKey, request.Id)
if err != nil {
return client.CreateResponse(utils.StatusAddHistoryFailed)
}
}
if !strings.HasPrefix(u, "http") {
query := url.Values{}
query.Set("id", u)
if purl, err := url.Parse(u); err == nil {
host := purl.Host
if !strings.HasPrefix(host, "http") {
host = "http://" + client.Host
}
u = host + strings.Replace(
client.Url, "fetch", "get", 1) + "?" + query.Encode()
}
}
response := client.ResponseBody(u)
response.SetHeader("ytfetcher-id", id)
return response
}
return client.CreateResponse(utils.StatusInvalid)
}
func youtubeGet(client *miniserver.Client) miniserver.Response {
id := client.Queries.Get("id")
u := client.Queries.Get("url")
if !utils.StringIsEmpty(id) {
youtubeSong, err := database.GetDefaultDatabase().YoutubeDB.GetYoutubeSong(id)
if err != nil {
return client.CreateResponse(utils.StatusYoutubeGetFailure)
}
if strings.Contains(u, "googlevideo") {
return miniserver.NewForwardResponse(u)
}
reader, err := youtubeSong.Reader()
if err == nil {
response := client.ResponseReader(reader)
response.SetContentType(miniserver.ContentOgg)
return response
}
}
return client.CreateResponse(utils.StatusInvalid)
}
func youtubeSearch(client *miniserver.Client) miniserver.Response {
request, err := database.NewYoutube(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
logger.I(client.IPAddr + ": " + requester.Name + " searching " + request.SearchQuery)
results, err := database.GetDefaultDatabase().YoutubeDB.GetYoutubeSearch(request.SearchQuery)
if err != nil {
return client.CreateResponse(utils.StatusYoutubeSearchFailure)
}
return client.CreateJsonResponse(results)
}
return client.CreateResponse(utils.StatusInvalid)
}
func youtubeGetInfo(client *miniserver.Client) miniserver.Response {
request, err := database.NewYoutube(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
info, err := database.GetDefaultDatabase().YoutubeDB.GetYoutubeInfo(request.Id)
if err != nil {
return client.CreateResponse(utils.StatusYoutubeGetInfoFailure)
}
return client.CreateJsonResponse(info)
}
return client.CreateResponse(utils.StatusInvalid)
}
func youtubeGetCharts(client *miniserver.Client) miniserver.Response {
request, err := database.NewYoutube(client.Request)
if err != nil {
return client.CreateResponse(utils.StatusInvalid)
}
usersDB := database.GetDefaultDatabase().UsersDB
if requester, err := usersDB.FindUserByApiKey(request.ApiKey); err == nil && *requester.Verified {
info, err := database.GetDefaultDatabase().YoutubeDB.GetYoutubeCharts()
if err != nil {
return client.CreateResponse(utils.StatusYoutubeGetChartsFailure)
}
return client.CreateJsonResponse(info)
}
return client.CreateResponse(utils.StatusInvalid)
}
func HandleYoutubeV1(path string, client *miniserver.Client) miniserver.Response {
switch path {
case "fetch":
if client.Method == http.MethodPost && client.IsContentJson() {
return youtubeFetch(client)
}
case "get":
if client.Method == http.MethodGet {
return youtubeGet(client)
}
case "search":
if client.Method == http.MethodPost && client.IsContentJson() {
return youtubeSearch(client)
}
case "getinfo":
if client.Method == http.MethodPost && client.IsContentJson() {
return youtubeGetInfo(client)
}
case "getcharts":
if client.Method == http.MethodPost && client.IsContentJson() {
return youtubeGetCharts(client)
}
}
return nil
}
<|start_filename|>testing/datastructures.go<|end_filename|>
package main
import (
"fmt"
"strconv"
"sync"
)
type YoutubeSong struct {
id string
count int
}
func (youtubeSong YoutubeSong) GetUniqueId() string {
return youtubeSong.id
}
func (youtubeSong YoutubeSong) GetCount() int {
return youtubeSong.count
}
type rankingInterface interface {
GetUniqueId() string
GetCount() int
}
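// rankingTree is a binary tree ordered by GetCount and guarded by an RWMutex.
// It tracks how often items are requested so that getLowest can hand back the
// least used entry.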
type rankingTree struct {
start *node
size int
lock sync.RWMutex
}
func (tree *rankingTree) insert(rankingItem rankingInterface) {
tree.lock.Lock()
defer tree.lock.Unlock()
tree.size++
if tree.start == nil {
tree.start = &node{rankingItem: rankingItem}
return
}
tree.start.insert(rankingItem)
}
func (tree *rankingTree) delete(rankingItem rankingInterface) bool {
tree.lock.Lock()
defer tree.lock.Unlock()
if tree.start == nil {
return false
}
if tree.start.rankingItem.GetUniqueId() == rankingItem.GetUniqueId() {
tree.size--
tree.start = createReplaceNode(tree.start)
return true
}
if tree.start.delete(rankingItem) {
tree.size--
return true
}
return false
}
func (tree *rankingTree) getLowest() rankingInterface {
tree.lock.RLock()
defer tree.lock.RUnlock()
if tree.start == nil {
return nil
}
return tree.start.getLowest()
}
func (tree *rankingTree) getSize() int {
tree.lock.RLock()
defer tree.lock.RUnlock()
return tree.size
}
type node struct {
rankingItem rankingInterface
left, right *node
children int
}
func (nodeLeaf *node) insert(rankingItem rankingInterface) {
nodeLeaf.children++
leftSize := 0
rightSize := 0
if nodeLeaf.left != nil {
leftSize = nodeLeaf.left.children
}
if nodeLeaf.right != nil {
rightSize = nodeLeaf.right.children
}
insertLeft := func() {
if nodeLeaf.left == nil {
nodeLeaf.left = &node{rankingItem: rankingItem}
} else {
nodeLeaf.left.insert(rankingItem)
}
}
insertRight := func() {
if nodeLeaf.right == nil {
nodeLeaf.right = &node{rankingItem: rankingItem}
} else {
nodeLeaf.right.insert(rankingItem)
}
}
if rankingItem.GetCount() < nodeLeaf.rankingItem.GetCount() {
insertLeft()
} else if rankingItem.GetCount() > nodeLeaf.rankingItem.GetCount() {
insertRight()
} else {
if leftSize < rightSize {
insertLeft()
} else {
insertRight()
}
}
}
func (nodeLeaf *node) delete(rankingItem rankingInterface) bool {
if nodeLeaf.left != nil &&
nodeLeaf.left.rankingItem.GetUniqueId() == rankingItem.GetUniqueId() {
nodeLeaf.left = createReplaceNode(nodeLeaf.left)
nodeLeaf.children--
return true
} else if nodeLeaf.right != nil &&
nodeLeaf.right.rankingItem.GetUniqueId() == rankingItem.GetUniqueId() {
nodeLeaf.right = createReplaceNode(nodeLeaf.right)
nodeLeaf.children--
return true
}
if rankingItem.GetCount() < nodeLeaf.rankingItem.GetCount() {
if nodeLeaf.left != nil {
return nodeLeaf.left.delete(rankingItem)
}
} else if rankingItem.GetCount() > nodeLeaf.rankingItem.GetCount() {
if nodeLeaf.right != nil {
return nodeLeaf.right.delete(rankingItem)
}
} else {
deleted := false
if nodeLeaf.left != nil {
deleted = nodeLeaf.left.delete(rankingItem)
}
if !deleted && nodeLeaf.right != nil {
deleted = nodeLeaf.right.delete(rankingItem)
}
return deleted
}
return false
}
func (nodeLeaf *node) getLowest() rankingInterface {
if nodeLeaf.left == nil {
return nodeLeaf.rankingItem
}
return nodeLeaf.left.getLowest()
}
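// createReplaceNode returns the subtree that takes the place of a deleted node:
// the right child is promoted and the old left subtree is re-attached under the
// leftmost descendant of that child.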
func createReplaceNode(replacedNode *node) *node {
newNode := replacedNode.right
if newNode == nil {
return replacedNode.left
}
if replacedNode.left == nil {
return newNode
}
if newNode.left == nil {
newNode.children += replacedNode.left.children
newNode.left = replacedNode.left
return newNode
}
lastLeftNode := newNode.left
lastLeftNode.children += replacedNode.left.children
for lastLeftNode.left != nil {
lastLeftNode = lastLeftNode.left
lastLeftNode.children += replacedNode.left.children
}
lastLeftNode.left = replacedNode.left
return newNode
}
func (nodeLeaf *node) print(prefix string, isTail bool, position string) {
if nodeLeaf == nil {
return
}
message := "├── "
if isTail {
message = "└── "
}
fmt.Println(prefix + message + position + ": " + nodeLeaf.rankingItem.GetUniqueId() +
" " + strconv.Itoa(nodeLeaf.rankingItem.GetCount()))
message = "│ "
if isTail {
message = " "
}
if nodeLeaf.left != nil {
nodeLeaf.left.print(prefix+message, nodeLeaf.right == nil, "left")
}
if nodeLeaf.right != nil {
nodeLeaf.right.print(prefix+message, true, "right")
}
}
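// Illustrative usage of the ranking tree (a sketch added for clarity, not part of
// the original source; the ids and counts below are made up):
//
//	tree := &rankingTree{}
//	tree.insert(YoutubeSong{id: "a", count: 3})
//	tree.insert(YoutubeSong{id: "b", count: 1})
//	tree.insert(YoutubeSong{id: "c", count: 7})
//	lowest := tree.getLowest()                  // YoutubeSong{id: "b", count: 1}
//	tree.delete(YoutubeSong{id: "b", count: 1}) // returns true, size drops to 2
//	fmt.Println(lowest.GetUniqueId(), tree.getSize())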
<|start_filename|>testing/user.go<|end_filename|>
package main
import "encoding/json"
import "github.com/Grarak/GoYTFetcher/utils"
type User struct {
ApiKey string `json:"apikey,omitempty"`
Name string `json:"name,omitempty"`
Password string `json:"password,omitempty"`
}
func (user User) ToJson() []byte {
b, err := json.Marshal(user)
utils.Panic(err)
return b
}
<|start_filename|>database/youtubeapiresult.go<|end_filename|>
package database
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
)
type YoutubeThumbnail struct {
Url string `json:"url"`
}
type YoutubeThumbnails struct {
Medium YoutubeThumbnail `json:"medium"`
}
type YoutubeSnippet struct {
Title string `json:"title"`
Thumbnails YoutubeThumbnails `json:"thumbnails"`
}
type YoutubeContentDetails struct {
Duration string `json:"duration"`
}
type YoutubeItem struct {
Snippet YoutubeSnippet `json:"snippet"`
ContentDetails YoutubeContentDetails `json:"contentDetails"`
Id string `json:"id"`
}
type YoutubeResponse struct {
Items []YoutubeItem `json:"items"`
}
func newYoutubeResponse(data []byte) (YoutubeResponse, error) {
var response YoutubeResponse
err := json.Unmarshal(data, &response)
return response, err
}
func getYoutubeApiResponseItems(url string) (YoutubeResponse, error) {
res, err := http.Get(url)
if err != nil {
return YoutubeResponse{}, err
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return YoutubeResponse{}, fmt.Errorf("youtube api request failed with status %d", res.StatusCode)
}
b, err := ioutil.ReadAll(res.Body)
if err != nil {
return YoutubeResponse{}, err
}
response, err := newYoutubeResponse(b)
return response, err
}
<|start_filename|>testing/utils.go<|end_filename|>
package main
import "encoding/base64"
func Panic(err error) {
if err != nil {
panic(err)
}
}
func Encode(text string) string {
return base64.StdEncoding.EncodeToString([]byte(text))
}
func Decode(text string) ([]byte, error) {
return base64.StdEncoding.DecodeString(text)
}
<|start_filename|>api/api.go<|end_filename|>
package api
import (
"strings"
"github.com/Grarak/GoYTFetcher/api/v1"
"github.com/Grarak/GoYTFetcher/miniserver"
)
type apiHandle func(path string, client *miniserver.Client) miniserver.Response
var v1Apis = map[string]apiHandle{
"info": v1.HandleInfoV1,
"users": v1.HandleUsersV1,
"youtube": v1.HandleYoutubeV1,
}
// GetResponse dispatches the request to the handler registered for the given API version and returns its response
func GetResponse(version, api string, args []string, client *miniserver.Client) miniserver.Response {
var response apiHandle
switch version {
case "v1":
response = v1Apis[api]
}
if response != nil {
return response(strings.Join(args, "/"), client)
}
return nil
}
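// Illustrative example (a sketch, not part of the original source):
// GetResponse("v1", "youtube", []string{"getcharts"}, client) looks up
// v1.HandleYoutubeV1 in v1Apis and invokes it as
// v1.HandleYoutubeV1("getcharts", client).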
| Grarak/GoYTFetcher |
<|start_filename|>index.js<|end_filename|>
exports = module.exports = require('./lib/twitter_ads.js');
<|start_filename|>package.json<|end_filename|>
{
"name": "twitter-ads",
"description": "Twitter Ads API for NodeJS",
"version": "0.2.1",
"author": "<NAME> <<EMAIL>>",
"license": "MIT",
"homepage": "http://fallentech.github.io/twitter-ads",
"main": "index.js",
"engines": {
"node": ">=0.10.0"
},
"dependencies": {
"request": "^2.81.0",
"moment": "^2.18.1",
"async": "^2.5.0"
},
"repository": {
"type": "git",
"url": "git+https://github.com/FallenTech/twitter-ads.git"
},
"keywords": [
"twitter ads",
"twitter ads api",
"twitter advertising",
"twitter advertising api",
"twitter ton",
"twitter ton api"
],
"bugs": {
"url": "https://github.com/FallenTech/twitter-ads/issues"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 0"
}
}
<|start_filename|>lib/helpers.js<|end_filename|>
var querystring = require('querystring'),
constants = require('./constants'),
helpers = {};
// Adhere to RFC 3986 (which reserves !, ', (, ), and *)
function fixedEncodeURIComponent(str) {
return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {
return '%' + c.charCodeAt(0).toString(16);
});
}
helpers.makeQueryString = function(obj) {
var nodeVersionParts = process.versions.node.split('.');
if (nodeVersionParts[1] === '10') { // Node <= 0.10.x version (No encodeURIComponent overriding) ...
return querystring.stringify(obj).replace(/[!'()*]/g, function(c) {
return '%' + c.charCodeAt(0).toString(16);
});
} else return querystring.stringify(obj, null, null, {encodeURIComponent: fixedEncodeURIComponent});
};
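// e.g. (illustrative values): makeQueryString({name: "it's (new)!"}) yields
// "name=it%27s%20%28new%29%21" instead of leaving !, ', ( and ) unescaped.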
// For each /:param fragment in path, move the value from params
helpers.moveParamsIntoPath = function(path, params) {
var rgxParam = /\/:(\w+)/g;
var missingParamErr = null;
path = path.replace(rgxParam, function(hit) {
var paramName = hit.slice(2);
if (!params[paramName]) throw new Error('Twitter-Ads: Params object is missing a required parameter for this request: `' + paramName + '`');
var retVal = '/' + params[paramName];
delete params[paramName];
return retVal;
});
return path;
};
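// e.g. (illustrative values): moveParamsIntoPath('accounts/:account_id/campaigns', {account_id: 'abc1'})
// returns 'accounts/abc1/campaigns' and removes account_id from the params object.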
helpers.normalizeParams = function(params) {
var normalized = params ? params : {};
if (params && typeof params === 'object') {
Object.keys(params).forEach(function(k) {
if (Array.isArray(params[k])) normalized[k] = params[k].join(',');
});
}
return normalized;
};
helpers.tryJSONParse = function(body) {
var ret = body;
try {
ret = JSON.parse(body);
} catch (e) {}
return ret;
};
helpers.parseResponse = function(resp, body, cb) {
var parsedBody;
if (body && typeof body === 'object') parsedBody = body;
else if (body && resp.headers['content-type'].indexOf('application/json') > -1 && typeof body === 'string') parsedBody = helpers.tryJSONParse(body);
if (constants.STATUS_CODES_TO_ABORT_ON.indexOf(resp.statusCode) > -1 || (parsedBody && parsedBody.errors && parsedBody.errors.length)) {
var err = new Error('Bad status code returned: ' + resp.statusCode + '\nTwitter Replied: ' + body.toString());
err.allErrors = [];
if (parsedBody && parsedBody.errors && parsedBody.errors.length) {
err.allErrors = parsedBody.errors;
err.message = 'Twitter-Ads API Error: ' + parsedBody.errors[0].message;
}
return cb(err, resp, parsedBody || body);
}
return cb(null, resp, parsedBody || body);
};
exports = module.exports = helpers; | hoangfuongduy/twitter-ads |
<|start_filename|>Classification/LibLinear/src/test/java/org/tribuo/classification/liblinear/TestLibLinearModel.java<|end_filename|>
/*
* Copyright (c) 2015-2020, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.classification.liblinear;
import com.oracle.labs.mlrg.olcut.util.Pair;
import org.tribuo.CategoricalIDInfo;
import org.tribuo.CategoricalInfo;
import org.tribuo.Dataset;
import org.tribuo.Example;
import org.tribuo.Feature;
import org.tribuo.ImmutableDataset;
import org.tribuo.ImmutableFeatureMap;
import org.tribuo.ImmutableOutputInfo;
import org.tribuo.Model;
import org.tribuo.Prediction;
import org.tribuo.classification.Label;
import org.tribuo.classification.LabelFactory;
import org.tribuo.classification.evaluation.LabelEvaluation;
import org.tribuo.classification.evaluation.LabelEvaluator;
import org.tribuo.classification.example.LabelledDataGenerator;
import org.tribuo.classification.liblinear.LinearClassificationType.LinearType;
import org.tribuo.data.text.TextDataSource;
import org.tribuo.data.text.TextFeatureExtractor;
import org.tribuo.data.text.impl.BasicPipeline;
import org.tribuo.data.text.impl.SimpleTextDataSource;
import org.tribuo.data.text.impl.TextFeatureExtractorImpl;
import org.tribuo.dataset.DatasetView;
import org.tribuo.impl.ListExample;
import de.bwaldvogel.liblinear.FeatureNode;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.tribuo.test.Helpers;
import org.tribuo.util.tokens.impl.BreakIteratorTokenizer;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestLibLinearModel {
private static final Logger logger = Logger.getLogger(TestLibLinearModel.class.getName());
private static final LibLinearClassificationTrainer t = new LibLinearClassificationTrainer();
//on Windows, this resolves to some nonsense like this: /C:/workspace/Classification/LibLinear/target/test-classes/test_input.tribuo
//and the leading slash is a problem and causes this test to fail on windows.
//it's generally poor practice to convert a resource to a path because the file won't normally exist as a file at runtime
//it only works at test time because ./target/test-classes/ is a folder that exists and it is on the classpath.
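//e.g. (illustrative) the replaceFirst("^/(.:/)", "$1") below turns "/C:/workspace/..." into "C:/workspace/..." and leaves unix-style paths untouched.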
private final String TEST_INPUT_PATH = this.getClass().getResource("/test_input_binary.tribuo").getPath().replaceFirst("^/(.:/)", "$1");
private final String TEST_INPUT_PATH_MULTICLASS = this.getClass().getResource("/test_input_multiclass.tribuo").getPath().replaceFirst("^/(.:/)", "$1");
@Test
public void testPredictDataset() throws IOException, ClassNotFoundException {
for (LinearType mtype : LinearType.values()) {
checkModelType(mtype);
}
}
@Test
public void testSingleClassTraining() {
Pair<Dataset<Label>,Dataset<Label>> data = LabelledDataGenerator.denseTrainTest();
DatasetView<Label> trainingData = DatasetView.createView(data.getA(),(Example<Label> e) -> e.getOutput().getLabel().equals("Foo"), "Foo selector");
Model<Label> model = t.train(trainingData);
LabelEvaluation evaluation = (LabelEvaluation) trainingData.getOutputFactory().getEvaluator().evaluate(model,data.getB());
assertEquals(0.0,evaluation.accuracy(new Label("Bar")));
assertEquals(0.0,evaluation.accuracy(new Label("Baz")));
assertEquals(0.0,evaluation.accuracy(new Label("Quux")));
assertEquals(1.0,evaluation.recall(new Label("Foo")));
}
@Test
public void testMulticlass() throws IOException, ClassNotFoundException {
String prefix = "L2R_LR_multiclass";
LibLinearClassificationModel model = loadModel("/models/L2R_LR_multiclass.model");
Dataset<Label> examples = loadMulticlassTestDataset(model);
assertNotNull(model, prefix);
List<Prediction<Label>> predictions = model.predict(examples);
assertEquals(predictions.size(), examples.size(), prefix);
for (Prediction<Label> p : predictions) {
checkPrediction(prefix, model, p);
}
// check for ArrayIndexOutOfBounds
for (Example<Label> example : examples) {
model.getExcuse(example);
}
}
private void checkModelType(LinearType modelType) throws IOException, ClassNotFoundException {
String prefix = String.format("model %s", modelType);
LibLinearClassificationModel model = loadModel(modelType);
Dataset<Label> examples = loadTestDataset(model);
assertNotNull(model, prefix);
List<Prediction<Label>> predictions = model.predict(examples);
assertEquals(predictions.size(), examples.size(), prefix);
for (Prediction<Label> p : predictions) {
checkPrediction(prefix, model, p);
}
// check for ArrayIndexOutOfBounds
for (Example<Label> example : examples) {
model.getExcuse(example);
}
}
private LibLinearClassificationModel loadModel(LinearType modelType) throws IOException, ClassNotFoundException {
String modelPath = "/models/" + modelType + ".model";
return loadModel(modelPath);
}
private LibLinearClassificationModel loadModel(String path) throws IOException, ClassNotFoundException {
File modelFile = new File(this.getClass().getResource(path).getPath());
assertTrue(modelFile.exists(),String.format("model for %s does not exist", path));
try (ObjectInputStream oin = new ObjectInputStream(new FileInputStream(modelFile))) {
Object data = oin.readObject();
return (LibLinearClassificationModel) data;
}
}
private Dataset<Label> loadTestDataset(LibLinearClassificationModel model) throws IOException {
return loadDataset(model, Paths.get(TEST_INPUT_PATH));
}
private Dataset<Label> loadMulticlassTestDataset(LibLinearClassificationModel model) throws IOException {
return loadDataset(model, Paths.get(TEST_INPUT_PATH_MULTICLASS));
}
private Dataset<Label> loadDataset(LibLinearClassificationModel model, Path path) throws IOException {
TextFeatureExtractor<Label> extractor = new TextFeatureExtractorImpl<>(new BasicPipeline(new BreakIteratorTokenizer(Locale.US),2));
TextDataSource<Label> src = new SimpleTextDataSource<>(path, new LabelFactory(), extractor);
return new ImmutableDataset<>(src, model.getFeatureIDMap(), model.getOutputIDInfo(), false);
}
private void checkPrediction(String msgPrefix, LibLinearClassificationModel model, Prediction<Label> prediction) {
assertNotNull(prediction);
ImmutableOutputInfo<Label> labelMap = model.getOutputIDInfo();
Map<String,Label> dist = prediction.getOutputScores();
for (Label k : labelMap.getDomain()) {
String msg = String.format("%s --> dist did not contain entry for label %s", msgPrefix, k);
assertTrue(dist.containsKey(k.getLabel()), msg);
}
}
public Model<Label> testLibLinear(Pair<Dataset<Label>,Dataset<Label>> p) {
Model<Label> m = t.train(p.getA());
LabelEvaluator e = new LabelEvaluator();
LabelEvaluation evaluation = e.evaluate(m,p.getB());
Map<String, List<Pair<String,Double>>> features = m.getTopFeatures(3);
Assertions.assertNotNull(features);
Assertions.assertFalse(features.isEmpty());
features = m.getTopFeatures(-1);
Assertions.assertNotNull(features);
Assertions.assertFalse(features.isEmpty());
return m;
}
@Test
public void testReproducible() {
// Note this test will need to change if LibLinearTrainer grows a per Problem RNG.
Pair<Dataset<Label>,Dataset<Label>> p = LabelledDataGenerator.denseTrainTest();
Model<Label> m = t.train(p.getA());
Map<String, List<Pair<String,Double>>> mFeatures = m.getTopFeatures(-1);
Model<Label> mTwo = t.train(p.getA());
Map<String, List<Pair<String,Double>>> mTwoFeatures = mTwo.getTopFeatures(-1);
assertEquals(mFeatures,mTwoFeatures);
}
@Test
public void testDenseData() {
Pair<Dataset<Label>,Dataset<Label>> p = LabelledDataGenerator.denseTrainTest();
Model<Label> model = testLibLinear(p);
// Test serialization
Helpers.testModelSerialization(model,Label.class);
}
@Test
public void testSparseData() {
Pair<Dataset<Label>,Dataset<Label>> p = LabelledDataGenerator.sparseTrainTest();
testLibLinear(p);
}
@Test
public void testSparseBinaryData() {
Pair<Dataset<Label>,Dataset<Label>> p = LabelledDataGenerator.binarySparseTrainTest();
testLibLinear(p);
}
@Test
public void duplicateFeatureIDs() {
ImmutableFeatureMap fmap = new TestMap();
Example<Label> collision = generateExample(new String[]{"FOO","BAR","BAZ","QUUX"},new double[]{1.0,2.2,3.3,4.4});
int[] testCollisionIndices = new int[]{1,2,3,4};
double[] testCollisionValues = new double[]{4.3,2.2,4.4,1.0};
FeatureNode[] nodes = LibLinearClassificationTrainer.exampleToNodes(collision,fmap,null);
int[] nodesIndices = getIndices(nodes);
double[] nodesValues = getValues(nodes);
logger.log(Level.FINE,"node values " + Arrays.toString(nodes));
assertArrayEquals(testCollisionIndices,nodesIndices);
assertArrayEquals(testCollisionValues,nodesValues,1e-10);
Example<Label> fakecollision = generateExample(new String[]{"BAR","BAZ","QUUX"},new double[]{2.2,3.3,4.4});
testCollisionIndices = new int[]{1,2,3,4};
testCollisionValues = new double[]{3.3,2.2,4.4,1.0};
nodes = LibLinearClassificationTrainer.exampleToNodes(fakecollision,fmap,null);
nodesIndices = getIndices(nodes);
nodesValues = getValues(nodes);
logger.log(Level.FINE,"node values " + Arrays.toString(nodes));
assertArrayEquals(testCollisionIndices,nodesIndices);
assertArrayEquals(testCollisionValues,nodesValues,1e-10);
}
@Test
public void testInvalidExample() {
assertThrows(IllegalArgumentException.class, () -> {
Pair<Dataset<Label>, Dataset<Label>> p = LabelledDataGenerator.denseTrainTest();
Model<Label> m = t.train(p.getA());
m.predict(LabelledDataGenerator.invalidSparseExample());
});
}
@Test
public void testEmptyExample() {
assertThrows(IllegalArgumentException.class, () -> {
Pair<Dataset<Label>, Dataset<Label>> p = LabelledDataGenerator.denseTrainTest();
Model<Label> m = t.train(p.getA());
m.predict(LabelledDataGenerator.emptyExample());
});
}
private static int[] getIndices(FeatureNode[] nodes) {
int[] indices = new int[nodes.length];
for (int i = 0; i < nodes.length; i++) {
indices[i] = nodes[i].index;
}
return indices;
}
private static double[] getValues(FeatureNode[] nodes) {
double[] values = new double[nodes.length];
for (int i = 0; i < nodes.length; i++) {
values[i] = nodes[i].value;
}
return values;
}
private static Example<Label> generateExample(String[] names, double[] values) {
Example<Label> e = new ListExample<>(new Label("MONKEYS"));
for (int i = 0; i < names.length; i++) {
e.add(new Feature(names[i],values[i]));
}
return e;
}
private static class TestMap extends ImmutableFeatureMap {
private static final long serialVersionUID = 1L;
public TestMap() {
super();
CategoricalIDInfo foo = (new CategoricalInfo("FOO")).makeIDInfo(0);
m.put("FOO",foo);
idMap.put(0,foo);
CategoricalIDInfo bar = (new CategoricalInfo("BAR")).makeIDInfo(1);
m.put("BAR",bar);
idMap.put(1,bar);
CategoricalIDInfo baz = (new CategoricalInfo("BAZ")).makeIDInfo(0);
m.put("BAZ",baz);
idMap.put(0,baz);
CategoricalIDInfo quux = (new CategoricalInfo("QUUX")).makeIDInfo(2);
m.put("QUUX",quux);
idMap.put(2,quux);
size = idMap.size();
}
}
}
<|start_filename|>Core/src/main/java/org/tribuo/onnx/package-info.java<|end_filename|>
/*
* Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Interfaces and utilities for exporting Tribuo {@link org.tribuo.Model}s in
* <a href="https://onnx.ai">ONNX</a> format.
* <p>
 * ONNX exported models use floats where Tribuo uses doubles. This is due
 * to comparatively poor support for fp64 in ONNX deployment environments
 * as compared to fp32. In addition, fp32 executes better on the various
* accelerator backends available in
* <a href="https://onnxruntime.ai">ONNX Runtime</a>.
*/
package org.tribuo.onnx;
<|start_filename|>Classification/LibSVM/src/main/java/org/tribuo/classification/libsvm/LibSVMClassificationModel.java<|end_filename|>
/*
* Copyright (c) 2015-2020, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.classification.libsvm;
import com.oracle.labs.mlrg.olcut.util.Pair;
import org.tribuo.Example;
import org.tribuo.ImmutableFeatureMap;
import org.tribuo.ImmutableOutputInfo;
import org.tribuo.Prediction;
import org.tribuo.classification.Label;
import org.tribuo.common.libsvm.LibSVMModel;
import org.tribuo.common.libsvm.LibSVMTrainer;
import org.tribuo.provenance.ModelProvenance;
import libsvm.svm;
import libsvm.svm_model;
import libsvm.svm_node;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* A classification model that uses an underlying LibSVM model to make the
* predictions.
* <p>
* See:
* <pre>
* <NAME>, <NAME>.
* "LIBSVM: a library for Support Vector Machines"
* ACM transactions on intelligent systems and technology (TIST), 2011.
* </pre>
* for the nu-svc algorithm:
* <pre>
* <NAME>, <NAME>, <NAME>, <NAME>.
* "New support vector algorithms"
* Neural Computation, 2000, 1207-1245.
* </pre>
* and for the original algorithm:
* <pre>
* <NAME>, <NAME>.
* "Support-Vector Networks"
* Machine Learning, 1995.
* </pre>
*/
public class LibSVMClassificationModel extends LibSVMModel<Label> {
private static final long serialVersionUID = 3L;
/**
* This is used when the model hasn't seen as many outputs as the OutputInfo says are there.
* It stores the unseen labels to ensure the predict method has the right number of outputs.
* If there are no unobserved labels it's set to Collections.emptySet.
*/
private final Set<Label> unobservedLabels;
LibSVMClassificationModel(String name, ModelProvenance description, ImmutableFeatureMap featureIDMap, ImmutableOutputInfo<Label> labelIDMap, List<svm_model> models) {
super(name, description, featureIDMap, labelIDMap, models.get(0).param.probability == 1, models);
// This sets up the unobservedLabels variable.
int[] curLabels = models.get(0).label;
if (curLabels.length != labelIDMap.size()) {
Map<Integer,Label> tmp = new HashMap<>();
for (Pair<Integer,Label> p : labelIDMap) {
tmp.put(p.getA(),p.getB());
}
for (int i = 0; i < curLabels.length; i++) {
tmp.remove(i);
}
Set<Label> tmpSet = new HashSet<>(tmp.values().size());
for (Label l : tmp.values()) {
tmpSet.add(new Label(l.getLabel(),0.0));
}
this.unobservedLabels = Collections.unmodifiableSet(tmpSet);
} else {
this.unobservedLabels = Collections.emptySet();
}
}
/**
* Returns the number of support vectors.
* @return The number of support vectors.
*/
public int getNumberOfSupportVectors() {
return models.get(0).SV.length;
}
@Override
public Prediction<Label> predict(Example<Label> example) {
svm_model model = models.get(0);
svm_node[] features = LibSVMTrainer.exampleToNodes(example, featureIDMap, null);
// Bias feature is always set
if (features.length == 0) {
throw new IllegalArgumentException("No features found in Example " + example.toString());
}
int[] labels = model.label;
double[] scores = new double[labels.length];
if (generatesProbabilities) {
svm.svm_predict_probability(model, features, scores);
} else {
//LibSVM returns one vs one decision values, which are unpacked into a score vector by voting
double[] onevone = new double[labels.length * (labels.length - 1) / 2];
svm.svm_predict_values(model, features, onevone);
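// The pairwise decision values are packed in order (0 vs 1), (0 vs 2), ..., (1 vs 2), ..., matching the nested loop below.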
int counter = 0;
for (int i = 0; i < labels.length; i++) {
for (int j = i+1; j < labels.length; j++) {
if (onevone[counter] > 0) {
scores[i]++;
} else {
scores[j]++;
}
counter++;
}
}
}
double maxScore = Double.NEGATIVE_INFINITY;
Label maxLabel = null;
Map<String,Label> map = new LinkedHashMap<>();
for (int i = 0; i < scores.length; i++) {
String name = outputIDInfo.getOutput(labels[i]).getLabel();
Label label = new Label(name, scores[i]);
map.put(name,label);
if (label.getScore() > maxScore) {
maxScore = label.getScore();
maxLabel = label;
}
}
if (!unobservedLabels.isEmpty()) {
for (Label l : unobservedLabels) {
map.put(l.getLabel(),l);
}
}
return new Prediction<>(maxLabel, map, features.length, example, generatesProbabilities);
}
@Override
protected LibSVMClassificationModel copy(String newName, ModelProvenance newProvenance) {
return new LibSVMClassificationModel(newName,newProvenance,featureIDMap,outputIDInfo,Collections.singletonList(LibSVMModel.copyModel(models.get(0))));
}
}
<|start_filename|>Regression/Core/src/main/java/org/tribuo/regression/example/NonlinearGaussianDataSource.java<|end_filename|>
/*
* Copyright (c) 2015-2020, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.regression.example;
import com.oracle.labs.mlrg.olcut.config.Config;
import com.oracle.labs.mlrg.olcut.config.PropertyException;
import com.oracle.labs.mlrg.olcut.provenance.ObjectProvenance;
import com.oracle.labs.mlrg.olcut.provenance.Provenance;
import com.oracle.labs.mlrg.olcut.provenance.impl.SkeletalConfiguredObjectProvenance;
import com.oracle.labs.mlrg.olcut.provenance.primitives.StringProvenance;
import org.tribuo.ConfigurableDataSource;
import org.tribuo.Dataset;
import org.tribuo.Example;
import org.tribuo.MutableDataset;
import org.tribuo.OutputFactory;
import org.tribuo.Trainer;
import org.tribuo.impl.ArrayExample;
import org.tribuo.provenance.ConfiguredDataSourceProvenance;
import org.tribuo.provenance.DataSourceProvenance;
import org.tribuo.regression.RegressionFactory;
import org.tribuo.regression.Regressor;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
/**
* Generates a single dimensional output drawn from
* N(w_0*x_0 + w_1*x_1 + w_2*x_1*x_0 + w_3*x_1*x_1*x_1 + intercept,variance).
* <p>
* The features are drawn from a uniform distribution over the range.
*/
public class NonlinearGaussianDataSource implements ConfigurableDataSource<Regressor> {
@Config(mandatory=true,description = "The number of samples to draw.")
private int numSamples;
@Config(description = "The feature weights. Must be a 4 element array.")
private float[] weights = new float[]{1.0f,1.0f,1.0f,1.0f};
@Config(description="The y-intercept of the line.")
private float intercept = 0.0f;
@Config(description="The variance of the noise gaussian.")
private float variance = 1.0f;
@Config(description = "The minimum value of x_0.")
private float xZeroMin = -2.0f;
@Config(description = "The maximum value of x_0.")
private float xZeroMax = 2.0f;
@Config(description = "The minimum value of x_1.")
private float xOneMin = -2.0f;
@Config(description = "The maximum value of x_1.")
private float xOneMax = 2.0f;
@Config(description="The RNG seed.")
private long seed = Trainer.DEFAULT_SEED;
private List<Example<Regressor>> examples;
private final RegressionFactory factory = new RegressionFactory();
private static final String[] featureNames = new String[]{"X_0","X_1"};
/**
* For OLCUT
*/
private NonlinearGaussianDataSource() {}
/**
* Generates a single dimensional output drawn from
* N(w_0*x_0 + w_1*x_1 + w_2*x_1*x_0 + w_3*x_1*x_1*x_1 + intercept,variance).
* <p>
* The features are drawn from a uniform distribution over the range.
* @param numSamples The size of the output dataset.
* @param weights The feature weights.
* @param intercept The y intercept of the line.
* @param variance The variance of the gaussian.
* @param xZeroMin The minimum x_0 value (inclusive).
* @param xZeroMax The maximum x_0 value (exclusive).
* @param xOneMin The minimum x_1 value (inclusive).
* @param xOneMax The maximum x_1 value (exclusive).
* @param seed The rng seed to use.
*/
public NonlinearGaussianDataSource(int numSamples, float[] weights, float intercept, float variance,
float xZeroMin, float xZeroMax, float xOneMin, float xOneMax,
long seed) {
this.numSamples = numSamples;
this.weights = weights;
this.intercept = intercept;
this.variance = variance;
this.xZeroMin = xZeroMin;
this.xZeroMax = xZeroMax;
this.xOneMin = xOneMin;
this.xOneMax = xOneMax;
this.seed = seed;
postConfig();
}
/**
* Used by the OLCUT configuration system, and should not be called by external code.
*/
@Override
public void postConfig() {
// We use java.util.Random here because SplittableRandom doesn't have nextGaussian yet.
Random rng = new Random(seed);
if (weights.length != 4) {
throw new PropertyException("","weights","Must supply 4 weights, found " + weights.length);
}
if (xZeroMax <= xZeroMin) {
throw new PropertyException("","xZeroMax","xZeroMax must be greater than xZeroMin, found xZeroMax = " + xZeroMax + ", xZeroMin = " + xZeroMin);
}
if (xOneMax <= xOneMin) {
throw new PropertyException("","xOneMax","xOneMax must be greater than xOneMin, found xOneMax = " + xOneMax + ", xOneMin = " + xOneMin);
}
if (variance <= 0.0) {
throw new PropertyException("","variance","Variance must be positive, found variance = " + variance);
}
List<Example<Regressor>> examples = new ArrayList<>(numSamples);
double zeroRange = xZeroMax - xZeroMin;
double oneRange = xOneMax - xOneMin;
for (int i = 0; i < numSamples; i++) {
double xZero = (rng.nextDouble() * zeroRange) + xZeroMin;
double xOne = (rng.nextDouble() * oneRange) + xOneMin;
// N(w_0*x_0 + w_1*x_1 + w_2*x_1*x_0 + w_3*x_1*x_1*x_1 + intercept,variance).
double outputValue = (weights[0] * xZero) + (weights[1]*xOne) + (weights[2]*xZero*xOne) + (weights[3]*Math.pow(xOne,3)) + intercept;
Regressor output = new Regressor("Y",(rng.nextGaussian() * variance) + outputValue);
ArrayExample<Regressor> e = new ArrayExample<>(output,featureNames,new double[]{xZero,xOne});
examples.add(e);
}
this.examples = Collections.unmodifiableList(examples);
}
@Override
public OutputFactory<Regressor> getOutputFactory() {
return factory;
}
@Override
public DataSourceProvenance getProvenance() {
return new NonlinearGaussianDataSourceProvenance(this);
}
@Override
public Iterator<Example<Regressor>> iterator() {
return examples.iterator();
}
/**
* Generates a single dimensional output drawn from
* N(w_0*x_0 + w_1*x_1 + w_2*x_1*x_0 + w_3*x_1*x_1*x_1 + intercept,variance).
* <p>
* The features are drawn from a uniform distribution over the range.
* @param numSamples The size of the output dataset.
* @param weights The feature weights.
* @param intercept The y intercept of the line.
* @param variance The variance of the gaussian.
* @param xZeroMin The minimum x_0 value (inclusive).
* @param xZeroMax The maximum x_0 value (exclusive).
* @param xOneMin The minimum x_1 value (inclusive).
* @param xOneMax The maximum x_1 value (exclusive).
* @param seed The rng seed to use.
* @return A dataset drawn from a gaussian.
*/
public static MutableDataset<Regressor> generateDataset(int numSamples, float[] weights, float intercept, float variance,
float xZeroMin, float xZeroMax, float xOneMin, float xOneMax,
long seed) {
NonlinearGaussianDataSource source = new NonlinearGaussianDataSource(numSamples,weights,intercept,variance,
xZeroMin,xZeroMax,xOneMin,xOneMax,seed);
return new MutableDataset<>(source);
}
/**
* Provenance for {@link NonlinearGaussianDataSource}.
*/
public static class NonlinearGaussianDataSourceProvenance extends SkeletalConfiguredObjectProvenance implements ConfiguredDataSourceProvenance {
private static final long serialVersionUID = 1L;
/**
* Constructs a provenance from the host data source.
* @param host The host to read.
*/
NonlinearGaussianDataSourceProvenance(NonlinearGaussianDataSource host) {
super(host,"DataSource");
}
/**
* Constructs a provenance from the marshalled form.
* @param map The map of field values.
*/
public NonlinearGaussianDataSourceProvenance(Map<String, Provenance> map) {
this(extractProvenanceInfo(map));
}
private NonlinearGaussianDataSourceProvenance(ExtractedInfo info) {
super(info);
}
/**
* Extracts the relevant provenance information fields for this class.
* @param map The map to remove values from.
* @return The extracted information.
*/
protected static ExtractedInfo extractProvenanceInfo(Map<String,Provenance> map) {
Map<String,Provenance> configuredParameters = new HashMap<>(map);
String className = ObjectProvenance.checkAndExtractProvenance(configuredParameters,CLASS_NAME, StringProvenance.class, NonlinearGaussianDataSourceProvenance.class.getSimpleName()).getValue();
String hostTypeStringName = ObjectProvenance.checkAndExtractProvenance(configuredParameters,HOST_SHORT_NAME, StringProvenance.class, NonlinearGaussianDataSourceProvenance.class.getSimpleName()).getValue();
return new ExtractedInfo(className,hostTypeStringName,configuredParameters,Collections.emptyMap());
}
}
}
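// Illustrative usage (a sketch, not part of the original source; the sample count
// and parameter values below are arbitrary):
// MutableDataset<Regressor> data = NonlinearGaussianDataSource.generateDataset(
//         1000, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0.0f, 1.0f,
//         -2.0f, 2.0f, -2.0f, 2.0f, Trainer.DEFAULT_SEED);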
<|start_filename|>Core/src/main/java/org/tribuo/onnx/ONNXOperators.java<|end_filename|>
/*
* Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.onnx;
import ai.onnx.proto.OnnxMl;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.tribuo.onnx.ONNXAttribute.VARIADIC_INPUT;
/**
* The supported ONNX operators.
*/
public enum ONNXOperators {
/**
* Identity.
*/
IDENTITY("Identity",1,1),
/**
* Concatenates tensors.
*/
CONCAT("Concat",VARIADIC_INPUT,1, Collections.singletonList(
new ONNXAttribute("axis", OnnxMl.AttributeProto.AttributeType.INT, true)
)),
/**
* Sigmoid element-wise.
*/
SIGMOID("Sigmoid",1,1),
/**
* Softmax.
* <ul>
* <li>{@code axis} defaults to -1.</li>
* </ul>
*/
SOFTMAX("Softmax",1,1, Collections.singletonList(
new ONNXAttribute("axis", OnnxMl.AttributeProto.AttributeType.INT, false)
)),
/**
* Element-wise addition with broadcasting.
*/
ADD("Add",2,1),
/**
* Element-wise subtraction with broadcasting.
*/
SUB("Sub",2,1),
/**
* Element-wise multiplication with broadcasting.
*/
MUL("Mul",2,1),
/**
* Element-wise division with broadcasting.
*/
DIV("Div",2,1),
/**
* Element-wise exponentiation with broadcasting.
*/
POW("Pow",2,1),
/**
* Compute the minimum along the specified axes of the tensor.
* <ul>
* <li>{@code axes} defaults to all dimensions.</li>
* <li>{@code keepdims} defaults to 1 which means keep.</li>
* </ul>
*/
REDUCE_MIN("ReduceMin",1,1,Arrays.asList(
new ONNXAttribute("axes", OnnxMl.AttributeProto.AttributeType.INTS,false),
new ONNXAttribute("keepdims", OnnxMl.AttributeProto.AttributeType.INT,false)
)),
/**
* Compute the sum along the specified axes of the tensor.
* <ul>
* <li>{@code axes} defaults to all dimensions.</li>
* <li>{@code keepdims} defaults to 1 which means keep.</li>
* </ul>
*/
REDUCE_SUM("ReduceSum",1,1,Arrays.asList(
new ONNXAttribute("axes", OnnxMl.AttributeProto.AttributeType.INTS, false), //Opset 11
new ONNXAttribute("keepdims", OnnxMl.AttributeProto.AttributeType.INT, false)
)),
/**
* General Matrix Multiply: {@code alpha*AB + beta*C}.
* <p>
* The {@code C} input is optional, and if not supplied is treated as zero.
* <ul>
* <li>{@code alpha} defaults to 1.0</li>
* <li>{@code beta} defaults to 1.0</li>
* <li>{@code transA} defaults to 0 (i.e., not transposed)</li>
* <li>{@code transB} defaults to 0 (i.e., not transposed)</li>
* </ul>
*/
GEMM("Gemm",2,1, 1, Arrays.asList(
new ONNXAttribute("alpha", OnnxMl.AttributeProto.AttributeType.FLOAT,false),
new ONNXAttribute("beta", OnnxMl.AttributeProto.AttributeType.FLOAT,false),
new ONNXAttribute("transA", OnnxMl.AttributeProto.AttributeType.INT,false),
new ONNXAttribute("transB", OnnxMl.AttributeProto.AttributeType.INT,false)
));
/**
* The operator name.
*/
public final String opName;
/**
* The number of inputs.
*/
public final int numInputs;
/**
* The number of optional inputs.
*/
public final int numOptionalInputs;
/**
* The number of outputs.
*/
public final int numOutputs;
/**
* The operator attributes.
*/
public final Map<String,ONNXAttribute> attributes;
/**
* The mandatory attribute names.
*/
public final Set<String> mandatoryAttributeNames;
/**
* Opset supported by these definitions.
*/
private static final int OPSET_VERSION = 11;
/**
* Builds an operator without attributes.
* @param value The operator name.
* @param numInputs The number of inputs.
* @param numOutputs The number of outputs.
*/
private ONNXOperators(String value, int numInputs, int numOutputs) {
this(value,numInputs,0,numOutputs);
}
/**
* Builds an operator without attributes and with optional inputs.
* @param value The operator name.
* @param numInputs The number of inputs.
* @param numOptionalInputs The number of optional inputs.
* @param numOutputs The number of outputs.
*/
private ONNXOperators(String value, int numInputs, int numOptionalInputs, int numOutputs) {
this.opName = value;
this.numInputs = numInputs;
this.numOptionalInputs = numOptionalInputs;
this.numOutputs = numOutputs;
this.attributes = Collections.emptyMap();
this.mandatoryAttributeNames = Collections.emptySet();
}
/**
* Builds an operator with attributes.
* @param value The operator name.
* @param numInputs The number of inputs.
* @param numOutputs The number of outputs.
* @param attributes The attributes.
*/
private ONNXOperators(String value, int numInputs, int numOutputs, List<ONNXAttribute> attributes) {
this(value,numInputs,0,numOutputs,attributes);
}
/**
* Builds an operator with attributes and optional inputs.
* @param value The operator name.
* @param numInputs The number of inputs.
* @param numOptionalInputs The number of optional inputs.
* @param numOutputs The number of outputs.
* @param attributes The attributes.
*/
private ONNXOperators(String value, int numInputs, int numOptionalInputs, int numOutputs, List<ONNXAttribute> attributes) {
this.opName = value;
this.numInputs = numInputs;
this.numOptionalInputs = numOptionalInputs;
this.numOutputs = numOutputs;
Map<String,ONNXAttribute> attributeMap = new HashMap<>();
Set<String> attributeSet = new HashSet<>();
for (ONNXAttribute a : attributes) {
attributeMap.put(a.getName(),a);
if (a.isMandatory()) {
attributeSet.add(a.getName());
}
}
if (attributes.size() != attributeMap.size()) {
throw new IllegalArgumentException("Duplicate attribute in enum declaration - " + attributes);
}
this.attributes = Collections.unmodifiableMap(attributeMap);
this.mandatoryAttributeNames = attributeSet.isEmpty() ? Collections.emptySet() : Collections.unmodifiableSet(attributeSet);
}
/**
* Builds this node based on the supplied inputs and output.
* Throws {@link IllegalArgumentException} if this operator takes more than a single input or output.
* @param context The onnx context used to ensure this node has a unique name.
* @param input The name of the input.
* @param output The name of the output.
* @return The NodeProto.
*/
public OnnxMl.NodeProto build(ONNXContext context, String input, String output) {
return build(context,new String[]{input},new String[]{output},Collections.emptyMap());
}
/**
* Builds this node based on the supplied inputs and output.
* Throws {@link IllegalArgumentException} if this operator takes more than a single input or output.
* May throw {@link UnsupportedOperationException} if the attribute type is not supported.
* @param context The onnx context used to ensure this node has a unique name.
* @param input The names of the input.
* @param output The name of the output.
* @param attributeValues The attribute names and values.
* @return The NodeProto.
*/
public OnnxMl.NodeProto build(ONNXContext context, String input, String output, Map<String,Object> attributeValues) {
return build(context,new String[]{input},new String[]{output},attributeValues);
}
/**
* Builds this node based on the supplied inputs and output.
* Throws {@link IllegalArgumentException} if the number of inputs or outputs is wrong.
* @param context The onnx context used to ensure this node has a unique name.
* @param inputs The names of the inputs.
* @param output The name of the output.
* @return The NodeProto.
*/
public OnnxMl.NodeProto build(ONNXContext context, String[] inputs, String output) {
return build(context,inputs,new String[]{output},Collections.emptyMap());
}
/**
* Builds this node based on the supplied inputs and output.
* Throws {@link IllegalArgumentException} if the number of inputs, outputs or attributes is wrong.
* May throw {@link UnsupportedOperationException} if the attribute type is not supported.
* @param context The onnx context used to ensure this node has a unique name.
* @param inputs The names of the inputs.
* @param output The name of the output.
* @param attributeValues The attribute names and values.
* @return The NodeProto.
*/
public OnnxMl.NodeProto build(ONNXContext context, String[] inputs, String output, Map<String,Object> attributeValues) {
return build(context,inputs,new String[]{output},attributeValues);
}
/**
* Builds this node based on the supplied inputs and outputs.
* Throws {@link IllegalArgumentException} if the number of inputs or outputs is wrong.
* @param context The onnx context used to ensure this node has a unique name.
* @param inputs The names of the inputs.
* @param outputs The names of the outputs.
* @return The NodeProto.
*/
public OnnxMl.NodeProto build(ONNXContext context, String[] inputs, String[] outputs) {
return build(context,inputs,outputs,Collections.emptyMap());
}
/**
* Builds this node based on the supplied inputs and outputs.
* Throws {@link IllegalArgumentException} if the number of inputs, outputs or attributes is wrong.
* May throw {@link UnsupportedOperationException} if the attribute type is not supported.
* @param context The onnx context used to ensure this node has a unique name.
* @param inputs The names of the inputs.
* @param outputs The names of the outputs.
* @param attributeValues The attribute names and values.
* @return The NodeProto.
*/
public OnnxMl.NodeProto build(ONNXContext context, String[] inputs, String[] outputs, Map<String,Object> attributeValues) {
if ((numInputs != VARIADIC_INPUT) && ((inputs.length < numInputs) || (inputs.length > numInputs + numOptionalInputs))) {
throw new IllegalArgumentException("Expected " + numInputs + " inputs, with " + numOptionalInputs + " optional inputs, but received " + inputs.length);
}
if (outputs.length != numOutputs) {
throw new IllegalArgumentException("Expected " + numOutputs + " outputs, but received " + outputs.length);
}
if (attributeValues.size() > attributes.size()) {
throw new IllegalArgumentException("Found more attributes than expected, received " + attributeValues.size() + ", expected at most " + attributes.size());
}
if (!attributes.keySet().containsAll(attributeValues.keySet())) {
throw new IllegalArgumentException("Unexpected attribute found, received " + attributeValues.keySet() + ", expected values from " + attributes.keySet());
}
if (!attributeValues.keySet().containsAll(mandatoryAttributeNames)) {
throw new IllegalArgumentException("Expected to find all mandatory attributes, received " + attributeValues.keySet() + ", expected " + mandatoryAttributeNames);
}
OnnxMl.NodeProto.Builder nodeBuilder = OnnxMl.NodeProto.newBuilder();
for (String i : inputs) {
nodeBuilder.addInput(i);
}
for (String o : outputs) {
nodeBuilder.addOutput(o);
}
nodeBuilder.setName(context.generateUniqueName(opName));
nodeBuilder.setOpType(opName);
for (Map.Entry<String,Object> e : attributeValues.entrySet()) {
ONNXAttribute attr = attributes.get(e.getKey());
nodeBuilder.addAttribute(attr.build(e.getValue()));
}
return nodeBuilder.build();
}
/**
* Returns the opset version supported by these operators.
* @return The opset version.
*/
public static int getOpsetVersion() {
return OPSET_VERSION;
}
/**
* Returns the opset proto for these operators.
* @return The opset proto.
*/
public static OnnxMl.OperatorSetIdProto getOpsetProto() {
return OnnxMl.OperatorSetIdProto.newBuilder().setVersion(ONNXOperators.getOpsetVersion()).build();
}
}
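// Illustrative usage (a sketch, not part of the original source; the tensor names are
// made up and "context" is assumed to be an existing ONNXContext). transB = 1 requests
// a transposed weight matrix, i.e. output = input * weight^T + bias:
// OnnxMl.NodeProto gemm = ONNXOperators.GEMM.build(context,
//         new String[]{"input", "weight", "bias"}, "output",
//         Collections.singletonMap("transB", 1));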
<|start_filename|>Regression/LibSVM/src/test/java/org/tribuo/regression/libsvm/TestLibSVM.java<|end_filename|>
/*
* Copyright (c) 2015-2020, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.regression.libsvm;
import com.oracle.labs.mlrg.olcut.util.Pair;
import libsvm.svm_model;
import org.tribuo.Dataset;
import org.tribuo.Model;
import org.tribuo.common.libsvm.KernelType;
import org.tribuo.common.libsvm.LibSVMModel;
import org.tribuo.common.libsvm.LibSVMTrainer;
import org.tribuo.common.libsvm.SVMParameters;
import org.tribuo.regression.Regressor;
import org.tribuo.regression.evaluation.RegressionEvaluation;
import org.tribuo.regression.evaluation.RegressionEvaluator;
import org.tribuo.regression.example.RegressionDataGenerator;
import org.tribuo.regression.libsvm.SVMRegressionType.SVMMode;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.tribuo.test.Helpers;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.URL;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class TestLibSVM {
private static final SVMParameters<Regressor> linearParams = new SVMParameters<>(new SVMRegressionType(SVMMode.EPSILON_SVR), KernelType.LINEAR);
private static final LibSVMRegressionTrainer linear = new LibSVMRegressionTrainer(linearParams);
private static final SVMParameters<Regressor> rbfParams;
private static final LibSVMRegressionTrainer rbf;
private static final LibSVMRegressionTrainer linStandardize = new LibSVMRegressionTrainer(new SVMParameters<>(new SVMRegressionType(SVMMode.NU_SVR), KernelType.LINEAR),true);
private static final RegressionEvaluator eval = new RegressionEvaluator();
static {
rbfParams = new SVMParameters<>(new SVMRegressionType(SVMMode.NU_SVR),KernelType.RBF);
rbfParams.setGamma(0.5);
rbfParams.setNu(0.5);
rbfParams.setEpsilon(0.5);
rbf = new LibSVMRegressionTrainer(rbfParams);
}
private static final URL TEST_REGRESSION_REORDER_MODEL = TestLibSVM.class.getResource("libsvm-4.1.0.model");
@BeforeAll
public static void setup() {
Logger logger = Logger.getLogger(LibSVMTrainer.class.getName());
logger.setLevel(Level.WARNING);
}
public static Model<Regressor> testLibSVM(Pair<Dataset<Regressor>,Dataset<Regressor>> p) {
LibSVMModel<Regressor> linearModel = linear.train(p.getA());
RegressionEvaluation linearEval = eval.evaluate(linearModel,p.getB());
LibSVMRegressionModel rbfModel = (LibSVMRegressionModel) rbf.train(p.getA());
RegressionEvaluation rbfEval = eval.evaluate(rbfModel,p.getB());
return rbfModel;
}
@Test
public void testDenseData() {
Pair<Dataset<Regressor>,Dataset<Regressor>> p = RegressionDataGenerator.denseTrainTest();
Model<Regressor> model = testLibSVM(p);
Helpers.testModelSerialization(model,Regressor.class);
}
@Test
public void testSparseData() {
Pair<Dataset<Regressor>,Dataset<Regressor>> p = RegressionDataGenerator.sparseTrainTest();
testLibSVM(p);
}
@Test
public void testInvalidExample() {
assertThrows(IllegalArgumentException.class, () -> {
Pair<Dataset<Regressor>, Dataset<Regressor>> p = RegressionDataGenerator.denseTrainTest();
Model<Regressor> m = linear.train(p.getA());
m.predict(RegressionDataGenerator.invalidSparseExample());
});
}
@Test
public void testEmptyExample() {
assertThrows(IllegalArgumentException.class, () -> {
Pair<Dataset<Regressor>, Dataset<Regressor>> p = RegressionDataGenerator.denseTrainTest();
Model<Regressor> m = linear.train(p.getA());
m.predict(RegressionDataGenerator.emptyExample());
});
}
@Test
public void testMultiDenseData() {
Pair<Dataset<Regressor>,Dataset<Regressor>> p = RegressionDataGenerator.multiDimDenseTrainTest();
testLibSVM(p);
}
@Test
public void testThreeDenseData() {
Pair<Dataset<Regressor>,Dataset<Regressor>> p = RegressionDataGenerator.threeDimDenseTrainTest(1.0,true);
Model<Regressor> rbfModel = rbf.train(p.getA());
RegressionEvaluation rbfEval = eval.evaluate(rbfModel,p.getB());
double expectedDim1 = 0.0038608193481045605;
double expectedDim2 = 0.0038608193481045605;
double expectedDim3 = -0.12392916600305548;
double expectedAve = -0.03873584243561545;
assertEquals(expectedDim1,rbfEval.r2(new Regressor(RegressionDataGenerator.firstDimensionName,Double.NaN)),1e-6);
assertEquals(expectedDim2,rbfEval.r2(new Regressor(RegressionDataGenerator.secondDimensionName,Double.NaN)),1e-6);
assertEquals(expectedDim3,rbfEval.r2(new Regressor(RegressionDataGenerator.thirdDimensionName,Double.NaN)),1e-6);
assertEquals(expectedAve,rbfEval.averageR2(),1e-6);
}
@Test
public void testMultiModelsDifferent() {
Pair<Dataset<Regressor>, Dataset<Regressor>> p = RegressionDataGenerator.multiDimDenseTrainTest();
LibSVMRegressionModel rbfModel = (LibSVMRegressionModel) rbf.train(p.getA());
List<svm_model> rbfModelList = rbfModel.getInnerModels();
assertEquals(2, rbfModelList.size());
double[] firstSV = rbfModelList.get(0).sv_coef[0];
double[] secondSV = rbfModelList.get(1).sv_coef[0];
// The two dimensions are the inverse of each other, and should have inverted sv_coef.
for (int i = 0; i < firstSV.length; i++) {
firstSV[i] = -firstSV[i];
}
assertArrayEquals(firstSV, secondSV);
}
@Test
public void testMultiStandardizedModelsDifferent() {
Pair<Dataset<Regressor>, Dataset<Regressor>> p = RegressionDataGenerator.multiDimDenseTrainTest();
LibSVMRegressionModel linSModel = (LibSVMRegressionModel) linStandardize.train(p.getA());
List<svm_model> linModelList = linSModel.getInnerModels();
assertEquals(2,linModelList.size());
double[] means = linSModel.getMeans();
double[] variances = linSModel.getVariances();
assertEquals(means[0],-means[1]);
assertEquals(variances[0],variances[1]);
// The two dimensions are the inverse of each other, and should have inverted sv_coef.
// However the fact that some values are negative means the sv_coefs end up slightly different,
// and it appears to happen inside LibSVM.
/*
double[] firstSV = linModelList.get(0).sv_coef[0];
double[] secondSV = linModelList.get(1).sv_coef[0];
for (int i = 0; i < firstSV.length; i++) {
firstSV[i] = -firstSV[i];
}
assertArrayEquals(firstSV,secondSV);
*/
}
@Test
public void testMultiSparseData() {
Pair<Dataset<Regressor>,Dataset<Regressor>> p = RegressionDataGenerator.multiDimSparseTrainTest();
testLibSVM(p);
}
@Test
public void testMultiInvalidExample() {
assertThrows(IllegalArgumentException.class, () -> {
Pair<Dataset<Regressor>, Dataset<Regressor>> p = RegressionDataGenerator.multiDimDenseTrainTest();
Model<Regressor> m = linear.train(p.getA());
m.predict(RegressionDataGenerator.invalidMultiDimSparseExample());
});
}
@Test
public void testMultiEmptyExample() {
assertThrows(IllegalArgumentException.class, () -> {
Pair<Dataset<Regressor>, Dataset<Regressor>> p = RegressionDataGenerator.multiDimDenseTrainTest();
Model<Regressor> m = linear.train(p.getA());
m.predict(RegressionDataGenerator.emptyMultiDimExample());
});
}
@Test
public void testRegressionReordering() throws IOException, ClassNotFoundException {
try (ObjectInputStream ois = new ObjectInputStream(TEST_REGRESSION_REORDER_MODEL.openStream())) {
@SuppressWarnings("unchecked")
Model<Regressor> serializedModel = (Model<Regressor>) ois.readObject();
Pair<Dataset<Regressor>,Dataset<Regressor>> p = RegressionDataGenerator.threeDimDenseTrainTest(1.0, false);
RegressionEvaluation llEval = eval.evaluate(serializedModel,p.getB());
double expectedDim1 = 0.0038608193481045605;
double expectedDim2 = 0.0038608193481045605;
double expectedDim3 = -0.12392916600305548;
double expectedAve = -0.03873584243561545;
assertEquals(expectedDim1,llEval.r2(new Regressor(RegressionDataGenerator.firstDimensionName,Double.NaN)),1e-6);
assertEquals(expectedDim2,llEval.r2(new Regressor(RegressionDataGenerator.secondDimensionName,Double.NaN)),1e-6);
assertEquals(expectedDim3,llEval.r2(new Regressor(RegressionDataGenerator.thirdDimensionName,Double.NaN)),1e-6);
assertEquals(expectedAve,llEval.averageR2(),1e-6);
}
}
}
<|start_filename|>Regression/LibLinear/src/main/java/org/tribuo/regression/liblinear/LibLinearRegressionModel.java<|end_filename|>
/*
* Copyright (c) 2015-2020, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.regression.liblinear;
import com.oracle.labs.mlrg.olcut.util.Pair;
import org.tribuo.Example;
import org.tribuo.Excuse;
import org.tribuo.Feature;
import org.tribuo.ImmutableFeatureMap;
import org.tribuo.ImmutableOutputInfo;
import org.tribuo.Model;
import org.tribuo.Prediction;
import org.tribuo.common.liblinear.LibLinearModel;
import org.tribuo.common.liblinear.LibLinearTrainer;
import org.tribuo.provenance.ModelProvenance;
import org.tribuo.regression.ImmutableRegressionInfo;
import org.tribuo.regression.Regressor;
import de.bwaldvogel.liblinear.FeatureNode;
import de.bwaldvogel.liblinear.Linear;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.logging.Logger;
/**
* A {@link Model} which wraps a LibLinear-java model.
* <p>
* It disables the LibLinear debug output as it's very chatty.
* <p>
* It contains an independent liblinear model for each regression dimension.
* <p>
* See:
* <pre>
* <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
* "LIBLINEAR: A library for Large Linear Classification"
* Journal of Machine Learning Research, 2008.
* </pre>
* and for the original algorithm:
* <pre>
* <NAME>, <NAME>.
* "Support-Vector Networks"
* Machine Learning, 1995.
* </pre>
*/
public class LibLinearRegressionModel extends LibLinearModel<Regressor> {
private static final long serialVersionUID = 2L;
private static final Logger logger = Logger.getLogger(LibLinearRegressionModel.class.getName());
private final String[] dimensionNames;
// Not final as it doesn't exist in 4.0 or 4.1 and so must be created on deserialization.
private int[] mapping;
LibLinearRegressionModel(String name, ModelProvenance description, ImmutableFeatureMap featureIDMap, ImmutableOutputInfo<Regressor> outputInfo, List<de.bwaldvogel.liblinear.Model> models) {
super(name, description, featureIDMap, outputInfo, false, models);
this.dimensionNames = Regressor.extractNames(outputInfo);
this.mapping = ((ImmutableRegressionInfo) outputInfo).getIDtoNaturalOrderMapping();
}
@Override
public Prediction<Regressor> predict(Example<Regressor> example) {
FeatureNode[] features = LibLinearTrainer.exampleToNodes(example, featureIDMap, null);
// Bias feature is always set
if (features.length == 1) {
throw new IllegalArgumentException("No features found in Example " + example.toString());
}
double[] scores = new double[models.get(0).getNrClass()];
double[] regressedValues = new double[models.size()];
// Map through the id -> regressor dimension natural order (i.e., lexicographic) to ensure the regressor is
// constructed correctly.
for (int i = 0; i < regressedValues.length; i++) {
regressedValues[mapping[i]] = Linear.predictValues(models.get(i), features, scores);
}
Regressor regressor = new Regressor(dimensionNames,regressedValues);
return new Prediction<>(regressor, features.length - 1, example);
}
@Override
public Map<String, List<Pair<String, Double>>> getTopFeatures(int n) {
int maxFeatures = n < 0 ? featureIDMap.size() : n;
double[][] featureWeights = getFeatureWeights();
Comparator<Pair<String, Double>> comparator = Comparator.comparingDouble(p -> Math.abs(p.getB()));
Map<String, List<Pair<String, Double>>> map = new HashMap<>();
PriorityQueue<Pair<String, Double>> q = new PriorityQueue<>(maxFeatures, comparator);
for (int i = 0; i < featureWeights.length; i++) {
// Exclude bias
int numFeatures = featureWeights[i].length - 1;
for (int j = 0; j < numFeatures; j++) {
Pair<String, Double> cur = new Pair<>(featureIDMap.get(j).getName(), featureWeights[i][j]);
if (maxFeatures < 0 || q.size() < maxFeatures) {
q.offer(cur);
} else if (comparator.compare(cur, q.peek()) > 0) {
q.poll();
q.offer(cur);
}
}
List<Pair<String, Double>> list = new ArrayList<>();
while (q.size() > 0) {
list.add(q.poll());
}
Collections.reverse(list);
map.put(dimensionNames[mapping[i]], list);
}
return map;
}
@Override
protected LibLinearRegressionModel copy(String newName, ModelProvenance newProvenance) {
List<de.bwaldvogel.liblinear.Model> newModels = new ArrayList<>();
for (de.bwaldvogel.liblinear.Model m : models) {
newModels.add(copyModel(m));
}
return new LibLinearRegressionModel(newName,newProvenance,featureIDMap,outputIDInfo,newModels);
}
@Override
protected double[][] getFeatureWeights() {
double[][] featureWeights = new double[models.size()][];
for (int i = 0; i < models.size(); i++) {
featureWeights[i] = models.get(i).getFeatureWeights();
}
return featureWeights;
}
/**
     * The call to model.getFeatureWeights in the public methods copies the
     * weights array, so this inner method exists to avoid repeating that copy in getExcuses.
* <p>
* If it becomes a problem then we could cache the feature weights in the
* model.
*
* @param e The example.
* @param allFeatureWeights The feature weights.
* @return An excuse for this example.
*/
@Override
protected Excuse<Regressor> innerGetExcuse(Example<Regressor> e, double[][] allFeatureWeights) {
Prediction<Regressor> prediction = predict(e);
Map<String, List<Pair<String, Double>>> weightMap = new HashMap<>();
for (int i = 0; i < allFeatureWeights.length; i++) {
List<Pair<String, Double>> scores = new ArrayList<>();
for (Feature f : e) {
int id = featureIDMap.getID(f.getName());
if (id > -1) {
double score = allFeatureWeights[i][id] * f.getValue();
scores.add(new Pair<>(f.getName(), score));
}
}
scores.sort((o1, o2) -> o2.getB().compareTo(o1.getB()));
weightMap.put(dimensionNames[mapping[i]], scores);
}
return new Excuse<>(e, prediction, weightMap);
}
private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
// Add mapping field to 4.0, 4.1 models and rearrange the dimensions.
if (mapping == null) {
this.mapping = ((ImmutableRegressionInfo) outputIDInfo).getIDtoNaturalOrderMapping();
List<de.bwaldvogel.liblinear.Model> newModels = new ArrayList<>(this.models);
for (int i = 0; i < mapping.length; i++) {
newModels.set(i,this.models.get(mapping[i]));
}
this.models = Collections.unmodifiableList(newModels);
}
}
}
<|start_filename|>MultiLabel/SGD/src/main/java/org/tribuo/multilabel/sgd/fm/FMMultiLabelTrainer.java<|end_filename|>
/*
* Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.multilabel.sgd.fm;
import com.oracle.labs.mlrg.olcut.config.Config;
import org.tribuo.ImmutableFeatureMap;
import org.tribuo.ImmutableOutputInfo;
import org.tribuo.common.sgd.AbstractFMTrainer;
import org.tribuo.common.sgd.FMParameters;
import org.tribuo.common.sgd.SGDObjective;
import org.tribuo.math.StochasticGradientOptimiser;
import org.tribuo.math.la.SGDVector;
import org.tribuo.math.la.SparseVector;
import org.tribuo.multilabel.MultiLabel;
import org.tribuo.multilabel.sgd.MultiLabelObjective;
import org.tribuo.multilabel.sgd.objectives.BinaryCrossEntropy;
import org.tribuo.provenance.ModelProvenance;
import java.util.logging.Logger;
/**
* A trainer for a multi-label classification factorization machine using SGD.
* <p>
* See:
* <pre>
* <NAME>.
* Factorization machines.
* 2010 IEEE International Conference on Data Mining
* </pre>
*/
public class FMMultiLabelTrainer extends AbstractFMTrainer<MultiLabel, SGDVector> {
private static final Logger logger = Logger.getLogger(FMMultiLabelTrainer.class.getName());
@Config(description = "The classification objective function to use.")
private MultiLabelObjective objective = new BinaryCrossEntropy();
/**
* Constructs an SGD trainer for a multi-label factorization machine.
*
* @param objective The objective function to optimise.
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param loggingInterval Log the loss after this many iterations. If -1 don't log anything.
* @param minibatchSize The size of any minibatches.
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
*/
public FMMultiLabelTrainer(MultiLabelObjective objective, StochasticGradientOptimiser optimiser, int epochs,
int loggingInterval, int minibatchSize, long seed,
int factorizedDimSize, double variance) {
super(optimiser, epochs, loggingInterval, minibatchSize, seed, factorizedDimSize, variance);
this.objective = objective;
}
/**
* Constructs an SGD trainer for a multi-label factorization machine.
* <p>
* Sets the minibatch size to 1.
*
* @param objective The objective function to optimise.
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param loggingInterval Log the loss after this many iterations. If -1 don't log anything.
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
*/
public FMMultiLabelTrainer(MultiLabelObjective objective, StochasticGradientOptimiser optimiser, int epochs,
int loggingInterval, long seed,
int factorizedDimSize, double variance) {
this(objective, optimiser, epochs, loggingInterval, 1, seed, factorizedDimSize, variance);
}
/**
* Constructs an SGD trainer for a multi-label factorization machine.
* <p>
* Sets the minibatch size to 1 and the logging interval to 1000.
*
* @param objective The objective function to optimise.
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
*/
public FMMultiLabelTrainer(MultiLabelObjective objective, StochasticGradientOptimiser optimiser, int epochs,
long seed, int factorizedDimSize, double variance) {
this(objective, optimiser, epochs, 1000, 1, seed, factorizedDimSize, variance);
}
/**
* For olcut.
*/
private FMMultiLabelTrainer() {
super();
}
@Override
protected SparseVector getTarget(ImmutableOutputInfo<MultiLabel> outputInfo, MultiLabel output) {
return output.convertToSparseVector(outputInfo);
}
@Override
protected SGDObjective<SGDVector> getObjective() {
return objective;
}
@Override
protected FMMultiLabelModel createModel(String name, ModelProvenance provenance, ImmutableFeatureMap featureMap, ImmutableOutputInfo<MultiLabel> outputInfo, FMParameters parameters) {
return new FMMultiLabelModel(name, provenance, featureMap, outputInfo, parameters, objective.getNormalizer(), objective.isProbabilistic(), objective.threshold());
}
@Override
protected String getModelClassName() {
return FMMultiLabelModel.class.getName();
}
@Override
public String toString() {
return "FMMultiLabelTrainer(objective=" + objective.toString() + ",optimiser=" + optimiser.toString() +
",epochs=" + epochs + ",minibatchSize=" + minibatchSize + ",seed=" + seed +
",factorizedDimSize=" + factorizedDimSize + ",variance=" + variance +
")";
}
}
<|start_filename|>Regression/SGD/src/main/java/org/tribuo/regression/sgd/fm/FMRegressionTrainer.java<|end_filename|>
/*
* Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.regression.sgd.fm;
import com.oracle.labs.mlrg.olcut.config.Config;
import org.tribuo.ImmutableFeatureMap;
import org.tribuo.ImmutableOutputInfo;
import org.tribuo.common.sgd.AbstractFMTrainer;
import org.tribuo.common.sgd.FMParameters;
import org.tribuo.common.sgd.SGDObjective;
import org.tribuo.math.StochasticGradientOptimiser;
import org.tribuo.math.la.DenseVector;
import org.tribuo.provenance.ModelProvenance;
import org.tribuo.regression.ImmutableRegressionInfo;
import org.tribuo.regression.Regressor;
import org.tribuo.regression.sgd.RegressionObjective;
import java.util.logging.Logger;
/**
* A trainer for a regression factorization machine using SGD.
* Independently trains each output dimension, unless they are tied together in the
* optimiser.
* <p>
* See:
* <pre>
* <NAME>.
* Factorization machines.
* 2010 IEEE International Conference on Data Mining
* </pre>
*/
public class FMRegressionTrainer extends AbstractFMTrainer<Regressor, DenseVector> {
private static final Logger logger = Logger.getLogger(FMRegressionTrainer.class.getName());
@Config(mandatory = true, description = "The regression objective to use.")
private RegressionObjective objective;
@Config(mandatory = true, description = "Standardise the output variables before fitting the model.")
private boolean standardise;
/**
* Constructs an SGD trainer for a factorization machine.
*
* @param objective The objective function to optimise.
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param loggingInterval Log the loss after this many iterations. If -1 don't log anything.
* @param minibatchSize The size of any minibatches.
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
* @param standardise Standardise the output regressors before fitting the model.
*/
public FMRegressionTrainer(RegressionObjective objective, StochasticGradientOptimiser optimiser, int epochs,
int loggingInterval, int minibatchSize, long seed,
int factorizedDimSize, double variance, boolean standardise) {
super(optimiser, epochs, loggingInterval, minibatchSize, seed, factorizedDimSize, variance);
this.objective = objective;
this.standardise = standardise;
}
/**
* Constructs an SGD trainer for a factorization machine.
* <p>
* Sets the minibatch size to 1.
*
* @param objective The objective function to optimise.
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param loggingInterval Log the loss after this many iterations. If -1 don't log anything.
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
* @param standardise Standardise the output regressors before fitting the model.
*/
public FMRegressionTrainer(RegressionObjective objective, StochasticGradientOptimiser optimiser, int epochs,
int loggingInterval, long seed,
int factorizedDimSize, double variance, boolean standardise) {
this(objective, optimiser, epochs, loggingInterval, 1, seed, factorizedDimSize, variance, standardise);
}
/**
* Constructs an SGD trainer for a factorization machine.
* <p>
* Sets the minibatch size to 1 and the logging interval to 1000.
*
* @param objective The objective function to optimise.
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
* @param standardise Standardise the output regressors before fitting the model.
*/
public FMRegressionTrainer(RegressionObjective objective, StochasticGradientOptimiser optimiser, int epochs,
long seed, int factorizedDimSize, double variance, boolean standardise) {
this(objective, optimiser, epochs, 1000, 1, seed, factorizedDimSize, variance, standardise);
}
/**
* For olcut.
*/
private FMRegressionTrainer() {
super();
}
@Override
protected DenseVector getTarget(ImmutableOutputInfo<Regressor> outputInfo, Regressor output) {
ImmutableRegressionInfo regressionInfo = (ImmutableRegressionInfo) outputInfo;
double[] regressorsBuffer = new double[outputInfo.size()];
for (Regressor.DimensionTuple r : output) {
int id = outputInfo.getID(r);
double curValue = r.getValue();
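            // When standardise is set, centre the value by this dimension's training mean and scale it by the reported variance.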
if (standardise) {
curValue = (curValue - regressionInfo.getMean(id)) / regressionInfo.getVariance(id);
}
regressorsBuffer[id] = curValue;
}
return DenseVector.createDenseVector(regressorsBuffer);
}
@Override
protected SGDObjective<DenseVector> getObjective() {
return objective;
}
@Override
protected FMRegressionModel createModel(String name, ModelProvenance provenance, ImmutableFeatureMap featureMap, ImmutableOutputInfo<Regressor> outputInfo, FMParameters parameters) {
String[] dimensionNames = new String[outputInfo.size()];
for (Regressor r : outputInfo.getDomain()) {
int id = outputInfo.getID(r);
dimensionNames[id] = r.getNames()[0];
}
return new FMRegressionModel(name, dimensionNames, provenance, featureMap, outputInfo, parameters, standardise);
}
@Override
protected String getModelClassName() {
return FMRegressionModel.class.getName();
}
@Override
public String toString() {
return "FMRegressionTrainer(objective=" + objective.toString() + ",optimiser=" + optimiser.toString() +
",epochs=" + epochs + ",minibatchSize=" + minibatchSize + ",seed=" + seed +
",factorizedDimSize=" + factorizedDimSize + ",variance=" + variance +
",standardise=" + standardise + ")";
}
}
<|start_filename|>Classification/SGD/src/main/java/org/tribuo/classification/sgd/fm/FMClassificationTrainer.java<|end_filename|>
/*
* Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.classification.sgd.fm;
import com.oracle.labs.mlrg.olcut.config.Config;
import org.tribuo.ImmutableFeatureMap;
import org.tribuo.ImmutableOutputInfo;
import org.tribuo.classification.Label;
import org.tribuo.classification.sgd.LabelObjective;
import org.tribuo.classification.sgd.objectives.LogMulticlass;
import org.tribuo.common.sgd.AbstractFMTrainer;
import org.tribuo.common.sgd.FMParameters;
import org.tribuo.common.sgd.SGDObjective;
import org.tribuo.math.StochasticGradientOptimiser;
import org.tribuo.provenance.ModelProvenance;
import java.util.logging.Logger;
/**
* A trainer for a classification factorization machine using SGD.
* <p>
* See:
* <pre>
* <NAME>.
* Factorization machines.
* 2010 IEEE International Conference on Data Mining
* </pre>
*/
public class FMClassificationTrainer extends AbstractFMTrainer<Label, Integer> {
private static final Logger logger = Logger.getLogger(FMClassificationTrainer.class.getName());
@Config(description = "The classification objective function to use.")
private LabelObjective objective = new LogMulticlass();
/**
* Constructs an SGD trainer for a factorization machine.
*
* @param objective The objective function to optimise.
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param loggingInterval Log the loss after this many iterations. If -1 don't log anything.
* @param minibatchSize The size of any minibatches.
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
*/
public FMClassificationTrainer(LabelObjective objective, StochasticGradientOptimiser optimiser, int epochs,
int loggingInterval, int minibatchSize, long seed,
int factorizedDimSize, double variance) {
super(optimiser, epochs, loggingInterval, minibatchSize, seed, factorizedDimSize, variance);
this.objective = objective;
}
/**
* Constructs an SGD trainer for a factorization machine.
* <p>
* Sets the minibatch size to 1.
*
* @param objective The objective function to optimise.
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param loggingInterval Log the loss after this many iterations. If -1 don't log anything.
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
*/
public FMClassificationTrainer(LabelObjective objective, StochasticGradientOptimiser optimiser, int epochs,
int loggingInterval, long seed,
int factorizedDimSize, double variance) {
this(objective, optimiser, epochs, loggingInterval, 1, seed, factorizedDimSize, variance);
}
/**
* Constructs an SGD trainer for a factorization machine.
* <p>
* Sets the minibatch size to 1 and the logging interval to 1000.
*
* @param objective The objective function to optimise.
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
*/
public FMClassificationTrainer(LabelObjective objective, StochasticGradientOptimiser optimiser, int epochs,
long seed, int factorizedDimSize, double variance) {
this(objective, optimiser, epochs, 1000, 1, seed, factorizedDimSize, variance);
}
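    // A minimal usage sketch (the AdaGrad optimiser and the training dataset are assumptions, not part of this file):
    //   FMClassificationTrainer trainer =
    //       new FMClassificationTrainer(new LogMulticlass(), new AdaGrad(0.1), 5, 42L, 6, 0.1);
    //   Model<Label> model = trainer.train(trainingData);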
/**
* For olcut.
*/
private FMClassificationTrainer() {
super();
}
@Override
protected Integer getTarget(ImmutableOutputInfo<Label> outputInfo, Label output) {
return outputInfo.getID(output);
}
@Override
protected SGDObjective<Integer> getObjective() {
return objective;
}
@Override
protected FMClassificationModel createModel(String name, ModelProvenance provenance, ImmutableFeatureMap featureMap, ImmutableOutputInfo<Label> outputInfo, FMParameters parameters) {
return new FMClassificationModel(name, provenance, featureMap, outputInfo, parameters, objective.getNormalizer(), objective.isProbabilistic());
}
@Override
protected String getModelClassName() {
return FMClassificationModel.class.getName();
}
@Override
public String toString() {
return "FMClassificationTrainer(objective=" + objective.toString() + ",optimiser=" + optimiser.toString() +
",epochs=" + epochs + ",minibatchSize=" + minibatchSize + ",seed=" + seed +
",factorizedDimSize=" + factorizedDimSize + ",variance=" + variance +
")";
}
}
<|start_filename|>Common/SGD/src/main/java/org/tribuo/common/sgd/AbstractFMTrainer.java<|end_filename|>
/*
* Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.common.sgd;
import com.oracle.labs.mlrg.olcut.config.Config;
import com.oracle.labs.mlrg.olcut.config.PropertyException;
import org.tribuo.Output;
import org.tribuo.math.StochasticGradientOptimiser;
import java.util.SplittableRandom;
import java.util.logging.Logger;
/**
* A trainer for a quadratic factorization machine model which uses SGD.
* <p>
* It's an {@link AbstractSGDTrainer} operating on {@link FMParameters}.
* <p>
* See:
* <pre>
* <NAME>.
* Factorization machines.
* 2010 IEEE International Conference on Data Mining
* </pre>
*/
public abstract class AbstractFMTrainer<T extends Output<T>, U> extends AbstractSGDTrainer<T, U, AbstractFMModel<T>, FMParameters> {
private static final Logger logger = Logger.getLogger(AbstractFMTrainer.class.getName());
@Config(mandatory = true, description = "The size of the factorized feature representation.")
protected int factorizedDimSize;
@Config(mandatory = true, description = "The variance of the initializer.")
protected double variance;
/**
* Constructs an SGD trainer for a factorization machine.
*
* @param optimiser The gradient optimiser to use.
* @param epochs The number of epochs (complete passes through the training data).
* @param loggingInterval Log the loss after this many iterations. If -1 don't log anything.
* @param minibatchSize The size of any minibatches.
* @param seed A seed for the random number generator, used to shuffle the examples before each epoch.
* @param factorizedDimSize Size of the factorized feature representation.
* @param variance The variance of the initializer.
*/
protected AbstractFMTrainer(StochasticGradientOptimiser optimiser, int epochs, int loggingInterval,
int minibatchSize, long seed, int factorizedDimSize, double variance) {
super(optimiser, epochs, loggingInterval, minibatchSize, seed, false);
this.factorizedDimSize = factorizedDimSize;
this.variance = variance;
postConfig();
}
/**
* For olcut.
*/
protected AbstractFMTrainer() {
super(false);
}
@Override
public void postConfig() {
super.postConfig();
if (factorizedDimSize < 1) {
throw new PropertyException("", "factorizedDimSize", "Value must be positive.");
}
if (variance <= 0.0) {
throw new PropertyException("", "variance", "Value must be positive.");
}
}
/**
* Returns the default model name.
*
* @return The default model name.
*/
@Override
protected String getName() {
return "factorization-machine-model";
}
/**
* Constructs the trainable parameters object, in this case a {@link FMParameters} containing
* a weight matrix for the feature weights and a series of weight matrices for the factorized
* feature representation.
*
* @param numFeatures The number of input features.
* @param numOutputs The number of output dimensions.
* @param localRNG The RNG to use for parameter initialisation.
* @return The trainable parameters.
*/
@Override
protected FMParameters createParameters(int numFeatures, int numOutputs, SplittableRandom localRNG) {
return new FMParameters(localRNG, numFeatures, numOutputs, factorizedDimSize, variance);
}
}
| Craigacp/tribuo |
<|start_filename|>main.go<|end_filename|>
package main
import (
"fmt"
"os"
"github.com/urfave/cli"
)
func main() {
app := cli.NewApp()
app.Name = "informer"
app.Version = Version
app.Usage = ""
app.Author = "nashiox"
app.Email = "<EMAIL>"
app.Commands = Commands
if err := app.Run(os.Args); err != nil {
fmt.Fprintln(os.Stderr, err)
}
}
<|start_filename|>commands.go<|end_filename|>
package main
import (
"bufio"
"encoding/hex"
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/hpcloud/tail"
"github.com/urfave/cli"
)
var Commands = []cli.Command{
commandWatch,
commandReview,
commandList,
}
var commandWatch = cli.Command{
Name: "watch",
Usage: "",
Description: `
`,
Action: doWatch,
Flags: []cli.Flag{
cli.StringFlag{
Name: "output,o",
Usage: "",
},
},
}
var commandReview = cli.Command{
Name: "review",
Usage: "",
Description: `
`,
Action: doReview,
Flags: []cli.Flag{
cli.IntFlag{
Name: "delay,d",
Value: 100,
Usage: "",
},
},
}
var commandList = cli.Command{
Name: "list",
Usage: "",
Description: `
`,
Action: doList,
}
func debug(v ...interface{}) {
if os.Getenv("DEBUG") != "" {
log.Println(v...)
}
}
func assert(err error) {
if err != nil {
log.Fatal(err)
}
}
func doWatch(c *cli.Context) {
if len(c.Args()) < 1 {
_ = cli.ShowAppHelp(c)
os.Exit(1)
}
tty := c.Args()[0]
output := c.String("output")
if output == "" {
fp, err := ioutil.TempFile("/tmp", "informer")
assert(err)
defer fp.Close()
output = fp.Name()
}
if !strings.HasPrefix(tty, "pts/") {
fmt.Fprintf(os.Stderr, "Unrecognized psuedo terminal [%s]\n", tty)
os.Exit(2)
}
if _, err := os.Stat("/dev/" + tty); os.IsNotExist(err) {
fmt.Fprintf(os.Stderr, "Psuedo terminal [%s] currently does NOT exist.\n", tty)
os.Exit(2)
}
debug("DEBUG: Scanning for psuedo terminal ", tty)
out, err := exec.Command("ps", "fauwwx").Output()
assert(err)
psreg := regexp.MustCompile(
`\n(\S+)\s+(\d+)\s+\S+\s+\S+\s+\S+\s+\S+\s+\?\s+\S+\s+\S+\s+\S+\s+\S+[\|\\_ ]+\S*\bsshd\b.*\n\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+` + tty + `\s`,
)
if !psreg.Match(out) {
fmt.Fprintf(os.Stderr, "Unable to locate corresponding ssh session for [%s]", tty)
os.Exit(2)
}
pid := string(psreg.FindSubmatch(out)[2])
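	// Attach strace to that sshd process and record every read() syscall (up to 16384 bytes each) into the output file.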
cmd := exec.Command("strace", "-e", "read", "-s16384", "-q", "-x", "-p", pid, "-o", output)
_ = cmd.Start()
defer func() { _ = cmd.Process.Kill() }()
tmp, err := tail.TailFile(output, tail.Config{Follow: true})
assert(err)
fds := make(map[int]string, 2)
keys := make([]int, 2)
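	// Tail the strace log until reads on two distinct file descriptors have been seen; the highest-numbered fd is then watched as the terminal output stream.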
tmpreg := regexp.MustCompile(`(read)\((\d+), "(.*)"`)
for line := range tmp.Lines {
if tmpreg.Match([]byte(line.Text)) {
group := tmpreg.FindSubmatch([]byte(line.Text))
key, err := strconv.Atoi(string(group[2]))
assert(err)
fds[key] = string(group[1])
if len(fds) >= 2 {
for i := range fds {
keys = append(keys, i)
}
sort.Ints(keys)
break
}
}
}
tmp.Kill(nil)
out, err = exec.Command("clear").Output()
assert(err)
fmt.Print(string(out))
t, err := tail.TailFile(output, tail.Config{Follow: true})
assert(err)
defer t.Kill(nil)
outreg := regexp.MustCompile(
fmt.Sprintf(`read\(%d, "(.*)"`, keys[len(keys)-1]),
)
hexreg := regexp.MustCompile(`\\x(..)`)
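	// strace escapes raw bytes as \xNN sequences; decode them and unescape \t, \r, \n, \\ and \" before printing.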
for line := range t.Lines {
if outreg.Match([]byte(line.Text)) {
s := string(outreg.FindSubmatch([]byte(line.Text))[1])
s = hexreg.ReplaceAllStringFunc(s, func(src string) string {
tname := hexreg.FindStringSubmatch(src)
h, err := hex.DecodeString(tname[1])
assert(err)
return string(h)
})
s = strings.Replace(s, `\t`, "\t", -1)
s = strings.Replace(s, `\r`, "\r", -1)
s = strings.Replace(s, `\n`, "\n", -1)
s = strings.Replace(s, `\\`, "\\", -1)
s = strings.Replace(s, `\"`, `"`, -1)
fmt.Print(s)
}
}
}
func doReview(c *cli.Context) {
if len(c.Args()) < 1 {
_ = cli.ShowAppHelp(c)
os.Exit(1)
}
delay := c.Int("delay")
fp, err := os.Open(c.Args()[0])
assert(err)
fds := make(map[int]string, 2)
keys := make([]int, 2)
scanner := bufio.NewScanner(fp)
tmpreg := regexp.MustCompile(`(read)\((\d+), "(.*)"`)
for scanner.Scan() {
text := []byte(scanner.Text())
if tmpreg.Match(text) {
group := tmpreg.FindSubmatch(text)
key, err := strconv.Atoi(string(group[2]))
assert(err)
fds[key] = string(group[1])
if len(fds) >= 2 {
for i := range fds {
keys = append(keys, i)
}
sort.Ints(keys)
break
}
}
}
fp.Close()
out, err := exec.Command("clear").Output()
assert(err)
fmt.Print(string(out))
fp, err = os.Open(c.Args()[0])
assert(err)
defer fp.Close()
outreg := regexp.MustCompile(
fmt.Sprintf(`read\(%d, "(.*)"`, keys[len(keys)-1]),
)
hexreg := regexp.MustCompile(`\\x(..)`)
scanner = bufio.NewScanner(fp)
for scanner.Scan() {
text := []byte(scanner.Text())
if outreg.Match(text) {
s := string(outreg.FindSubmatch(text)[1])
s = hexreg.ReplaceAllStringFunc(s, func(src string) string {
tname := hexreg.FindStringSubmatch(src)
h, err := hex.DecodeString(tname[1])
assert(err)
return string(h)
})
s = strings.Replace(s, `\t`, "\t", -1)
s = strings.Replace(s, `\r`, "\r", -1)
s = strings.Replace(s, `\n`, "\n", -1)
s = strings.Replace(s, `\\`, "\\", -1)
s = strings.Replace(s, `\"`, `"`, -1)
fmt.Print(s)
time.Sleep(time.Duration(delay) * time.Millisecond)
}
}
fmt.Println()
assert(scanner.Err())
}
func doList(c *cli.Context) {
out, err := exec.Command("w", "-hs").Output()
assert(err)
fmt.Println(string(out))
}
| nashiox/informer |
<|start_filename|>src/index.js<|end_filename|>
import AlpineComponentMagicMethod from './component'
import AlpineFetchMagicMethod from './fetch'
import AlpineIntervalMagicMethod from './interval'
import AlpineRangeMagicMethod from './range'
import AlpineRefreshMagicMethod from './refresh'
import AlpineScreenMagicMethod from './screen'
import AlpineScrollMagicMethod from './scroll'
import AlpineTruncateMagicMethod from './truncate'
import AlpineUndoMagicMethod from './undo'
import AlpineUnsafeHTMLCustomDirective from './unsafeHTML'
export default {
AlpineComponentMagicMethod,
AlpineFetchMagicMethod,
AlpineIntervalMagicMethod,
AlpineRangeMagicMethod,
AlpineRefreshMagicMethod,
AlpineScreenMagicMethod,
AlpineScrollMagicMethod,
AlpineTruncateMagicMethod,
AlpineUndoMagicMethod,
AlpineUnsafeHTMLCustomDirective,
}
<|start_filename|>tests/interval.spec.js<|end_filename|>
import Alpine from 'alpinejs'
import AlpineIntervalMagicMethod from '../dist/interval'
beforeAll(() => {
window.Alpine = Alpine
})
beforeEach(() => {
jest.useFakeTimers("modern")
AlpineIntervalMagicMethod.start()
})
afterEach(() => {
jest.useRealTimers()
})
test('$interval > is called every x seconds', async () => {
document.body.innerHTML = `
<div x-data="{counter: '0'}" x-init="$interval(() => counter++, 1000)">
<p x-text="counter"></p>
</div>
`
Alpine.start()
expect(document.querySelector('p').textContent).toEqual('0')
jest.advanceTimersByTime(1200)
expect(document.querySelector('p').textContent).toEqual('1')
jest.advanceTimersByTime(3000)
expect(document.querySelector('p').textContent).toEqual('4')
})
test('$interval > can be delayed', async () => {
document.body.innerHTML = `
<div x-data="{counter: '0'}" x-init="$interval(() => counter++, {timer: 1000, delay: 2000})">
<p x-text="counter"></p>
</div>
`
Alpine.start()
expect(document.querySelector('p').textContent).toEqual('0')
jest.advanceTimersByTime(2200)
expect(document.querySelector('p').textContent).toEqual('0')
jest.advanceTimersByTime(1200)
expect(document.querySelector('p').textContent).toEqual('1')
jest.advanceTimersByTime(3200)
expect(document.querySelector('p').textContent).toEqual('4')
})
test('$interval > can be paused', async () => {
document.body.innerHTML = `
<div x-data="{counter: '0', autoIntervalTest: true}" x-init="$interval(() => counter++, 1000)">
<p x-text="counter"></p>
<button @click="autoIntervalTest = !autoIntervalTest"><button>
</div>
`
Alpine.start()
expect(document.querySelector('p').textContent).toEqual('0')
document.querySelector('button').click()
jest.advanceTimersByTime(1200)
expect(document.querySelector('p').textContent).toEqual('0')
document.querySelector('button').click()
jest.advanceTimersByTime(1200)
expect(document.querySelector('p').textContent).toEqual('1')
})
test('$interval > timeout is cleared correctly when paused', async () => {
document.body.innerHTML = `
<div x-data="{counter: '0', autoIntervalTest: true}" x-init="$interval(() => counter++, 1000)">
<p x-text="counter"></p>
<button @click="autoIntervalTest = !autoIntervalTest"><button>
</div>
`
Alpine.start()
expect(document.querySelector('p').textContent).toEqual('0')
document.querySelector('button').click()
document.querySelector('button').click()
jest.advanceTimersByTime(1200)
expect(document.querySelector('p').textContent).toEqual('1')
})
<|start_filename|>dist/unsafeHTML.js<|end_filename|>
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, (global.AlpineMagicHelpers = global.AlpineMagicHelpers || {}, global.AlpineMagicHelpers.unsafeHTML = factory()));
}(this, (function () { 'use strict';
var checkForAlpine = function checkForAlpine() {
if (!window.Alpine) {
throw new Error('[Magic Helpers] Alpine is required for the magic helpers to function correctly.');
}
if (!window.Alpine.version || !isValidVersion('2.5.0', window.Alpine.version)) {
throw new Error('Invalid Alpine version. Please use Alpine version 2.5.0 or above');
}
};
function isValidVersion(required, current) {
var requiredArray = required.split('.');
var currentArray = current.split('.');
for (var i = 0; i < requiredArray.length; i++) {
if (!currentArray[i] || parseInt(currentArray[i]) < parseInt(requiredArray[i])) {
return false;
}
}
return true;
}
var X_ATTR_RE = /^x-([a-z-]*)\b/i;
function parseHtmlAttribute(_ref) {
var name = _ref.name,
value = _ref.value;
var typeMatch = name.match(X_ATTR_RE);
var valueMatch = name.match(/:([a-z0-9\-:]+)/i);
var modifiers = name.match(/\.[^.\]]+(?=[^\]]*$)/g) || [];
return {
type: typeMatch ? typeMatch[1] : null,
value: valueMatch ? valueMatch[1] : null,
modifiers: modifiers.map(function (i) {
return i.replace('.', '');
}),
expression: value
};
}
function getXDirectives(el) {
return Array.from(el.attributes).filter(function (attr) {
return X_ATTR_RE.test(attr.name);
}).map(parseHtmlAttribute);
}
function importOrderCheck() {
// We only want to show the error once
if (window.Alpine && !window.AlpineMagicHelpers.__fatal) {
window.AlpineMagicHelpers.__fatal = setTimeout(function () {
console.error('%c*** ALPINE MAGIC HELPER: Fatal Error! ***\n\n\n' + 'Alpine magic helpers need to be loaded before Alpine ' + 'to avoid errors when Alpine initialises its component. \n\n' + 'Make sure the helper script is included before Alpine in ' + 'your page when using the defer attribute', 'font-size: 14px');
}, 200); // We set a small timeout to make sure we flush all the Alpine noise first
}
}
importOrderCheck();
var DIRECTIVE = 'unsafe-html';
var nodeScriptClone = function nodeScriptClone(node) {
var script = document.createElement('script');
script.text = node.innerHTML;
for (var i = 0; i < node.attributes.length; i++) {
var attr = node.attributes[i];
script.setAttribute(attr.name, attr.value);
}
return script;
};
var nodeScriptReplace = function nodeScriptReplace(node) {
if (node.tagName && node.tagName.toLowerCase() === 'script') {
node.parentNode.replaceChild(nodeScriptClone(node), node);
} else {
for (var i = 0; i < node.childNodes.length; i++) {
nodeScriptReplace(node.childNodes[i]);
}
}
return node;
};
var AlpineUnsafeHTMLCustomDirective = {
start: function start() {
checkForAlpine();
Alpine.onBeforeComponentInitialized(function (component) {
var legacyResolveBoundAttributes = component.resolveBoundAttributes;
component.resolveBoundAttributes = function (el, initialUpdate, extraVars) {
if (initialUpdate === void 0) {
initialUpdate = false;
}
var attrs = getXDirectives(el);
attrs.forEach(function (_ref) {
var type = _ref.type,
expression = _ref.expression;
if (type === DIRECTIVE) {
el.innerHTML = component.evaluateReturnExpression(el, expression, extraVars);
nodeScriptReplace(el);
}
});
return legacyResolveBoundAttributes.bind(component)(el, initialUpdate, extraVars);
};
});
}
};
var alpine = window.deferLoadingAlpine || function (alpine) {
return alpine();
};
window.deferLoadingAlpine = function (callback) {
AlpineUnsafeHTMLCustomDirective.start();
alpine(callback);
};
return AlpineUnsafeHTMLCustomDirective;
})));
<|start_filename|>src/screen.js<|end_filename|>
import config from './config'
import {
checkForAlpine,
importOrderCheck,
} from './utils'
importOrderCheck()
// Collection of components that use the `$screen` helper
const screenComponents = []
// Debounce `updateElements` method to prevent memory leak
const debouncedScreensUpdate = () => {
let update
// Update component if $el is in `screenComponents`
const updateScreens = () => {
clearTimeout(update)
update = setTimeout(() => {
screenComponents.forEach(($el) =>
$el && $el.__x && $el.__x.updateElements($el),
)
}, 150)
}
return updateScreens
}
const AlpineScreenMagicMethod = {
start() {
checkForAlpine()
// Bind `debouncedScreensUpdate` to resize event on window
// Note that `resize` event will be triggered on `orientationchange` event as well
window.addEventListener('resize', debouncedScreensUpdate())
Alpine.addMagicProperty('screen', ($el) => {
// Push $el if it's not in the `screenComponents`
if (!screenComponents.includes($el)) {
screenComponents.push($el)
}
return (breakpoint) => {
// Get current window width
const width = window.innerWidth
// Early return if breakpoint is provided as number
if (Number.isInteger(breakpoint)) return breakpoint <= width
// Get breakpoints from Config
const configBreakpoints = config.get('breakpoints')
// Check if breakpoint exists
if (configBreakpoints[breakpoint] === undefined) {
throw Error('Undefined $screen property: ' + breakpoint)
}
// Finally compare breakpoint with window width and return as boolean
return configBreakpoints[breakpoint] <= width
}
})
},
}
const alpine = window.deferLoadingAlpine || ((alpine) => alpine())
window.deferLoadingAlpine = (callback) => {
AlpineScreenMagicMethod.start()
alpine(callback)
}
export default AlpineScreenMagicMethod
<|start_filename|>src/interval.js<|end_filename|>
import {
checkForAlpine,
importOrderCheck,
} from './utils'
importOrderCheck()
const AlpineIntervalMagicMethod = {
start() {
checkForAlpine()
Alpine.addMagicProperty('interval', ($el) => {
return function (...parameters) {
if (typeof parameters[0] !== 'function') return parameters[0]
let timer = parameters[1]
let delay = 0
let forceInterval = false
// Users can pass in an object as a second parameter instead
if (typeof parameters[1] === 'object') {
if (Object.prototype.hasOwnProperty.call(parameters[1], 'timer')) {
timer = parameters[1].timer
}
if (Object.prototype.hasOwnProperty.call(parameters[1], 'delay')) {
delay = parameters[1].delay
}
if (Object.prototype.hasOwnProperty.call(parameters[1], 'forceInterval')) {
forceInterval = parameters[1].forceInterval
}
}
let autoIntervalLoop = null
const loop = () => {
autoIntervalLoop = setTimeout(() => {
parameters[0].call(this)
forceInterval ? loop() : requestAnimationFrame(loop)
}, timer)
}
autoIntervalLoop = setTimeout(() => {
forceInterval ? loop() : requestAnimationFrame(loop)
}, delay)
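        // Pausing is driven by a component property called 'autoIntervalTest': when it becomes false the timeout is cleared, when it becomes true the loop restarts.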
this.$watch('autoIntervalTest', test => {
if (test) {
forceInterval ? loop() : requestAnimationFrame(loop)
} else {
clearTimeout(autoIntervalLoop)
}
})
}
})
},
}
const alpine = window.deferLoadingAlpine || ((alpine) => alpine())
window.deferLoadingAlpine = function (callback) {
AlpineIntervalMagicMethod.start()
alpine(callback)
}
export default AlpineIntervalMagicMethod
<|start_filename|>src/range.js<|end_filename|>
import {
checkForAlpine,
importOrderCheck,
} from './utils'
importOrderCheck()
const AlpineRangeMagicMethod = {
start() {
checkForAlpine()
Alpine.addMagicProperty('range', () => {
return function (start, stop, step = 1) {
// Accept $range(10) and expect 1...10
if (typeof stop === 'undefined') {
stop = start
start = start ? 1 : 0
}
// Accept $range(20, 10) and expect 20...10
const reverse = start > stop
if (reverse) {
[start, stop] = [stop, start]
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/from#Sequence_generator_range
const range = Array.from({ length: (stop - start) / step + 1 }, (_, i) => start + (i * step))
return reverse ? range.reverse() : range
}
})
},
}
const alpine = window.deferLoadingAlpine || ((alpine) => alpine())
window.deferLoadingAlpine = function (callback) {
AlpineRangeMagicMethod.start()
alpine(callback)
}
export default AlpineRangeMagicMethod
<|start_filename|>tests/component.spec.js<|end_filename|>
import Alpine from 'alpinejs'
import AlpineComponentMagicMethod from '../dist/component'
import { waitFor } from '@testing-library/dom'
beforeAll(() => {
window.Alpine = Alpine
})
beforeEach(function () {
AlpineComponentMagicMethod.start()
})
test('$parent > component can access parent scope', async () => {
document.body.innerHTML = `
<div x-data="{foo: 'bar'}">
<div x-data>
<p x-text="$parent.foo"></p>
<button @click="$parent.foo = 'baz'"></button>
</div>
<button @click="foo = 'bob'"></button>
</div>
`
Alpine.start()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('bar')
})
document.querySelectorAll('button')[0].click()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('baz')
})
document.querySelectorAll('button')[1].click()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('bob')
})
})
test('$parent > component can access parent scope when called only in a child event', async () => {
document.body.innerHTML = `
<div id="parent-component" x-data="{foo: 'bar'}">
<div x-data>
<button @click="$parent.foo = 'baz'"></button>
</div>
</div>
`
await waitFor(() => {
expect(document.querySelector('#parent-component').__x.$data.foo === 'bar')
})
Alpine.start()
document.querySelectorAll('button')[0].click()
await waitFor(() => {
expect(document.querySelector('#parent-component').__x.$data.foo === 'baz')
})
})
test('$parent > component can update and watch deep object properties', async () => {
document.body.innerHTML = `
<div x-data="{foo: {bar: 'baz'}}">
<div x-data>
<p x-text="$parent.foo.bar"></p>
<button @click="$parent.foo.bar = 'qux'"></button>
</div>
<button @click="foo.bar = 'bob'"></button>
</div>
`
Alpine.start()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('baz')
})
document.querySelectorAll('button')[0].click()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('qux')
})
document.querySelectorAll('button')[1].click()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('bob')
})
})
// This test was added to remove `characterData: true` from the MutationObserver options
test('$parent > will not error on characterData edits', async () => {
document.body.innerHTML = `
<div x-data="{foo: 'bar'}">
<div x-data>
<p x-text="$parent.foo"></p>
<span>Some text</span>
</div>
</div>
`
Alpine.start()
document.querySelector('span').firstChild.appendData('Different text')
})
test('$component > component can access external scope', async () => {
document.body.innerHTML = `
<div x-data>
<p x-text="$component('ext').foo"></p>
<button @click="$component('ext').foo = 'baz'"></button>
</div>
<div x-id="ext" x-data="{foo: 'bar'}">
<button @click="foo = 'bob'"></button>
</div>
`
Alpine.start()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('bar')
})
document.querySelectorAll('button')[0].click()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('baz')
})
document.querySelectorAll('button')[1].click()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('bob')
})
})
test('$component > component can update and watch deep object properties', async () => {
document.body.innerHTML = `
<div x-data>
<p x-text="$component('ext').foo.bar"></p>
<button @click="$component('ext').foo.bar = 'qux'"></button>
</div>
<div x-id="ext" x-data="{foo: {bar: 'baz'}}">
<button @click="foo.bar = 'bob'"></button>
</div>
`
Alpine.start()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('baz')
})
document.querySelectorAll('button')[0].click()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('qux')
})
document.querySelectorAll('button')[1].click()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('bob')
})
})
test('$component > this context is set correctly when functions are invoked through the helper', async () => {
document.body.innerHTML = `
<div x-data>
<p x-text="$component('ext').foo"></p>
<button @click="$component('ext').baz()"></button>
</div>
<div x-id="ext" x-data="{foo: 'bar', baz() {return this.foo = this.$refs.bob.textContent}}">
<span x-ref="bob">qux</span>
</div>
`
Alpine.start()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('bar')
})
document.querySelector('button').click()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('qux')
})
})
test('$parent > component can access granparent scope', async () => {
document.body.innerHTML = `
<div x-data="{foo: 'bar', getFoo() {return this.foo}}">
<p x-text="foo"></p>
<div x-data>
<div x-data>
<p x-text="$parent.$parent.foo"></p>
<p x-text="$parent.$parent.getFoo()"></p>
<button @click="$parent.$parent.foo = 'baz'"></button>
</div>
<button @click="$parent.foo = 'bob'"></button>
</div>
<button @click="foo = 'qux'"></button>
</div>
`
Alpine.start()
await waitFor(() => {
expect(document.querySelectorAll('p')[0].textContent).toEqual('bar')
expect(document.querySelectorAll('p')[1].textContent).toEqual('bar')
expect(document.querySelectorAll('p')[2].textContent).toEqual('bar')
})
document.querySelectorAll('button')[0].click()
await waitFor(() => {
expect(document.querySelectorAll('p')[0].textContent).toEqual('baz')
expect(document.querySelectorAll('p')[1].textContent).toEqual('baz')
expect(document.querySelectorAll('p')[2].textContent).toEqual('baz')
})
document.querySelectorAll('button')[1].click()
await waitFor(() => {
expect(document.querySelectorAll('p')[0].textContent).toEqual('bob')
expect(document.querySelectorAll('p')[1].textContent).toEqual('bob')
expect(document.querySelectorAll('p')[2].textContent).toEqual('bob')
})
document.querySelectorAll('button')[2].click()
await waitFor(() => {
expect(document.querySelectorAll('p')[0].textContent).toEqual('qux')
expect(document.querySelectorAll('p')[1].textContent).toEqual('qux')
expect(document.querySelectorAll('p')[2].textContent).toEqual('qux')
})
})
test('$component > component can access magic properties', async () => {
document.body.innerHTML = `
<div x-data>
<p x-text="$component('ext').$refs.bob.textContent"></p>
</div>
<div x-id="ext" x-data="{foo: 'bar'}">
<span x-ref="bob" x-text="foo"></span>
</div>
`
Alpine.start()
await waitFor(() => {
expect(document.querySelector('p').textContent).toEqual('bar')
})
})
test('$parent > x-for can loop correctly on a property from the parent scope', async () => {
document.body.innerHTML = `
<div x-data="{ comments: ['test', 'test2'] }">
<div x-data>
<template x-for="item in $parent.comments">
<p x-text="item"></p>
</template>
</div>
</div>
`
Alpine.start()
await waitFor(() => {
expect(document.querySelectorAll('p').length).toEqual(2)
})
})
<|start_filename|>src/fetch.js<|end_filename|>
import axios from 'axios'
import {
checkForAlpine,
importOrderCheck,
} from './utils'
importOrderCheck()
const AlpineFetchMagicMethod = {
start() {
checkForAlpine()
Alpine.addMagicProperty('fetch', this.fetch.bind(null, null))
Alpine.addMagicProperty('get', this.fetch.bind(null, 'get'))
Alpine.addMagicProperty('post', this.fetch.bind(null, 'post'))
},
fetch(method) {
return async (parameters, data = {}) => {
function findResponse(response) {
return Object.prototype.hasOwnProperty.call(response, 'data') ? response.data : response
}
// Using $post or $get
if (method) {
return await axios({
url: parameters,
method: method,
[method === 'post' ? 'data' : 'params']: data,
}).then(response => findResponse(response))
}
if (typeof parameters === 'string') {
// Using $fetch('url')
return await axios.get(parameters).then(response => findResponse(response))
}
// Using $fetch({ // axios config })
return await axios(parameters)
}
},
}
const alpine = window.deferLoadingAlpine || ((alpine) => alpine())
window.deferLoadingAlpine = function (callback) {
AlpineFetchMagicMethod.start()
alpine(callback)
}
export default AlpineFetchMagicMethod
<|start_filename|>rollup.config.js<|end_filename|>
import babel from '@rollup/plugin-babel'
import commonjs from '@rollup/plugin-commonjs'
import filesize from 'rollup-plugin-filesize'
import json from '@rollup/plugin-json'
import resolve from '@rollup/plugin-node-resolve'
const createConfig = (filename) => ({
input: `src/${filename}.js`,
output: [
{
file: `dist/${filename}.js`,
format: 'umd',
name: `AlpineMagicHelpers.${filename}`,
},
],
external: false,
treeshake: {
propertyReadSideEffects: false,
},
plugins: [
babel({
babelHelpers: 'bundled',
exclude: 'node_modules/**',
}),
resolve({
mainFields: ['module', 'jsnext', 'main'],
browser: true,
extensions: ['.mjs', '.js', '.jsx', '.json', '.node'],
preferBuiltins: false,
}),
commonjs({
include: /\/node_modules\//,
}),
json(),
filesize(),
],
})
export default [
'index',
'component',
'fetch',
'interval',
'range',
'refresh',
'screen',
'scroll',
'truncate',
'undo',
'unsafeHTML',
].map(createConfig)
<|start_filename|>tests/unsafeHTML.spec.js<|end_filename|>
import Alpine from 'alpinejs'
import AlpineUnsafeHTMLCustomDirective from '../dist/unsafeHTML'
import { waitFor } from '@testing-library/dom'
beforeAll(() => {
window.Alpine = Alpine
window.alert = jest.fn()
})
beforeEach(() => {
AlpineUnsafeHTMLCustomDirective.start()
})
test('x-unsafe-html > on init', async () => {
document.body.innerHTML = `
<div x-data="{ foo: '<h1>bar</h1><script>alert(1)</script>' }">
<div id="component" x-unsafe-html="foo"></div>
</div>
`
expect(window.alert).toHaveBeenCalledTimes(0)
Alpine.start()
await waitFor(() => {
expect(document.querySelector('#component').innerHTML).toEqual('<h1>bar</h1><script>alert(1)</script>')
expect(window.alert).toHaveBeenCalledTimes(1)
})
})
<|start_filename|>src/undo.js<|end_filename|>
import {
checkForAlpine,
componentData,
updateOnMutation,
importOrderCheck,
} from './utils'
import { DeepDiff } from 'deep-diff'
importOrderCheck()
const history = new WeakMap()
const AlpineUndoMagicMethod = {
start() {
checkForAlpine()
Alpine.addMagicProperty('track', ($el) => {
return (propertiesToWatch) => {
propertiesToWatch = propertiesToWatch ?? Object.keys(componentData($el))
propertiesToWatch = Array.isArray(propertiesToWatch) ? propertiesToWatch : [propertiesToWatch]
const initialState = JSON.stringify(componentData($el, propertiesToWatch))
updateOnMutation($el, () => {
history.has($el.__x) || this.store($el.__x, {
props: propertiesToWatch,
previous: initialState,
})
const fresh = componentData($el, history.get($el.__x).props)
const previous = JSON.parse(history.get($el.__x).previous)
let changes = DeepDiff.diff(previous, fresh, true)
if (changes && changes.length) {
changes = changes.filter(change => {
return history.get($el.__x).props.some(prop => change.path.join('.').startsWith(prop))
})
history.get($el.__x).previous = JSON.stringify(fresh)
history.get($el.__x).changes.push(changes)
$el.__x.updateElements($el)
}
})
}
})
Alpine.addMagicProperty('undo', ($el, $clone) => {
return () => {
if ($el !== $clone) {
$el = this.syncClone($el, $clone)
}
const changes = history.get($el.__x).changes.pop()
const previous = JSON.parse(history.get($el.__x).previous)
changes && changes.forEach(change => {
DeepDiff.revertChange(
previous,
componentData($el, history.get($el.__x).props),
change,
)
})
// This could probably be extracted to a utility method like updateComponentProperties()
if (Object.keys(previous).length) {
const newData = {}
Object.entries(previous).forEach(item => {
newData[item[0]] = item[1]
})
$el.__x.$data = Object.assign($el.__x.$data, newData)
}
history.get($el.__x).previous = JSON.stringify(componentData($el, history.get($el.__x).props))
}
})
Alpine.addMagicProperty('history', ($el, $clone) => {
if (!$clone.__x) return []
if ($el !== $clone) {
$el = this.syncClone($el, $clone)
}
return history.has($el.__x) ? history.get($el.__x) : []
})
},
store(key, state) {
history.set(key, Object.assign({
changes: [],
get length() {
return this.changes.length
},
}, state))
return history.get(key)
},
syncClone($el, $clone) {
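    // Copy the tracked history from the original component onto the clone so later $undo/$history calls keep working against the clone.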
this.store($clone.__x, {
props: history.get($el.__x).props,
previous: history.get($el.__x).previous,
changes: history.get($el.__x).changes,
})
return $clone
},
}
const alpine = window.deferLoadingAlpine || ((alpine) => alpine())
window.deferLoadingAlpine = function (callback) {
alpine(callback)
AlpineUndoMagicMethod.start()
}
export default AlpineUndoMagicMethod
<|start_filename|>dist/interval.js<|end_filename|>
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, (global.AlpineMagicHelpers = global.AlpineMagicHelpers || {}, global.AlpineMagicHelpers.interval = factory()));
}(this, (function () { 'use strict';
var checkForAlpine = function checkForAlpine() {
if (!window.Alpine) {
throw new Error('[Magic Helpers] Alpine is required for the magic helpers to function correctly.');
}
if (!window.Alpine.version || !isValidVersion('2.5.0', window.Alpine.version)) {
throw new Error('Invalid Alpine version. Please use Alpine version 2.5.0 or above');
}
};
function isValidVersion(required, current) {
var requiredArray = required.split('.');
var currentArray = current.split('.');
for (var i = 0; i < requiredArray.length; i++) {
if (!currentArray[i] || parseInt(currentArray[i]) < parseInt(requiredArray[i])) {
return false;
}
}
return true;
}
function importOrderCheck() {
// We only want to show the error once
if (window.Alpine && !window.AlpineMagicHelpers.__fatal) {
window.AlpineMagicHelpers.__fatal = setTimeout(function () {
console.error('%c*** ALPINE MAGIC HELPER: Fatal Error! ***\n\n\n' + 'Alpine magic helpers need to be loaded before Alpine ' + 'to avoid errors when Alpine initialises its component. \n\n' + 'Make sure the helper script is included before Alpine in ' + 'your page when using the defer attribute', 'font-size: 14px');
}, 200); // We set a small timeout to make sure we flush all the Alpine noise first
}
}
importOrderCheck();
var AlpineIntervalMagicMethod = {
start: function start() {
checkForAlpine();
Alpine.addMagicProperty('interval', function ($el) {
return function () {
var _this = this;
for (var _len = arguments.length, parameters = new Array(_len), _key = 0; _key < _len; _key++) {
parameters[_key] = arguments[_key];
}
if (typeof parameters[0] !== 'function') return parameters[0];
var timer = parameters[1];
var delay = 0;
var forceInterval = false; // Users can pass in an object as a second parameter instead
if (typeof parameters[1] === 'object') {
if (Object.prototype.hasOwnProperty.call(parameters[1], 'timer')) {
timer = parameters[1].timer;
}
if (Object.prototype.hasOwnProperty.call(parameters[1], 'delay')) {
delay = parameters[1].delay;
}
if (Object.prototype.hasOwnProperty.call(parameters[1], 'forceInterval')) {
forceInterval = parameters[1].forceInterval;
}
}
var autoIntervalLoop = null;
var loop = function loop() {
autoIntervalLoop = setTimeout(function () {
parameters[0].call(_this);
forceInterval ? loop() : requestAnimationFrame(loop);
}, timer);
};
autoIntervalLoop = setTimeout(function () {
forceInterval ? loop() : requestAnimationFrame(loop);
}, delay);
this.$watch('autoIntervalTest', function (test) {
if (test) {
forceInterval ? loop() : requestAnimationFrame(loop);
} else {
clearTimeout(autoIntervalLoop);
}
});
};
});
}
};
var alpine = window.deferLoadingAlpine || function (alpine) {
return alpine();
};
window.deferLoadingAlpine = function (callback) {
AlpineIntervalMagicMethod.start();
alpine(callback);
};
return AlpineIntervalMagicMethod;
})));
| pascalandy/alpine-magic-helpers |
<|start_filename|>generators/app/index.js<|end_filename|>
'use strict';
var path = require('path');
var yeoman = require('yeoman-generator');
var camelcase = require('camelcase');
module.exports = yeoman.Base.extend({
prompting: function () {
var done = this.async();
var prompts = [{
name: 'filename',
message: 'Specify a folder/filename',
default: 'lib/my-codemod'
}];
this.prompt(prompts, function (props) {
var filename = props.filename;
var ext = path.extname(filename);
var base = path.basename(filename, ext);
if (ext === '') {
ext = '.js';
}
this.props = {
filename: path.join(path.dirname(filename), base + ext),
camelName: camelcase(base)
};
done();
}.bind(this));
},
writing: function () {
this.fs.copyTpl(
this.templatePath('codemod.js'),
this.destinationPath(this.props.filename),
this.props
);
}
});
<|start_filename|>test/app.js<|end_filename|>
import path from 'path';
import test from 'ava';
import assert from 'yeoman-assert';
import helpers from 'yeoman-test';
function runWithPrompts(prompts, cb) {
helpers
.run(path.join(__dirname, '../generators/app'))
.withPrompts(prompts)
.on('end', cb);
}
test.cb('generator-codemod:app', t => {
runWithPrompts({filename: 'my-codemod'}, err => {
t.ifError(err);
assert.file([
'my-codemod.js'
]);
t.end();
});
});
<|start_filename|>generators/app/templates/codemod.js<|end_filename|>
'use strict';
module.exports = function <%= camelName %>(file, api) {
var j = api.jscodeshift;
var ast = j(file.source);
ast.find(j.CallExpression, {
callee: {
type: 'Identifier',
name: 'foo'
}
}).forEach(function (p) {
p.get('callee').replace(j.identifier('bar'));
});
return ast.toSource({
useTabs: true,
quote: 'single'
});
};
| jamestalmage/generator-codemod |
<|start_filename|>examples.lisp<|end_filename|>
(defpackage :cl-drawille/examples
(:use :common-lisp :cl-drawille)
(:export :turtle-example :basic-example))
(in-package :cl-drawille/examples)
(defun turtle-example ()
(let ((turtle (cl-drawille:make-turtle)))
(loop repeat 36
do (cl-drawille:right turtle 10)
(loop repeat 36
do (cl-drawille:right turtle 10)
(cl-drawille:forward turtle 8)))
(format t "~a~%" (cl-drawille:frame turtle))))
(defun basic-example ()
(let ((canvas (cl-drawille:make-canvas)))
(loop for x below 1800
do (cl-drawille:set-pixel canvas (/ x 10) (* 10 (sin (* (/ x 180) pi)))))
(format t "~a~%" (cl-drawille:frame canvas))
(cl-drawille:clear canvas)
(loop for x below 1800 by 10
do (cl-drawille:set-pixel canvas (/ x 10) (+ 10 (* 10 (sin (* (/ x 180) pi)))))
(cl-drawille:set-pixel canvas (/ x 10) (+ 10 (* 10 (cos (* (/ x 180) pi))))))
(format t "~a~%" (cl-drawille:frame canvas))
(cl-drawille:clear canvas)
(loop for x below 3600 by 20
do (cl-drawille:set-pixel canvas (/ x 20) (+ 4 (* 4 (sin (* (/ x 180) pi))))))
(format t "~a~%" (cl-drawille:frame canvas))
(cl-drawille:clear canvas)
(loop for x below 360 by 4
do (cl-drawille:set-pixel canvas (/ x 4) (+ 30 (* 30 (sin (* (/ x 180) pi))))))
(loop for x below 30
do (loop for y below 30
do (cl-drawille:set-pixel canvas x y)
(cl-drawille:toggle-pixel canvas (+ 30 x) (+ 30 y))
(cl-drawille:toggle-pixel canvas (+ 60 x) y)))
(format t "~a~%" (cl-drawille:frame canvas))))
<|start_filename|>drawille.lisp<|end_filename|>
(defpackage :cl-drawille
(:use :common-lisp :cffi :osicat :alexandria)
(:export :terminal-size :make-canvas :clear
:set-pixel :unset-pixel :toggle-pixel
:rows :frame :set-text :make-turtle
:up :down :forward :move :right :left :back))
(in-package :cl-drawille)
(defun terminal-size ()
(multiple-value-bind (columns rows)
(ignore-errors
(cffi:with-foreign-object (window-size '(:struct osicat-posix:winsize))
(osicat-posix:ioctl 0 osicat-posix:tiocgwinsz window-size)
(let ((columns (cffi:foreign-slot-value window-size '(:struct osicat-posix:winsize) 'osicat-posix:col))
(rows (cffi:foreign-slot-value window-size '(:struct osicat-posix:winsize) 'osicat-posix:row)))
(values columns rows))))
(if columns
(values columns rows)
(let ((columns (osicat:environment-variable "COLUMNS"))
(rows (osicat:environment-variable "LINES")))
(values (if columns (parse-integer columns) 80) (if rows (parse-integer rows) 25))))))
(alexandria:define-constant +pixel-map+ #2A((#x01 #x08) (#x02 #x10) (#x04 #x20) (#x40 #x80)) :test #'equalp)
(defconstant +braille-offset+ #x2800)
(defun normalize (coord)
(round coord))
(defun get-pos (x y)
(values (floor x 2) (floor y 4)))
(defun pixel (x y)
(aref +pixel-map+ (mod y 4) (mod x 2)))
(defclass canvas ()
((chars
:initform (make-hash-table :test #'equal)
:accessor chars)))
(defun make-canvas ()
(make-instance 'canvas))
(defgeneric clear (c))
(defmethod clear ((c canvas))
(setf (chars c) (make-hash-table :test #'equal)))
(defmacro access-char (&body body)
`(let ((x (normalize x))
(y (normalize y)))
(multiple-value-bind (col row) (get-pos x y)
(let* ((key (cons col row))
(char (gethash key (chars c)))
(char (typecase char
(integer char)
(t 0)))
(pixel (pixel x y)))
,@body
(if (equal 0 (gethash key (chars c)))
(remhash key (chars c)))))))
(defmacro process-char (&body body)
`(access-char (setf (gethash key (chars c)) ,@body)))
(defgeneric set-pixel (c x y))
(defmethod set-pixel ((c canvas) x y)
(process-char (logior char pixel)))
(defgeneric unset-pixel (c x y))
(defmethod unset-pixel ((c canvas) x y)
(process-char (logandc2 char pixel)))
(defgeneric toggle-pixel (c x y))
(defmethod toggle-pixel ((c canvas) x y)
(process-char (logxor char pixel)))
(defgeneric get-pixel (c x y))
(defmethod get-pixel ((c canvas) x y)
(access-char (/= 0 (logand char pixel))))
(defgeneric get-char (c col row))
(defmethod get-char ((c canvas) col row)
(let* ((key (cons col row))
(char (gethash key (chars c)))
(char (if char char 0)))
(typecase char
(integer (code-char (+ +braille-offset+ char)))
(t char))))
(defgeneric rows (c &key))
(defmethod rows ((c canvas) &key (min-x nil) (min-y nil) (max-x nil) (max-y nil))
(multiple-value-bind (computed-max-x computed-min-x computed-max-y computed-min-y)
(loop for key being the hash-key of (chars c)
maximize (1+ (car key)) into max-x
minimize (car key) into min-x
maximize (1+ (cdr key)) into max-y
minimize (cdr key) into min-y
finally (return (values max-x min-x max-y min-y)))
(let ((max-x (if max-x (ceiling max-x 2) computed-max-x))
(min-x (if min-x (floor min-x 2) computed-min-x))
(max-y (if max-y (ceiling max-y 4) computed-max-y))
(min-y (if min-y (floor min-y 4) computed-min-y)))
(loop for y from min-y below max-y
collect (format nil "~{~c~}"
(loop for x from min-x below max-x
collect (get-char c x y)))))))
(defgeneric frame (c &key))
(defmethod frame ((c canvas) &key (min-x nil) (min-y nil) (max-x nil) (max-y nil))
(format nil "~{~a~^~%~}" (rows c :min-x min-x :min-y min-y :max-x max-x :max-y max-y)))
(defgeneric set-text (c x y text))
(defmethod set-text ((c canvas) x y text)
(multiple-value-bind (col row) (get-pos (normalize x) (normalize y))
(loop for char across text
and i from 0
do (setf (gethash (cons (+ col i) row) (chars c)) char))))
(defun line (x1 y1 x2 y2)
(let* ((x1 (normalize x1))
(y1 (normalize y1))
(x2 (normalize x2))
(y2 (normalize y2))
(xdiff (abs (- x1 x2)))
(ydiff (abs (- y1 y2)))
(xdir (if (<= x1 x2) 1 -1))
(ydir (if (<= y1 y2) 1 -1))
(r (max xdiff ydiff)))
(if (= 0 r)
(list (list x1 y1))
(loop for i from 0 to r
collect (list
(+ x1 (/ (* i xdiff xdir) r))
(+ y1 (/ (* i ydiff ydir) r)))))))
(defun polygon (&optional (center-x 0) (center-y 0) (sides 4) (radius 4))
(let ((angle (/ (* 2 pi) sides))
(radius (1+ radius)))
(loop for n below sides
append (line (+ center-x (/ (* radius (cos (* n angle))) 2))
(+ center-y (/ (* radius (sin (* n angle))) 2))
(+ center-x (/ (* radius (cos (* (1+ n) angle))) 2))
(+ center-y (/ (* radius (sin (* (1+ n) angle))) 2))))))
(defclass turtle (canvas)
((pos-x
:initarg :pos-x
:accessor pos-x)
(pos-y
:initarg :pos-y
:accessor pos-y)
(rotation
:initform 0
:accessor rotation)
(brush
:initform t
:accessor brush)))
(defun make-turtle (&optional (pos-x 0) (pos-y 0))
(make-instance 'turtle :pos-x pos-x :pos-y pos-y))
(defgeneric up (tur))
(defmethod up ((tur turtle))
(setf (brush tur) nil))
(defgeneric down (tur))
(defmethod down ((tur turtle))
(setf (brush tur) t))
(defgeneric forward (tur step))
(defmethod forward ((tur turtle) step)
(let ((x (+ (pos-x tur) (* step (cos (/ (* pi (rotation tur)) 180)))))
(y (+ (pos-y tur) (* step (sin (/ (* pi (rotation tur)) 180))))))
(move tur x y)))
(defgeneric move (tur x y))
(defmethod move ((tur turtle) x y)
(when (brush tur)
(loop for (x y) in (line (pos-x tur) (pos-y tur) x y)
do (set-pixel tur x y)))
(setf (pos-x tur) x)
(setf (pos-y tur) y))
(defgeneric right (tur angle))
(defmethod right ((tur turtle) angle)
(incf (rotation tur) angle))
(defgeneric left (tur angle))
(defmethod left ((tur turtle) angle)
(decf (rotation tur) angle))
(defgeneric back (tur step))
(defmethod back ((tur turtle) step)
(forward tur (- step)))
<|start_filename|>animations.lisp<|end_filename|>
(defpackage :cl-drawille/examples-animations
(:use :common-lisp :cl-drawille :cl-charms)
(:export :sine-tracking-example :rotating-cube-example))
(in-package :cl-drawille/examples-animations)
(defun animate (animation)
(cl-charms:with-curses ()
(cl-charms:disable-echoing)
(cl-charms/low-level:timeout 10)
(cl-charms/low-level:curs-set 0)
(cl-charms:clear-window cl-charms:*standard-window*)
(cl-charms:refresh-window cl-charms:*standard-window*)
(loop named curses-loop
for input = (cl-charms:get-char cl-charms:*standard-window* :ignore-error t)
do (when (eq #\q input) (return-from curses-loop))
(multiple-value-bind (frame delay) (funcall animation)
(loop for y from 0 and row in frame
do (ignore-errors (cl-charms:write-string-at-point cl-charms:*standard-window* row 0 y)))
(cl-charms:refresh-window cl-charms:*standard-window*)
(sleep delay)))))
(defun sine-tracking ()
(let ((canvas (cl-drawille:make-canvas))
(i 0)
(height 40))
(lambda ()
(cl-drawille:clear canvas)
(loop for (x y) in (cl-drawille::line 0 height 180 (+ height (* height (sin (/ (* i pi) 180)))))
do (cl-drawille:set-pixel canvas x y))
(loop for x from 0 below 360 by 2
do (cl-drawille:set-pixel canvas (/ x 2) (+ height (* height (sin (/ (* (+ x i) pi) 180))))))
(incf i 2)
(values (cl-drawille:rows canvas) 1/60))))
(defun sine-tracking-example ()
(animate (sine-tracking)))
(defun rotate-x (input angle)
(let ((cos (cos (/ (* angle pi) 180)))
(sin (sin (/ (* angle pi) 180))))
(vector (aref input 0)
(- (* cos (aref input 1)) (* sin (aref input 2)))
(+ (* sin (aref input 1)) (* cos (aref input 2))))))
(defun rotate-y (input angle)
(let ((cos (cos (/ (* angle pi) 180)))
(sin (sin (/ (* angle pi) 180))))
(vector (- (* cos (aref input 2)) (* sin (aref input 0)))
(aref input 1)
(+ (* sin (aref input 2)) (* cos (aref input 0))))))
(defun rotate-z (input angle)
(let ((cos (cos (/ (* angle pi) 180)))
(sin (sin (/ (* angle pi) 180))))
(vector (- (* cos (aref input 0)) (* sin (aref input 1)))
(+ (* sin (aref input 0)) (* cos (aref input 1)))
(aref input 2))))
(defun project (input width height fov distance)
(let ((factor (/ fov (+ distance (aref input 2)))))
(vector (+ (/ width 2) (* factor (aref input 0)))
(+ (/ height 2) (* factor (aref input 1)))
1)))
(defun rotating-cube (&key (projection nil))
(let ((canvas (cl-drawille:make-canvas))
(vertices '(#(-20 20 -20)
#(20 20 -20)
#(20 -20 -20)
#(-20 -20 -20)
#(-20 20 20)
#(20 20 20)
#(20 -20 20)
#(-20 -20 20)))
(faces '((0 1 2 3) (1 5 6 2) (5 4 7 6) (4 0 3 7) (0 4 5 1) (3 2 6 7)))
(angle-x 0)
(angle-y 0)
(angle-z 0))
(lambda ()
(cl-drawille:clear canvas)
(flet ((cond-project (input &rest args) (if projection (apply #'project input args) input)))
(let ((transformed
(loop for vertex in vertices collect (cond-project (rotate-z (rotate-y (rotate-x vertex angle-x) angle-y) angle-z) 50 50 50 50))))
(loop for (a b c d) in faces
do (loop for (x y) in (cl-drawille::line (aref (nth a transformed) 0) (aref (nth a transformed) 1) (aref (nth b transformed) 0) (aref (nth b transformed) 1))
do (cl-drawille:set-pixel canvas x y))
(loop for (x y) in (cl-drawille::line (aref (nth b transformed) 0) (aref (nth b transformed) 1) (aref (nth c transformed) 0) (aref (nth c transformed) 1))
do (cl-drawille:set-pixel canvas x y))
(loop for (x y) in (cl-drawille::line (aref (nth c transformed) 0) (aref (nth c transformed) 1) (aref (nth d transformed) 0) (aref (nth d transformed) 1))
do (cl-drawille:set-pixel canvas x y))
(loop for (x y) in (cl-drawille::line (aref (nth d transformed) 0) (aref (nth d transformed) 1) (aref (nth a transformed) 0) (aref (nth a transformed) 1))
do (cl-drawille:set-pixel canvas x y)))))
(incf angle-x 2)
(incf angle-y 3)
(incf angle-z 5)
(values (cl-drawille:rows canvas :min-x -40 :min-y -40 :max-x 80 :max-y 80) 1/20))))
(defun rotating-cube-example (&key (projection nil))
(animate (rotating-cube :projection projection)))
<|start_filename|>cl-drawille.asd<|end_filename|>
(in-package :asdf)
(defsystem "cl-drawille"
:description "cl-drawille: Drawing in terminal with Unicode Braille characters."
:version "1.0.0"
:author "Goheeca <<EMAIL>>"
:licence "MIT"
:components ((:file "drawille"))
:depends-on ("cffi" "osicat" "alexandria"))
(defsystem "cl-drawille/examples"
:description "cl-drawille examples"
:version "1.0.0"
:author "Goheeca <<EMAIL>>"
:licence "MIT"
:components ((:file "examples"))
:depends-on ("cl-drawille"))
(defsystem "cl-drawille/examples-animations"
:description "cl-drawille animated examples"
:version "1.0.0"
:author "Goheeca <<EMAIL>>"
:licence "MIT"
:components ((:file "animations"))
:depends-on ("cl-drawille" "cl-charms"))
| Goheeca/cl-drawille |
<|start_filename|>Makefile<|end_filename|>
.ONESHELL:
clean:
rm -rf build;
rm -rf dist;
rm -rf procbridge.egg-info
setup:
python3 -m pip install --upgrade setuptools wheel twine
build:
# https://packaging.python.org/tutorials/packaging-projects/
python3 setup.py sdist bdist_wheel
upload_test:
twine upload --repository-url https://test.pypi.org/legacy/ dist/*
upload:
twine upload dist/*
| raajon/procbridge-python |
<|start_filename|>src/db/seeds/seed_users.js<|end_filename|>
if (process.env.NODE_ENV === "production") {
throw new Error("Can't run seeds in production");
}
exports.seed = function(knex) {
// Deletes ALL existing entries
return knex("users")
.del()
.then(function() {
// Inserts seed entries
return knex("users").insert([
{
name: "<NAME>",
description: "Force user"
},
{
name: "<NAME>",
description: "Wizard"
}
]);
});
};
<|start_filename|>src/api/routes/v1/users.js<|end_filename|>
const { Router } = require("express");
const { StatusCodes } = require("http-status-codes");
const UsersService = require("../../../services/UsersService");
const route = Router();
module.exports = app => {
app.use("/users", route);
route.get("/", async (req, res, next) => {
try {
const data = await UsersService.getAllUsers();
return res.json(data).status(StatusCodes.OK);
} catch (err) {
return next(err);
}
});
route.get("/:id", async (req, res, next) => {
try {
const data = await UsersService.getUser(req.params.id);
return res.json(data).status(StatusCodes.OK);
} catch (err) {
return next(err);
}
});
};
<|start_filename|>src/api/routes/index.js<|end_filename|>
const { Router } = require("express");
const apiv1 = require("./v1");
// this serves as the root path definition, define root paths here
const app = Router();
app.use("/api", apiv1);
module.exports = app;
<|start_filename|>src/services/UsersService.js<|end_filename|>
const { StatusCodes } = require("http-status-codes");
const Users = require("../models/Users");
const logger = require("./internal/Logger");
const ApiError = require("../errors/ApiError");
class UsersService {
async getAllUsers() {
try {
return await Users.query().select();
} catch (err) {
logger.error("An error occured when querying all users.", __filename, { err });
throw new ApiError("An internal server error occurred");
}
}
async getUser(id) {
let user;
try {
user = await Users.query().findById(id);
} catch (err) {
logger.error("An error occured when querying all users.", __filename, { err });
throw new ApiError("An internal server error occurred");
}
if (!user) throw new ApiError("User not found", StatusCodes.NOT_FOUND);
return user;
}
}
module.exports = new UsersService();
<|start_filename|>src/loaders/index.js<|end_filename|>
const databaseLoader = require("./databaseLoader");
const expressLoader = require("./expressLoader");
module.exports = async app => {
await databaseLoader();
expressLoader(app);
};
<|start_filename|>src/api/routes/v1/index.js<|end_filename|>
const { Router } = require("express");
const users = require("./users");
const api = require("./api");
// any endpoints for api/{anything} will be here
const v1 = Router();
api(v1);
users(v1);
module.exports = v1;
<|start_filename|>src/api/routes/v1/api.js<|end_filename|>
const { Router } = require("express");
const route = Router();
module.exports = app => {
app.use("/", route);
route.get("/", async (req, res) => {
return res.send("<h1>Welcome to Backend Starter v2.0.0!</h1>");
});
};
<|start_filename|>src/api/middleware/genericErrorHandler.js<|end_filename|>
const { StatusCodes } = require("http-status-codes");
const logger = require("../../services/internal/Logger");
module.exports = (err, req, res, next) => {
logger.error("An ApiError is going to be returned => " + err);
res.status(err.httpStatusCode || StatusCodes.INTERNAL_SERVER_ERROR);
return res.json(err);
};
<|start_filename|>src/errors/ApiError.js<|end_filename|>
const { StatusCodes } = require("http-status-codes");
class ApiError extends Error {
constructor(message, httpStatusCode = StatusCodes.INTERNAL_SERVER_ERROR, context, ...params) {
super(...params);
if (Error.captureStackTrace) {
Error.captureStackTrace(this, ApiError);
}
this.message = message;
    this.httpStatusCode = httpStatusCode;
this.context = context;
this.date = new Date();
}
}
module.exports = ApiError;
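// Illustrative usage sketch (not part of the original file; names mirror
// UsersService and genericErrorHandler elsewhere in this repo):
//
//   const { StatusCodes } = require("http-status-codes");
//   const ApiError = require("../errors/ApiError");
//
//   // Throwing inside a service...
//   throw new ApiError("User not found", StatusCodes.NOT_FOUND);
//
//   // ...lets genericErrorHandler reply with err.httpStatusCode (404 here)
//   // instead of falling back to 500.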
<|start_filename|>src/db/migrations/20200127211708_users.js<|end_filename|>
exports.up = function(knex) {
return knex.schema.createTable("users", table => {
table.increments().primary();
table.string("name").notNullable();
table.string("description");
table.timestamp("updatedAt").defaultTo(knex.fn.now());
table.timestamp("createdAt").defaultTo(knex.fn.now());
});
};
exports.down = function(knex) {
if (process.env.NODE_ENV !== "production") {
return knex.schema.dropTableIfExists("users");
}
};
<|start_filename|>src/api/middleware/index.js<|end_filename|>
const genericErrorHandler = require("./genericErrorHandler");
module.exports = { genericErrorHandler };
<|start_filename|>src/services/internal/Logger.js<|end_filename|>
/**
 * Very basic fake logger; uses console.log to format the log output.
*/
class Logger {
constructor() {
this.colors = {
red: "\x1b[31m",
green: "\x1b[32m",
yellow: "\x1b[33m",
cyan: "\x1b[36m",
reset: "\x1b[0m"
};
}
error(message, className, extendedFields = {}) {
this._writeLocal("ERROR", className, message, extendedFields, this.colors.red);
}
warn(message, className, extendedFields = {}) {
this._writeLocal("WARN", className, message, extendedFields, this.colors.yellow);
}
info(message, className, extendedFields = {}) {
this._writeLocal("INFO", className, message, extendedFields, this.colors.cyan);
}
debug(message, className, extendedFields = {}) {
this._writeLocal("DEBUG", className, message, extendedFields, this.colors.green);
}
_writeLocal(level, className, message, extendedFields, color) {
const formattedMessage =
`\n-----\n${new Date().toLocaleString()} - ${level}\n` +
`file: ${className}\n` +
`message: ${message}\n` +
`extendedFields: ${JSON.stringify(extendedFields)}\n-----\n`;
console.log(color, formattedMessage, this.colors.reset);
}
}
module.exports = new Logger();
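// Illustrative usage sketch (not part of the original file; mirrors the calls
// made in UsersService and the loaders — the require path depends on the caller):
//
//   const logger = require("./services/internal/Logger");
//
//   logger.info("Server started", __filename, { port: process.env.PORT });
//   logger.error("Query failed", __filename, { err: new Error("boom") });
//   // Each call prints a colored, timestamped block via console.log.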
<|start_filename|>src/loaders/expressLoader.js<|end_filename|>
const express = require("express");
const api = require("../api/routes");
const { genericErrorHandler } = require("../api/middleware");
module.exports = app => {
  // define any middlewares that need to run before our routes
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
// define ALL routes here
app.use(api);
// for any other middlewares that need to run after our routes
app.use(genericErrorHandler);
};
<|start_filename|>src/loaders/databaseLoader.js<|end_filename|>
const knex = require("knex");
const { Model } = require("objection");
const knexConfig = require("../knexfile");
const logger = require("../services/internal/Logger");
module.exports = async () => {
logger.debug("Starting Database connection");
const k = knex(knexConfig);
try {
await k.raw("select 1 + 1 as result");
Model.knex(k);
} catch (e) {
logger.error("Database connection failed! Here's the error => " + e);
return process.exit(1);
}
k.on("query", query => logger.debug(`DB Query Ran: ${query.sql}`));
logger.debug("Database connection is good!");
};
<|start_filename|>src/app.js<|end_filename|>
const express = require("express");
const loaders = require("./loaders");
const logger = require("./services/internal/Logger");
const startServer = async () => {
logger.debug("startServer() was called, starting server");
const app = express();
await loaders(app);
app.listen(process.env.PORT, err => {
if (err) {
logger.error(err);
return process.exit(1);
}
logger.info(`
#########################################################
Server listening on port: ${process.env.PORT}
#########################################################
`);
});
};
startServer();
<|start_filename|>src/knexfile.js<|end_filename|>
module.exports = {
client: "mysql2",
useNullAsDefault: true,
connection: {
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: <PASSWORD>,
database: process.env.DB_NAME,
port: process.env.DB_PORT || 3306
},
migrations: {
directory: "./db/migrations"
},
seeds: {
directory: "./db/seeds"
}
};
| HappyZombies/express-backend-starter |
<|start_filename|>Distortion/Wave.shader<|end_filename|>
shader_type canvas_item;
uniform float speed = 10.0;
uniform float waves = 60.0;
float Remap01(float value, float from, float to) {
return (value - from) / (to - from);
}
void fragment() {
vec2 uv = UV;
uv.x += Remap01(sin(uv.y * waves - (TIME * speed)), -waves, waves) - 0.5;
vec4 color = texture(TEXTURE, uv);
COLOR = color;
}
<|start_filename|>Dissolve/3D Vertical Dissolve.shader<|end_filename|>
shader_type spatial;
render_mode cull_disabled;
uniform sampler2D texture;
uniform sampler2D blendTexture;
uniform sampler2D noiseTexture;
uniform float offset = 0.;
uniform bool up = true;
uniform vec4 borderColor: hint_color = vec4(1., 1., 0., 1.);
uniform float borderHeight = 0.1;
uniform float waveAmplitude = 1.;
uniform float waveFrequency = 1.;
uniform float wavePhase = 0.1;
uniform float emissionIntensity = 1.;
uniform float noiseSpeed = .01;
uniform float noiseInfluence = 1.;
uniform vec2 blendUVScale = vec2(1.);
uniform vec2 noiseUVScale = vec2(1.);
uniform vec2 textureUVScale = vec2(1.);
const float tao = 2. * 3.14;
// https://github.com/godotengine/godot/issues/19800
uniform mat4 global_transform;
varying vec3 world_pos;
void vertex(){
world_pos = (global_transform * vec4(VERTEX, 1.0)).xyz;
}
void fragment() {
vec3 position = world_pos;
vec4 text = texture(texture, UV);
vec4 blend = texture(blendTexture, UV * blendUVScale);
vec2 st = UV;
st.y -= TIME * noiseSpeed;
vec4 noise = texture(noiseTexture, st * noiseUVScale);
float x = tao * position.x;
float waveFrequency1 = waveFrequency;
float waveFrequency2 = waveFrequency + 2. - wavePhase;
float waveFrequency3 = waveFrequency + 3. - wavePhase;
position.y += waveAmplitude * (sin(x / waveFrequency1) + sin(x / waveFrequency2) + sin(x / waveFrequency3));
position.y += (noise.r * noiseInfluence);
float direction = up ? 1. : -1.;
float upperBorder = smoothstep(offset, offset, (position.y * direction) + 1.);
float bottomBorder = smoothstep(offset, offset, (position.y * direction) - borderHeight + 1.);
float borderPart = upperBorder - bottomBorder;
vec4 color = mix(blend, borderColor, upperBorder);
color = mix(color, text, bottomBorder);
ALBEDO = color.rgb;
if (!FRONT_FACING) {
ALBEDO = borderColor.rgb;
NORMAL = VIEW;
}
ALPHA = color.a;
ALPHA_SCISSOR = 1.0;
EMISSION = vec3(borderPart) * borderColor.rgb * emissionIntensity;
}
<|start_filename|>Dissolve/2D Dissolve.shader<|end_filename|>
shader_type canvas_item;
const float BORDER_SIZE = 0.02;
const float NOISE_BORDER_SIZE = 0.05;
uniform sampler2D texture2;
uniform sampler2D noiseTexture;
uniform float noise_speed = 1.;
uniform vec4 borderColor: hint_color = vec4(1.);
const vec3 GRAY_COLOR = vec3(0.299, 0.587, 0.114);
float circle(vec2 uv, vec2 position, float radius) {
float d = distance(position, uv);
return 1. - clamp(d / radius, 0., 1.);
}
float easeInQuad(float x) {
return x * x;
}
void fragment() {
float radius = abs(2. * fract(TIME / 6.));
radius = easeInQuad(radius);
float shape = circle(UV, vec2(1.0), radius);
float shape2 = shape + BORDER_SIZE;
vec4 color1 = texture(TEXTURE, UV);
vec4 color2 = texture(texture2, UV);
float r = 0.1;
vec2 st = UV;
st.x = st.x - r * sin(TIME / noise_speed);
st.y = st.y - r * cos(TIME / noise_speed);
vec4 noise = texture(noiseTexture, st);
float noiseVal = noise.r * shape;
float noiseVal2 = noise.r * shape2;
float s1 = step(noiseVal, NOISE_BORDER_SIZE);
float s2 = step(noiseVal2, NOISE_BORDER_SIZE);
vec4 border1 = (1. - s1) * vec4(1., 0., 0., 1.);
vec4 border2 = (1. - s2) * vec4(1.) - border1;
vec4 c3 = mix(color2, color1, s1);
vec4 c4 = mix(c3, border2 * borderColor, border2.r);
COLOR = c4;
}
<|start_filename|>Dissolve/3D Dissolve.shader<|end_filename|>
shader_type spatial;
render_mode cull_disabled;
uniform sampler2D texture;
uniform sampler2D blendTexture;
uniform sampler2D noiseTexture;
uniform sampler2D normalTexture;
//uniform float offset = 0.;
uniform vec3 offset = vec3(0.);
uniform vec4 borderColor: hint_color = vec4(1., 1., 0., 1.);
uniform float borderHeight = 0.1;
uniform float radius = 5.;
uniform float emissionIntensity = 1.;
uniform float noiseSpeed = .01;
uniform float noiseInfluence = 1.;
uniform vec2 UVScale = vec2(1.);
uniform vec2 blendUVScale = vec2(1.);
uniform vec2 noiseUVScale = vec2(1.);
const float tao = 2. * 3.14;
const float NOISE_BORDER_SIZE = 0.2;
// https://github.com/godotengine/godot/issues/19800
uniform mat4 global_transform;
varying vec3 world_pos;
void vertex(){
world_pos = (global_transform * vec4(VERTEX, 1.0)).xyz;
}
void fragment() {
vec3 position = world_pos;
vec4 text = texture(texture, UV * UVScale);
vec4 blend = texture(blendTexture, UV * blendUVScale);
vec4 normal = texture(normalTexture, UV * UVScale);
vec2 st = UV;
st.y -= TIME * noiseSpeed;
vec4 noise = texture(noiseTexture, st * noiseUVScale);
float global_distance = distance(position, offset);
global_distance += (noise.r * noiseInfluence);
float border1 = global_distance > radius ? 1. : 0.;
float border2 = global_distance > (radius + borderHeight) ? 1. : 0.;
vec4 color = mix(blend, borderColor, border1);
color = mix(color, text, border2);
ALBEDO = color.rgb;
if (!FRONT_FACING) {
ALBEDO = borderColor.rgb;
NORMAL = VIEW;
}
ALPHA = color.a;
ALPHA_SCISSOR = 1.0;
EMISSION = vec3(border1 - border2) * borderColor.rgb * emissionIntensity;
}
<|start_filename|>Hologram/3D Hologram.shader<|end_filename|>
shader_type spatial;
render_mode cull_disabled, specular_schlick_ggx;
uniform vec4 baseColor: hint_color = vec4(0.3058, 0.835, 0.960, 1.);
uniform float speed = 0.5;
uniform vec4 linesColor: hint_color = vec4(0.633232, 0.910156, 0.555693, 1.);
uniform float linesColorIntensity = 5.;
uniform float emissionValue = 1.;
uniform sampler2D hologramTexture;
uniform vec2 hologramTextureTiling = vec2(1., 5.);
vec2 TilingAndOffset(vec2 uv, vec2 tiling, vec2 offset) {
return uv * tiling + offset;
}
float Fresnel(vec3 normal, vec3 view, float pow) {
return pow(1.0 - clamp(dot(normal, view), 0.0, 1.0), pow);
}
void fragment() {
vec2 uv = SCREEN_UV;
vec2 offset = vec2(TIME * speed);
vec2 tiling = TilingAndOffset(uv, hologramTextureTiling, offset);
vec4 noise = texture(hologramTexture, tiling);
float fresnel = Fresnel(NORMAL, VIEW, emissionValue);
vec4 colorLines = linesColor * vec4(vec3(linesColorIntensity), 1.);
vec4 emission = colorLines * fresnel * noise;
ALBEDO = baseColor.rgb;
ALPHA = dot(noise.rgb, vec3(0.333));
EMISSION = emission.rgb;
}
| ceceppa/godot-shaders |
<|start_filename|>src/fileio.h<|end_filename|>
#ifndef FILEIO_H
#define FILEIO_H
#include <QObject>
#include <QTemporaryDir>
class FileIO : public QObject
{
Q_OBJECT
public:
Q_INVOKABLE QString read(const QString& filePath) const;
Q_INVOKABLE bool write(
const QString& filePath,
const QString& jsonFile,
const QStringList& binariesPath = {}) const;
Q_INVOKABLE QString toLocalFile(const QString& filePath) const;
Q_INVOKABLE QString toUrlFile(const QString& filePath) const;
Q_INVOKABLE bool fileExist(const QString& filePath) const;
Q_INVOKABLE void copyToClipboard(const QString& text) const;
Q_INVOKABLE QString getClipboard() const;
Q_INVOKABLE QString tempFolder() const;
Q_INVOKABLE QString copyToTempFolder(const QString& path) const;
Q_INVOKABLE QString tempFolderFileUrl(const QString& fileName) const;
Q_INVOKABLE QString tempFolderFilePath(const QString& fileName) const;
Q_INVOKABLE QString getImageData(const QString& fileName) const;
Q_INVOKABLE QString openFilePaht() const;
void setOpenFilePaht(const QString& filePaht);
private:
QTemporaryDir m_dir;
QByteArray fileChecksum(QFile& file) const;
QString m_openFilePath{""};
};
#endif // FILEIO_H
<|start_filename|>src/fileio.cpp<|end_filename|>
#include "fileio.h"
#include <QClipboard>
#include <QCryptographicHash>
#include <QDataStream>
#include <QDebug>
#include <QFile>
#include <QGuiApplication>
#include <QStandardPaths>
#include <QTextStream>
#include <QUrl>
QString
FileIO::read(const QString& filePath) const {
QFile file(toLocalFile(filePath));
if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
return "";
QDataStream in(&file);
QString jsonFile;
QStringList binaries;
in >> jsonFile;
in >> binaries;
for (auto& binaryFileName : binaries) {
QFile binaryFile(tempFolderFilePath(binaryFileName));
if (binaryFile.open(QIODevice::WriteOnly)) {
QByteArray binaryData;
in >> binaryData;
binaryFile.write(QByteArray::fromBase64(binaryData));
binaryFile.close();
}
}
file.close();
return jsonFile;
}
bool
FileIO::write(
const QString& filePath,
const QString& jsonFile,
const QStringList& binariesPath) const {
QFile file(toLocalFile(filePath));
if (!file.open(QIODevice::WriteOnly | QIODevice::Text))
return false;
QDataStream out(&file);
out << jsonFile;
out << binariesPath;
for (auto& binaryFileName : binariesPath) {
QFile binaryFile(tempFolderFilePath(binaryFileName));
if (binaryFile.open(QIODevice::ReadOnly)) {
out << binaryFile.readAll().toBase64();
}
}
file.close();
return true;
}
QString
FileIO::toLocalFile(const QString& filePath) const {
if (filePath.left(7) != "file://")
return filePath;
return QUrl(filePath).toLocalFile();
}
QString
FileIO::toUrlFile(const QString& filePath) const {
if (filePath.left(7) == "file://")
return filePath;
return QUrl::fromLocalFile(filePath).toString();
}
bool
FileIO::fileExist(const QString& filePath) const {
return QFile::exists(toLocalFile(filePath));
}
void
FileIO::copyToClipboard(const QString& text) const {
QGuiApplication::clipboard()->setText(text);
}
QString
FileIO::getClipboard() const {
return QGuiApplication::clipboard()->text();
}
QString
FileIO::tempFolder() const {
if (m_dir.isValid()) {
return m_dir.path();
}
return "";
}
QString
FileIO::copyToTempFolder(const QString& path) const {
QFile file(toLocalFile(path));
QFileInfo in(file.fileName());
QString tempFileName{QString{"dpb"}
.append(QString::fromLatin1(fileChecksum(file)))
.append(".")
.append(in.suffix())};
QFileInfo out(tempFolder(), tempFileName);
file.copy(out.absoluteFilePath());
return tempFileName;
}
QString
FileIO::tempFolderFileUrl(const QString& fileName) const {
QFileInfo out(tempFolder(), fileName);
return toUrlFile(out.absoluteFilePath());
}
QString
FileIO::tempFolderFilePath(const QString& fileName) const {
QFileInfo out(tempFolder(), fileName);
return out.absoluteFilePath();
}
QString FileIO::getImageData(const QString &fileName) const
{
QFile binaryFile(fileName);
if (binaryFile.open(QIODevice::ReadOnly)) {
return binaryFile.readAll().toBase64();
}
return "";
}
QString FileIO::openFilePaht() const
{
return m_openFilePath;
}
void FileIO::setOpenFilePaht(const QString& filePaht)
{
m_openFilePath = filePaht;
m_openFilePath = m_openFilePath.replace("\\","/");
}
QByteArray
FileIO::fileChecksum(QFile& file) const {
if (file.open(QFile::ReadOnly)) {
QCryptographicHash hash(QCryptographicHash::Algorithm::Md5);
if (hash.addData(&file)) {
return hash.result().toHex();
}
}
return QByteArray();
}
<|start_filename|>src/main.cpp<|end_filename|>
#include "fileio.h"
#include "upnpmanager.h"
#include <QApplication>
#include <QQmlApplicationEngine>
#include <QQmlContext>
#include <QQuickStyle>
#include <QSplashScreen>
int
main(int argc, char* argv[]) {
QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling);
QApplication app(argc, argv);
app.setApplicationName("demon-presentation-board");
app.setOrganizationName("waterbear");
QQuickStyle::setStyle("material");
QLocale::setDefault(QLocale(QLocale::English, QLocale::UnitedStates));
UpnpManager upnpmanager;
upnpmanager.handleSearch = true;
QQmlApplicationEngine engine;
FileIO fileio;
if(argc>1){
fileio.setOpenFilePaht(QString(argv[1]));
}
engine.rootContext()->setContextProperty("fileio", &fileio);
engine.rootContext()->setContextProperty("upnp", &upnpmanager);
engine.load(QUrl(QStringLiteral("qrc:/src/qml/main.qml")));
if (engine.rootObjects().isEmpty())
return -1;
return app.exec();
}
<|start_filename|>src/qml/qmlHelper.js<|end_filename|>
function fileFormatFromPath(path){
return path.substring(
path.lastIndexOf(".")+1,
path.length)
}
function isImage(path){
var f = fileFormatFromPath(path)
return f==="png" || f==="jpeg" || f==="jpg" || f==="svg" || f==="gif"
}
function isMedia(path){
var f = fileFormatFromPath(path)
return f==="mp4" || f==="avi" || f==="mov" || f==="mkv" || f==="wmv"
}
function isAppFile(path){
var f = fileFormatFromPath(path)
return f==="dpb"
}
function isAcceptableForDrop(path){
return isImage(path) || isAppFile(path)
}
<|start_filename|>src/upnpmanager.cpp<|end_filename|>
#include "upnpmanager.h"
#include <QNetworkDatagram>
#include <QNetworkInterface>
#include <QTcpSocket>
const QHostAddress upnpAddress = QHostAddress{"239.255.255.250"};
const quint16 upnpPort = 1900;
static quint16 selectedPort=0;
QList<QNetworkInterface>
activeInterfaces() {
QList<QNetworkInterface> actives;
#if defined(_MSC_VER) // visual studio
const int
#else
constexpr const int
#endif
KRequirement = QNetworkInterface::IsUp | QNetworkInterface::IsRunning;
auto interfaces = QNetworkInterface::allInterfaces();
for (const auto& iface : interfaces) {
auto flags = iface.flags();
if (flags & QNetworkInterface::IsLoopBack)
continue;
if (!(flags & KRequirement) || !iface.isValid())
continue;
actives.append(iface);
}
return actives;
}
UpnpManager::UpnpManager() {
connect(
this,
&QUdpSocket::stateChanged,
[this](QAbstractSocket::SocketState state) {
if (state != QAbstractSocket::BoundState)
return;
for (auto interface : activeInterfaces()) {
joinMulticastGroup(upnpAddress, interface);
}
connect(this, &QUdpSocket::readyRead, this, [this]() {
while (hasPendingDatagrams()) {
QNetworkDatagram datagram = receiveDatagram();
this->handleMessage(datagram.data());
}
});
});
m_broadcastTimer.setSingleShot(false);
connect(&m_broadcastTimer, &QTimer::timeout, [this]() {
this->writeDatagram(
m_requestMessage, m_requestMessage.size(), upnpAddress, upnpPort);
});
bind(QHostAddress::AnyIPv4, upnpPort, QAbstractSocket::ShareAddress);
}
QString UpnpManager::urls()
{
QStringList urls;
auto interfaces = activeInterfaces();
for (auto& interface : interfaces) {
for (auto address : interface.addressEntries()) {
if (address.ip().protocol() != QAbstractSocket::IPv4Protocol)
continue;
auto url =
address.ip().toString().prepend("ws://").append(":")
.append(QString::number(selectedPort));
if (!urls.contains(url)) {
urls.push_back(url);
}
}
}
return urls.join("\",\"").prepend("[\"").append("\"]");
}
int UpnpManager::freePort()
{
QTcpSocket socket;
for(quint16 i=54321;i < 60000;i++){
if(socket.bind(QHostAddress::Any,i)){
socket.close();
selectedPort = i;
return i;
}
}
return -1;
}
void
UpnpManager::startDiscovery() {
m_broadcastTimer.start(1000);
}
void
UpnpManager::stopDiscovery() {
m_broadcastTimer.stop();
}
void UpnpManager::writeRespond()
{
QByteArray message = m_respondMessage.arg(urls()).toUtf8();
for (auto interface:activeInterfaces()) {
setMulticastInterface(interface);
this->writeDatagram(message, upnpAddress, upnpPort);
}
}
void
UpnpManager::handleMessage(QString message) {
if (handleSearch &&
message.endsWith("USER-AGENT:DemonPresentationBoard\r\n\r\n") &&
message.startsWith("M-SEARCH")) {
this->writeRespond();
} else if (
handleNotify &&
message.endsWith("USER-AGENT:DemonPresentationBoard\r\n\r\n") &&
message.startsWith("NOTIFY")) {
QString beginStr = "Location:";
QString endStr = "]";
auto begin = message.indexOf(beginStr) + beginStr.length();
newUrlListRecieved(
message.mid(begin, message.indexOf(endStr) - begin + 1));
}
}
<|start_filename|>src/upnpmanager.h<|end_filename|>
#ifndef UPNPMANAGER_H
#define UPNPMANAGER_H
#include <QTimer>
#include <QUdpSocket>
class UpnpManager : public QUdpSocket
{
Q_OBJECT
public:
UpnpManager();
Q_INVOKABLE QString urls();
Q_INVOKABLE int freePort();
Q_INVOKABLE void startDiscovery();
Q_INVOKABLE void stopDiscovery();
void writeRespond();
void handleMessage(QString message);
bool handleNotify = false;
bool handleSearch = false;
signals:
void newUrlListRecieved(QString urlList);
private:
QTimer m_broadcastTimer;
QByteArray m_requestMessage = {
"M-SEARCH * HTTP/1.1\r\nHOST: 172.16.31.10:1900\r\nMAN: "
"\"ssdp:discover\"\r\nMX: 1\r\nST: "
"urn:remote:service:dial:1\r\nUSER-AGENT:"
"DemonPresentationBoard\r\n\r\n"};
QString m_respondMessage = {
"NOTIFY * HTTP/1.1\r\nHOST: 172.16.31.10:1900\r\n"
"Location:%1\r\nUSER-AGENT:DemonPresentationBoard\r\n\r\n"};
};
#endif // UPNPMANAGER_H
<|start_filename|>src/qml/Elements/ElementHelper.js<|end_filename|>
var text = "text"
var image = "image"
var media = "media"
var browser = "browser"
var rectangle = "rectangle"
var circle = "circle"
var icon = "icon"
var bracket = "bracket"
var animation = "animation"
var arrow = "arrow"
var table = "table"
var chart = "chart"
function path(element){
switch(element){
case text:
return "qrc:/src/qml/Elements/ElementText.qml"
case image:
return "qrc:/src/qml/Elements/ElementImage.qml"
case media:
return "qrc:/src/qml/Elements/ElementMedia.qml"
case browser:
return "qrc:/src/qml/Elements/ElementImage.qml"
case rectangle:
return "qrc:/src/qml/Elements/ElementRectangle.qml"
case circle:
return "qrc:/src/qml/Elements/ElementCircle.qml"
case icon:
return "qrc:/src/qml/Elements/ElementIcon.qml"
case bracket:
return "qrc:/src/qml/Elements/ElementBracket.qml"
case animation:
return "qrc:/src/qml/Elements/ElementAnimation.qml"
case arrow:
return "qrc:/src/qml/Elements/ElementArrow.qml"
case table:
return "qrc:/src/qml/Elements/ElementTable.qml"
case chart:
return "qrc:/src/qml/Elements/ElementChart.qml"
}
}
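// Illustrative usage sketch (hypothetical QML snippet, not part of the
// original file): resolving an element type to its component URL and
// feeding it to a Loader.
//
//   import "ElementHelper.js" as ElementHelper
//
//   Loader {
//       // loads qrc:/src/qml/Elements/ElementText.qml
//       source: ElementHelper.path(ElementHelper.text)
//   }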
| mshobeyri/Pl-illustrator |
<|start_filename|>assets/shaders/shader.frag<|end_filename|>
#version 450
#extension GL_ARB_separate_shader_objects : enable
// Fragment shader that uses a texture coordinate to sample from a texture
// uniform.
layout(location = 0) in vec2 textureCoord;
layout(set = 0, binding = 1) uniform texture2D backgroundTexture;
layout(set = 0, binding = 2) uniform sampler textureSampler;
layout(location = 0) out vec4 outColor;
void main() {
outColor = texture(sampler2D(backgroundTexture, textureSampler), textureCoord);
}
<|start_filename|>assets/shaders/shader.vert<|end_filename|>
#version 450
#extension GL_ARB_separate_shader_objects : enable
// Vertex shader that applies a uniform matrix transformation to the position
// and directly copies the input texture coordinate to the following fragment
// shader.
layout(location = 0) in vec2 position2D;
layout(location = 1) in vec2 textureCoordInput;
layout(location = 0) out vec2 textureCoordOutput;
layout(set = 0, binding = 0) uniform Transform {
mat4 transform;
};
out gl_PerVertex {
vec4 gl_Position;
};
void main() {
textureCoordOutput = textureCoordInput;
gl_Position = transform * vec4(position2D, 0.0, 1.0);
}
| Schuwi/ampli-Fe |
<|start_filename|>load_test/package.json<|end_filename|>
{
"name": "load_test",
"version": "1.0.0",
"description": "Perform load test of ES modules and CommonJS",
"author": "<NAME>, Amazon Web Services",
"license": "MIT-0",
"devDependencies": {
"artillery": "^1.7.9"
}
}
<|start_filename|>src/package.json<|end_filename|>
{
"name": "aws-lambda-es-module-performance-benchmark",
"type": "module",
"version": "1.0.0",
"description": "Lambda ES Module performance benchmarking",
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.36.1",
"@aws-sdk/client-ssm": "^3.36.1",
"aws-xray-sdk-core": "^3.3.3"
},
"repository": {
"type": "git",
"url": "https://github.com/aws-samples/aws-lambda-es-module-performance-benchmark"
},
"author": "<NAME>, Amazon Web Services",
"license": "MIT-0",
"homepage": "https://aws.amazon.com/blogs/compute/using-node-js-es-modules-and-top-level-await-in-aws-lambda"
}
| buithaibinh/aws-lambda-es-module-performance-benchmark |
<|start_filename|>Dockerfile<|end_filename|>
FROM php:7.1.27-fpm-stretch
LABEL maintainer="<NAME> <<EMAIL>>"
ENV php_conf /usr/local/etc/php/php.ini
ENV fpm_conf /usr/local/etc/php/php-fpm.conf
ENV fpm_conf_dir /usr/local/etc/php-fpm.d/
RUN apt-get update \
&& apt-get install -y autoconf pkg-config libssl-dev
RUN docker-php-ext-install bcmath
RUN docker-php-ext-install sockets
RUN pecl install mongodb-1.2.2 \
&& docker-php-ext-enable mongodb
RUN apt-get update \
&& apt-get install -y libpq-dev \
&& docker-php-ext-install pdo pdo_pgsql
RUN apt-get update \
&& apt-get install -y libzip-dev zip \
&& docker-php-ext-configure zip --with-libzip \
&& docker-php-ext-install zip pcntl
RUN apt-get update \
&& apt-get install -y nginx supervisor cron
RUN mkdir /code
RUN useradd --no-create-home nginx
# tweak php-fpm config
COPY php/php.ini ${php_conf}
COPY php/www.conf.default ${fpm_conf_dir}/www.conf
COPY php/pools/pool-1.conf ${fpm_conf_dir}/pool-1.conf
COPY php/pools/pool-2.conf ${fpm_conf_dir}/pool-2.conf
COPY php/pools/pool-3.conf ${fpm_conf_dir}/pool-3.conf
COPY nginx/nginx.conf /etc/nginx/nginx.conf
COPY nginx/php.conf /etc/nginx/php.conf
COPY nginx/host.conf /etc/nginx/conf.d/default.conf
# add cron runner script
COPY cron.sh /cron.sh
COPY supervisord.conf /etc/supervisor/supervisord.conf
# install cronitor cli
WORKDIR /tmp
RUN curl -sOL https://cronitor.io/dl/cronitor-stable-linux-amd64.tgz
RUN tar xvf cronitor-stable-linux-amd64.tgz -C /usr/bin/
RUN rm cronitor-stable-linux-amd64.tgz
WORKDIR /code
EXPOSE 443 80
CMD /usr/bin/supervisord -n -c /etc/supervisor/supervisord.conf
| Edipyanik/docker-nginx-php |
<|start_filename|>Makefile<|end_filename|>
VERSION=$$(cat VERSION)
default: test
deep_clean: clean
(rm -fr ./target ; rm -fr ./project/project ; rm -fr ./project/target) || echo "it's clean"
clean:
sbt clean
fmt:
sbt soteriaCheckScalaFmtRun
publishLocal:
sbt 'set isSnapshot := true' publishLocal
publish: test publish_only
publish_only:
git tag -a $(VERSION) -m $(VERSION)
git push origin $(VERSION)
have_right_version:
cat ./project/soteria.sbt | grep `cat ./VERSION | cut -d "v" -f 2` && \
echo "Plugin have right version!"
# https://www.scala-sbt.org/1.x/docs/Testing-sbt-plugins.html
test_plugin:
sbt '; set isSnapshot := true ; scripted'
# https://stackoverflow.com/a/42430476
test_plugin_one:
sbt "; set isSnapshot := true ; scripted $(TEST_NAME)"
test_coverage_run:
sbt ";clean; coverage; test; coverageReport"
open ./target/scala-2.12/sbt-1.0/scoverage-report/index.html
test_coverage:
sbt ";clean; coverage; test; coverageReport; coveralls"
check_style:
sbt soteriaCheckScalaFmt
unit_test:
sbt ";clean; test"
test: deep_clean check_style publishLocal unit_test test_plugin
mutator_test:
export SBT_OPTS="-XX:+CMSClassUnloadingEnabled -Xmx4G"
sbt 'set logLevel in Test := Level.Error' 'set parallelExecution in Test := true' 'set soteriaSoftOnCompilerWarning := true' stryker
mutator_open_results:
open `find ./target/stryker4s* -type f -iname "*index.html"`
mutator_test_run: mutator_test mutator_open_results
| leobenkel/Soteria |
<|start_filename|>test/bundle/run-default-parameters.js<|end_filename|>
require('../../example/src/features/default-parameters')('Bruno');
<|start_filename|>test/bundle/run-classes.js<|end_filename|>
require('../../example/src/features/classes')();
<|start_filename|>index.js<|end_filename|>
'use strict';
var through = require('through')
, compile = require('./compile')
, crypto = require('crypto')
, path = require('path')
, runtime = require.resolve(require('traceur').RUNTIME_PATH)
, cache = {};
function getHash(data) {
return crypto
.createHash('md5')
.update(data)
.digest('hex');
}
/**
* Compile function, exposed to be used from other libraries, not needed when using es6ify as a transform.
*
* @name es6ify::compileFile
* @function
* @param {string} file name of the file that is being compiled to ES5
* @param {string} src source of the file being compiled to ES5
* @return {string} compiled source
*/
function compileFile(file, src) {
var compiled;
compiled = compile(file, src, exports.traceurOverrides);
if (compiled.error) throw new Error(compiled.error);
return compiled.source;
}
function es6ify(filePattern) {
filePattern = filePattern || /\.js$/;
return function (file) {
// Don't es6ify the traceur runtime
if (file === runtime) return through();
if (!filePattern.test(file)) return through();
var data = '';
return through(write, end);
function write (buf) { data += buf; }
function end () {
var hash = getHash(data)
, cached = cache[file];
if (!cached || cached.hash !== hash) {
try {
cache[file] = { compiled: compileFile(file, data), hash: hash };
} catch (ex) {
this.emit('error', ex);
return this.queue(null);
}
}
this.queue(cache[file].compiled);
this.queue(null);
}
};
}
/**
* The es6ify transform to be used with browserify.
*
* #### Example
*
* `browserify().transform(es6ify)`
*
* @name es6ify
* @function
* @return {function} function that returns a `TransformStream` when called with a `file`
*/
exports = module.exports = es6ify();
/**
* Configurable es6ify transform function that allows specifying the `filePattern` of files to be compiled.
*
* @name es6ify::configure
* @function
 * @param {string=} filePattern (default: `/\.js$/`) pattern of files that will be es6ified
* @return {function} function that returns a `TransformStream` when called with a `file`
*/
exports.configure = es6ify;
/**
* The traceur runtime exposed here so it can be included in the bundle via:
*
* `browserify.add(es6ify.runtime)`
*
* ### Note
*
* The runtime is quite large and not needed for all ES6 features and therefore not added to the bundle by default.
* See [this comment](https://github.com/google/traceur-compiler/issues/1007#issuecomment-42837067) for details.
*
 * @name es6ify::runtime
*/
exports.runtime = runtime;
exports.compileFile = compileFile;
/**
* Allows to override traceur compiler defaults.
*
* ### Example
*
* In order to support async functions (`async`/`await`) do:
*
* `es6ify.traceurOverrides = { asyncFunctions: true }`
*
* @name es6ify::traceurOverrides
*/
exports.traceurOverrides = {};
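// Illustrative usage sketch (not part of the original file): wiring the
// exports documented above into a browserify build. The entry and output
// paths are hypothetical.
//
//   var browserify = require('browserify');
//   var es6ify = require('es6ify');
//   var fs = require('fs');
//
//   es6ify.traceurOverrides = { asyncFunctions: true };
//
//   browserify()
//     .add(es6ify.runtime)        // include the traceur runtime in the bundle
//     .transform(es6ify)          // compile ES6 to ES5 on the fly
//     .require('./src/main.js', { entry: true })
//     .bundle()
//     .pipe(fs.createWriteStream('./bundle.js'));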
<|start_filename|>test/transform.js<|end_filename|>
'use strict';
/*jshint asi: true */
var test = require('tap').test
, fs = require('fs')
, path = require('path')
, through = require('through')
, convert = require('convert-source-map')
, compile = require('../compile')
, proxyquire = require('proxyquire')
test('transform adds sourcemap comment and uses cache on second time', function (t) {
t.plan(3);
var data = '';
var compiles = 0;
function trackingCompile() {
compiles++;
var args = [].slice.call(arguments);
return compile.apply(this, args);
}
var es6ify = proxyquire('..', { './compile' : trackingCompile } )
var file = path.join(__dirname, '../example/src/features/iterators.js');
// first time
fs.createReadStream(file)
.pipe(es6ify(file))
.on('error', console.error)
.pipe(through(write));
// second time
fs.createReadStream(file)
.pipe(es6ify(file))
.on('error', console.error)
.pipe(through(write, end));
function write (buf) { data += buf; }
function end () {
var sourceMap = convert.fromSource(data).toObject();
// Traceur converts all \s to /s so we need to do so also before comparing
var fileConverted = file.replace(/\\/g, '/');
var sourceRootConverted = path.join(path.dirname(file), path.sep).replace(/\\/g, '/');
t.deepEqual(
sourceMap
, { version: 3,
file: fileConverted,
sources: [ fileConverted, '@traceur/generated/TemplateParser/1' ],
names: [],
mappings: sourceMap.mappings,
sourceRoot: sourceRootConverted,
sourcesContent: [
'module.exports = function () {\n' +
' for (let element of [1, 2, 3]) {\n' +
' console.log(\'element:\', element);\n' +
' }\n' +
'};\n',
'\n for (var $__placeholder__0 =\n' +
' $__placeholder__1[\n' +
' $traceurRuntime.toProperty(Symbol.iterator)](),\n' +
' $__placeholder__2;\n' +
' !($__placeholder__3 = $__placeholder__4.next()).done; ) {\n' +
' $__placeholder__5;\n' +
' $__placeholder__6;\n' +
' }'
] }
, 'adds sourcemap comment including original source'
);
t.ok(sourceMap.mappings.length);
t.equal(compiles, 1, 'compiles only the first time');
}
});
test('transform does not add sourcemaps if traceurOverrides.sourceMaps is false', function (t) {
t.plan(1);
var data = '';
var es6ify = require('..');
var file = path.join(__dirname, '../example/src/features/iterators.js');
es6ify.traceurOverrides = { sourceMaps: false };
fs.createReadStream(file)
.pipe(es6ify(file))
.on('error', function (e) { throw e; })
.pipe(through(write, end));
function write (buf) { data += buf; }
function end () {
var sourceMap = convert.fromSource(data);
t.ok(sourceMap === null);
}
})
<|start_filename|>example/src/features/default-parameters.js<|end_filename|>
module.exports = function (name, codes = 'JavaScript', livesIn = 'USA') {
console.log('name: %s, codes: %s, lives in: %s', name, codes, livesIn);
};
<|start_filename|>test/errors.js<|end_filename|>
'use strict';
/*jshint asi: true */
var test = require('tap').test;
var browserify = require('browserify');
var es6ify = require('..');
var path = require('path');
test('\nsyntax error', function (t) {
browserify()
.transform(es6ify)
.require(__dirname + '/bundle/syntax-error.js', { entry: true })
.bundle(function (err, src) {
t.similar(err.message, /bundle\/syntax-error\.js\:1\:10\: Unexpected token \(/, 'returns the syntax error');
t.end();
});
})
test('\ncompiling block-scope with blockBinding: false', function (t) {
es6ify.traceurOverrides = { blockBinding: false };
browserify()
.transform(es6ify)
.require(__dirname + '/../example/src/features/block-scope.js', { entry: true })
.bundle(function (err, src) {
t.similar(err.message, /Unexpected token let/, 'returns error indicating that let is not supported')
es6ify.traceurOverrides = null;
t.end();
});
});
<|start_filename|>example/src/features/iterators.js<|end_filename|>
module.exports = function () {
for (let element of [1, 2, 3]) {
console.log('element:', element);
}
};
<|start_filename|>example/src/features/spread-operator.js<|end_filename|>
module.exports = function (numbers) {
function add(x, y) {
console.log('%d + %d = %d', x, y, x + y);
}
add(...numbers);
};
<|start_filename|>example/src/make-monster.js<|end_filename|>
class Character {
constructor(x, y, name) {
this.x = x;
this.y = y;
}
attack(character) {
console.log('attacking', character);
}
}
class Monster extends Character {
constructor(x, y, name) {
super(x, y);
this.name = name;
this.health_ = 100;
}
attack(character) {
super.attack(character);
}
get isAlive() { return this.health > 0; }
get health() { return this.health_; }
set health(value) {
if (value < 0) throw new Error('Health must be non-negative.');
this.health_ = value;
}
}
module.exports = function (x, y, name) {
return new Monster(x, y, name);
};
<|start_filename|>test/bundle.js<|end_filename|>
'use strict';
/*jshint asi: true */
var test = require('tap').test;
var browserify = require('browserify');
var vm = require('vm');
var es6ify = require('..');
var compile = require('../compile');
var format = require('util').format;
[ [ 'run-destructuring' , [ 'hello, world' ], true ]
//, [ 'run-block-scope' , [ 'tmp is undefined: true' ] , false ]
, [ 'run-default-parameters', [ 'name: Bruno, codes: JavaScript, lives in: USA' ] ]
, [ 'run-rest-parameters' , ['list fruits has the following items', 'apple', 'banana' ] ]
, [ 'run-classes' , [ 'An instance of Foo says hi from its .toString()!' ], true ]
, [ 'run-spread-operator' , [ '3 + 4 = 7' ], true ]
, [ 'run-combined'
, [ 'hello, world'
// , 'tmp is undefined: true'
, 'An instance of Foo says hi from its .toString()!'
, 'name: Bruno, codes: JavaScript, lives in: USA'
, 'list fruits has the following items', 'apple', 'banana'
, '3 + 4 = 7'
]
, true
]
].forEach(function (row) {
var filename = row[0];
var expectedLogs = row[1];
var useRuntime = !!row[2];
var overrides = row[3] || {};
test('\nbundle ' + (useRuntime ? 'with' : 'without') + ' traceur runtime - ' + filename, function (t) {
t.plan(expectedLogs.length)
es6ify.traceurOverrides = overrides;
var bfy = browserify();
if (useRuntime) bfy.add(es6ify.runtime);
bfy
.transform(es6ify)
.require(__dirname + '/bundle/' + filename + '.js', { entry: true })
.bundle(function (err, src) {
if (err) t.fail(err);
src = 'window=this;'+src;
vm.runInNewContext(src, {
window: {},
console: { log: log }
});
t.end()
});
function log () {
var args = [].slice.call(arguments);
var output = format.apply(null, args);
t.equal(output, expectedLogs.shift());
}
})
})
<|start_filename|>example/src/features/classes.js<|end_filename|>
class Foo {
toString() {
return 'An instance of Foo says hi from its .toString()!';
}
}
module.exports = function () {
console.log(new Foo().toString());
};
<|start_filename|>example/src/features/rest-parameters.js<|end_filename|>
module.exports = function printList(listname, ...items) {
console.log('list %s has the following items', listname);
items.forEach(function (item) { console.log(item); });
};
<|start_filename|>test/bundle/run-destructuring.js<|end_filename|>
require('../../example/src/features/destructuring')();
<|start_filename|>example/src/features/block-scope.js<|end_filename|>
module.exports = function () {
var a = 2
, b = 3
;
{
let tmp = a;
a = b;
b = tmp;
}
console.log('tmp is undefined: ', typeof tmp == 'undefined');
};
<|start_filename|>test/bundle/run-spread-operator.js<|end_filename|>
require('../../example/src/features/spread-operator')([3, 4]);
<|start_filename|>example/src/features/destructuring.js<|end_filename|>
module.exports = function () {
var [a, [b], c, d] = ['hello', [', ', 'junk'], ['world']];
console.log(a + b + c); // hello world
};
<|start_filename|>test/bundle/run-block-scope.js<|end_filename|>
require('../../example/src/features/block-scope')();
<|start_filename|>example/src/features/generators.js<|end_filename|>
// A binary tree class.
function Tree(left, label, right) {
this.left = left;
this.label = label;
this.right = right;
}
// A recursive generator that iterates the Tree labels in-order.
function* inorder(t) {
if (t) {
yield* inorder(t.left);
yield t.label;
yield* inorder(t.right);
}
}
// Make a tree
function make(array) {
// Leaf node:
if (array.length == 1) return new Tree(null, array[0], null);
return new Tree(make(array[0]), array[1], make(array[2]));
}
module.exports = function () {
let tree = make([[['a'], 'b', ['c']], 'd', [['e'], 'f', ['g']]]);
console.log('generating tree labels in order:');
// Iterate over it
for (let node of inorder(tree)) {
console.log(node); // a, b, c, d, ...
}
};
<|start_filename|>example/src/features/index.js<|end_filename|>
module.exports = {
blockScope : require('./block-scope')
, destructuring : require('./destructuring')
, classes : require('./classes')
, generators : require('./generators')
, iterators : require('./iterators')
, defaultParameters : require('./default-parameters')
, restParameters : require('./rest-parameters')
, spreadOperator : require('./spread-operator')
, arrowFunctions : require('./arrow-functions')
};
<|start_filename|>example/public/index.html<|end_filename|>
<!DOCTYPE html>
<html>
<head>
<meta charset=utf-8 />
<title>Browserified ES6 Bundle</title>
</head>
<body>
<p>Open dev tools and look in console ;)</p>
<p>Make sure you have source maps enabled in settings.</p>
<p>Most examples are derived from
<a href="https://code.google.com/p/traceur-compiler/wiki/LanguageFeatures">
traceur language features
</a>
.</p>
</body>
<script type="text/javascript" src="./js/bundle.js"></script>
</html>
<|start_filename|>package.json<|end_filename|>
{
"name": "es6ify",
"version": "1.6.0",
"description": "browserify v2 transform to compile JavaScript.next (ES6) to JavaScript.current (ES5) on the fly.",
"main": "es6ify.js",
"scripts": {
"docme": "JSDOC_GITHUBIFY_BRANCH=master docme README.md -- --configure .jsdocrc",
"test": "tap test/*.js"
},
"repository": "thlorenz/es6ify",
"homepage": "https://thlorenz.github.io/es6ify/",
"dependencies": {
"through": "~2.2.7",
"traceur": "0.0.79",
"xtend": "~2.2.0"
},
"devDependencies": {
"tap": "~0.4.0",
"proxyquire": "~0.4.0",
"convert-source-map": "~0.4.1",
"browserify": "~5.12.1"
},
"keywords": [
"traceur",
"JavaScript.next",
"ES6",
"browserify",
"browserify-transform",
"v2",
"js",
"plugin",
"transform"
],
"author": {
"name": "<NAME>",
"email": "<EMAIL>",
"url": "http://thlorenz.com"
},
"license": "MIT",
"engine": {
"node": ">=0.8"
}
}
<|start_filename|>test/bundle/run-rest-parameters.js<|end_filename|>
require('../../example/src/features/rest-parameters')('fruits', 'apple', 'banana');
| baileyparker/es6ify |
<|start_filename|>include/arith.h<|end_filename|>
/* $Id: arith.h 6 2007-01-22 00:45:22Z drhanson $ */
extern int Arith_max(int x, int y);
extern int Arith_min(int x, int y);
extern int Arith_div(int x, int y);
extern int Arith_mod(int x, int y);
extern int Arith_ceiling(int x, int y);
extern int Arith_floor(int x, int y);
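/* A minimal usage sketch (not part of the original header), assuming the usual
 * CII semantics where Arith_div rounds toward negative infinity and Arith_mod
 * agrees with that rounding:
 *
 *   Arith_div(13, 5)  ==  2;   Arith_mod(13, 5)  == 3;
 *   Arith_div(-13, 5) == -3;   Arith_mod(-13, 5) == 2;   // C's -13/5 truncates to -2
 */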
| IdiotForces/cii |
<|start_filename|>common/constants.js<|end_filename|>
export const PROJECT_LIST = '/common/constants.js';
export const PROJECT_DETAIL = '/common/constants.js';
<|start_filename|>public/styles/style.css<|end_filename|>
body {
font: 16px microsoft yahei, simsun, sans-serif;
}
*{
font-size: 100%;
margin: 0;
padding: 0;
}
a{
text-decoration:none;
}
ul,li{
list-style: none
}
#header{
height: 80px;
line-height: 80px;
color: #fff;
margin-bottom: 25px;
overflow: hidden;
background: #2e74c3;
box-shadow: 0px 0px 10px 0px #000;
}
.header-name{
width: 1200px;
margin:0 auto;
height: 80px;
position: relative;
}
#header h1 {
float: left;
padding-right: 20px;
margin-top: 23px;
line-height: 34px;
border-right: 4px solid #fff;
display: inline-block;
}
.create-btn{
margin-left: 40px;
color:#fff;
padding: 10px 20px;
background-color:#ff9a14 ;
height: 30px;
line-height: 30px;
float: right;
font-size: 20px;
}
.act{
-webkit-transition-property: transform;
transition-property: transform;
-webkit-transition: 0.3s;
transition: 0.3s;
}
.act:hover{
transform:scale(1.2);
-webkit-transform:scale(1.2);
-moz-transform:scale(1.2);
-o-transform:scale(1.2);
-ms-transform:scale(1.2);
}
.search-tab{
float: right;
height: 50px;
margin-top: 15px
}
.search-btn{
margin-left: 10px;
background-color: #fff;
padding: 8px 14px;
height: 20px;
color: #333;
line-height: 20px;
margin-top: 7px;
float: right;
cursor: pointer;
}
.search{
margin-left: 10px;
background-color: #fff;
padding: 8px 14px;
height: 20px;
color: #333;
line-height: 20px;
margin-top: 4px;
float: right;
width: 500px;
border: 3px solid #cbcbcb;
}
.container{
width: 1200px;
margin:0 auto;
}
.searchlist li{
height: 80px;
width: 396px;
box-sizing: border-box;
padding: 10px;
display: inline-block;
}
.item{
height:100px;
width: 380px;
background-color: #75caf4;
display: block;
cursor: pointer;
overflow: hidden;
color:#fff;
padding: 10px;
line-height: 40px;
box-sizing: border-box;
}
.item>p{
width: 360px;
height: 50px;
line-height: 50px;
}
.view{
color:#fff;
width: 120px;
display: inline-block;
float: left;
text-align: center;
}
.name{
height: 80px;
margin: 10px 0;
}
.apiInfo{
font-size: 25px;
font-weight: bold;
color: #666;
line-height: 60px;
height: 60px;
}
.namespan{
height: 80px;
width: 200px;
line-height: 80px;
background-color: #83b157;
display: inline-block;
color:#fff;
text-align: center;
font-size: 25px
}
.ti1{
float: right;
width: 1000px;
background-color: #454545;
height: 80px
}
.nameinput{
height: 80px;
border: 0px solid transparent;
background-color: transparent;
font-size: 20px;
padding: 25px 20px;
width: 100%;
box-sizing: border-box;
display: inline-block;
color: #fff;
outline: none
}
.edit{
width: 1200px;
height:700px;
border: 3px solid #cbcbcb;
box-sizing: border-box;
margin-top: 40px
}
.edit_left{
width: 590px;
padding: 10px 20px;
display: inline-block;
box-sizing: border-box;
}
.edit_left em{
font-size: 25px;
text-align: center;
display: block;
line-height: 50px
}
.notice{
font-size: 12px;
line-height: 50px;
font-style: normal;
color:#DF5B6D;
}
.string-edit{
display: block;
width: 490px;
height: 600px;
margin:0;
padding: 5px;
outline: 1px solid #ccc;
border: 1px solid #ccc;
}
.json-show{
width: 100%;
height: 610px;
margin: 0;
/*padding: 5px;*/
box-sizing: border-box;
float: right;
}
.info p{
font-size: 25px;
padding: 5px
}
.info-edit{
display: block;
width: 1190px;
height: 100px;
margin:0;
padding: 5px;
}
.ok-btn{
font-size: 18px;
padding: 22px 75px;
background: #ff9a14;
margin:50px auto;
color:#fff;
text-align: center;
box-sizing: border-box;
display: inline-block;
cursor: pointer;
}
#view-btn{
height: 30px;
width: 100px;
position: absolute;
margin-left: -76px;
background-color: #83b157;
border: none;
border-radius: 4px;
color:#fff;
cursor: pointer;
margin-top: 20px;
outline: none
}
#view-btn:active{
background-color: #5B8C2C;
}
pre {outline: 2px solid #ccc; padding: 5px; margin: 5px; }
.string { color: green; }
.number { color: darkorange; }
.boolean { color: blue; }
.null { color: magenta; }
.key { color: red; }
.bottom{
margin-top: 100px;
padding:0 50px ;
box-sizing: border-box;
}
.morelink{
color: #fff;
background-size: 30px;
background-repeat: no-repeat;
background-position: 0;
height: 30px;
display: inline-block;
padding-left: 40px;
line-height: 30px;
margin-right: 100px
}
.guide{
background-image: url(../images/guide.png);
}
.github{
background-image: url(../images/github.png);
}
#api-introduce{
width: 1180px;
height: 200px;
padding: 10px
}
#warn{
color:#f10027;
font-size: 16px;
height: 20px;
line-height: 20px;
display: none;
position: absolute;
}
.K{
}
<|start_filename|>models/apis.js<|end_filename|>
const db = require('../db')
const Sequelize = require('sequelize')
const Apis = db.define('apis', {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true
},
name: {
type: Sequelize.STRING,
allowNull: false
},
desc: {
type: Sequelize.STRING
},
content: {
type: Sequelize.STRING,
allowNull: false
},
project_id: {
type: Sequelize.UUID,
allowNull: false
},
url: {
type: Sequelize.STRING,
allowNull: false
},
state: {
type: Sequelize.INTEGER,
allowNull: true
}
})
function addApi(api) {
return Apis.create({
name: api.name,
desc: api.desc,
content: api.content,
project_id: api.projectId,
url: api.url,
state: 1
})
}
function selectOneApi(id) {
return Apis.findOne({
where: {
id: id,
state: 1
},
raw: true
})
}
function findOneApiByUrl (name) {
return Apis.findOne({
where: {
url: name,
state: 1
},
raw: true
})
}
function selectAllApi(id) {
return Apis.findAll({
where: {
project_id: id,
state: 1
},
raw: true
})
}
function selectApiByCondiction (condiction) {
return Apis.findAll({
where: condiction,
raw: true
})
}
function deleteApi(id) {
return Apis.update({
state: 0
}, {
where: { id: id }
})
// return Apis.destroy({
// where: { id: id }
// })
}
function deleteProjectApis (projectId) {
return Apis.update({
state: 0
}, {
where: { project_id: projectId }
})
// return Apis.destroy({
// where: { project_id: projectId }
// })
}
function updateApi (content, condition) {
return Apis.update(content, condition)
}
module.exports = {
addApi,
selectAllApi,
selectOneApi,
deleteApi,
updateApi,
deleteProjectApis,
selectApiByCondiction,
findOneApiByUrl
}
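// Usage sketch (illustrative only; the id and project id below are placeholders):
// "deleting" an API merely flips its state flag to 0, so it stops matching
// selectOneApi/selectAllApi (both filter on state: 1) while the row itself is kept.
//
// addApi({ name: 'demo', desc: '', content: '{}', projectId: someProjectId, url: '/demo' })
//   .then(created => deleteApi(created.id).then(() => selectOneApi(created.id)))
//   .then(found => console.log(found)) // null, because state is now 0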
<|start_filename|>public/styles/main.css<|end_filename|>
a:hover, a:visited, a:link, a:active {
text-decoration: none;
}
.header {
height: 60px;
background-color: #5D4384;
width: 100%;
color: #fff;
padding: 0 40px;
line-height: 60px;
}
.header .title {
font-size: 20px;
color: #fff;
}
.header .title-icon {
font-size: 30px;
vertical-align: sub;
}
.crumb {
padding: 8px 50px;
}
.header .github{
content: '';
width: 30px;
height: 30px;
background-size: contain;
display: inline-block;
background-repeat: no-repeat;
background-image: url(../images/github.png);
float: right;
margin-top: 15px;
}
.header .support {
color: #fff;
background-size: 30px;
background-repeat: no-repeat;
background-position: 0;
height: 30px;
display: inline-block;
padding-left: 40px;
line-height: 30px;
margin-right: 100px;
}
.edit-bar {
background-color: #555;
color: #FFF;
border-bottom: solid 1px #F3F3F3;
font-size: 78%;
padding: 10px 0 10px 20px;
}
.edit-area {
width: 100%;
min-height: 500px;
}
.edit-remark {
min-height: 200px;
margin-bottom: 15px;
}
.mb-15 {
margin-bottom: 15px;
}
<|start_filename|>models/projects.js<|end_filename|>
const db = require('../db')
const Sequelize = require('sequelize')
const Projects = db.define('projects', {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true
},
name: {
type: Sequelize.STRING,
allowNull: false,
unique: true
},
desc: {
type: Sequelize.STRING
},
url: {
type: Sequelize.STRING,
allowNull: false
},
state: {
type: Sequelize.INTEGER,
allowNull: true
}
})
function addProject(project) {
return Projects.create({
name: project.name,
desc: project.desc,
url: project.url
})
}
function selectOneProject (id) {
return Projects.findOne({
where: { id: id },
raw: true
})
}
function selectAllProject() {
return Projects.findAll({ raw: true })
}
function deleteProject(id) {
return Projects.destroy({
where: { id: id }
})
}
module.exports = {
addProject,
selectAllProject,
deleteProject,
selectOneProject
}
<|start_filename|>app.js<|end_filename|>
/**
* Module dependencies.
*/
var express = require('express')
, routes = require('./routes/index')
, bodyParser = require('body-parser')
, partials = require('express-partials')
, path = require('path')
, app = express()
, fs = require('fs')
, cors = require('cors');
try {
fs.accessSync(`${process.cwd()}/mock_db/app.sqlite`, fs.F_OK)
console.log('connecting the mock database')
} catch (e) {
fs.mkdirSync('mock_db')
try {
// fs.copyFileSync is synchronous and throws on failure; it does not take a callback
fs.copyFileSync(path.resolve(__dirname, './db/app.sqlite'), `${process.cwd()}/mock_db/app.sqlite`)
console.log('create mock database successfully.')
} catch (copyErr) {
console.log('create mock database failed.')
}
}
// Configuration
app.use(bodyParser.urlencoded({ extended: true }))
app.use(bodyParser.json())
app.use(function(req, res, next) {
res.header('Access-Control-Allow-Credentials', 'true');
res.header("Access-Control-Allow-Origin", "http://localhost:8888");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Access-Control-Allow-Origin");
next();
});
app.use(express.static(__dirname + '/public'));
// app.use(express.static(__dirname + '/views'));
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');
app.use(partials());
// app.use(express.Router(routes)); // automatically parse urls
routes(app);
module.exports = app;
// console.log("Express server listening on port %d in %s mode", app.address().port, app.settings.env);
<|start_filename|>db.js<|end_filename|>
'use strict'
const Sequelize = require('sequelize')
const path = require('path')
const db = new Sequelize('app', 'app', 'app', {
dialect: 'sqlite',
sync: { force: true },
storage: path.resolve(__dirname, './', 'mock_db/app.sqlite')
})
db
.authenticate()
.then(() => {
console.log('Connection has been established successfully.')
})
.catch(err => {
console.error('Unable to connect to the database:', err)
})
module.exports = db
<|start_filename|>routes/index.js<|end_filename|>
module.exports = function(app) {
app.use('/', require('./list'));
app.use('/', require('./detail'));
app.use(function(req, res, next) {
var err = new Error('Not Found')
err.status = 404;
next(err);
});
app.use(function(err, req, res, next){
// res.status(404).send('Page not found, redirecting you to the home page~')
res.redirect('/list');
})
};
<|start_filename|>common/utils.js<|end_filename|>
var fs = require('fs')
const PROJECT_LIST = './json/projectList.json'
const PROJECT_DETAIL = './json/projectDetail.json'
/**
* Create nested directories, one missing level of the path at a time
*
* @param {any} url
* @param {any} mode
* @param {any} cb
*/
var mkdirSync = (url, mode, cb) => {
var path = require("path"),
arr = url.split("/");
mode = mode || 0755;
cb = cb || function () {};
if (arr[0] === ".") { // handle paths like ./aaa
arr.shift();
}
if (arr[0] == "..") { // handle paths like ../ddd/d
arr.splice(0, 2, arr[0] + "/" + arr[1])
}
function inner(cur) {
if (!fs.existsSync(cur)) { // create the directory if it does not exist
fs.mkdirSync(cur, mode)
}
if (arr.length) {
inner(cur + "/" + arr.shift());
} else {
cb();
}
}
arr.length && inner(arr.shift());
}
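// Usage sketch (hypothetical path): creates "./json/projects/archive" one segment
// at a time, then runs the callback once every level exists.
// mkdirSync('./json/projects/archive', 0755, function () { console.log('dirs ready') })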
var getReadPromise = (file) => {
return new Promise((resolve, reject) => {
var ret = fs.readFileSync(file);
ret ? resolve(ret) : reject(ret);
});
}
var getProjectList = () => {
return getReadPromise(PROJECT_LIST);
}
var getProjectDetail = () => {
return getReadPromise(PROJECT_DETAIL);
}
var writeProjectList = (content) => {
fs.writeFileSync(PROJECT_LIST, content);
}
var writeProjectDetail = (content) => {
fs.writeFileSync(PROJECT_DETAIL, content);
}
var deleteFolder = function(path) {
var files = [];
if( fs.existsSync(path) ) {
files = fs.readdirSync(path);
files.forEach(function(file,index){
var curPath = path + "/" + file;
if(fs.statSync(curPath).isDirectory()) { // recurse into sub-directories
deleteFolder(curPath);
} else { // delete the file
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(path);
}
};
module.exports = {
mkdirSync,
getReadPromise,
deleteFolder,
getProjectDetail,
getProjectList,
writeProjectDetail,
writeProjectList
}
<|start_filename|>routes/list.js<|end_filename|>
var express = require('express');
var fs = require('fs');
var path = require('path');
var router = express.Router();
var util = require('../common/utils');
var project = require('../models/projects')
var api = require('../models/apis')
// project list (home) page
router.get('/list', (req, res) => {
project.selectAllProject().then(list => {
if (list.length > 0) {
res.render('project_list', {
haveList: true,
dataList: list,
page: 'list'
})
} else {
res.render('project_list', {
haveList: false,
dataList: [],
page: 'list'
})
}
})
.catch((response) => {
res.render('project_list', {
haveList: false,
dataList: [],
page: 'list'
})
})
})
router.post('/list/create', (req, res) => {
var name = req.body.name;
var url = req.body.url;
var desc = req.body.desc;
project.addProject(req.body).then(function () {
res.status(200).json({msg: '创建成功!'}).end()
})
})
router.post('/list/delete', (req, res) => {
var id = req.body.id;
api.deleteProjectApis(id)
project.deleteProject(id)
res.json({
code: 2000,
msg: '删除成功!'
})
})
router.get('/list/download/:projectName', (req, res) => {
var projectName = req.params.projectName;
var pathName = path.resolve(__dirname, '../json/');
var options = {
root: pathName,
}
res.sendFile(projectName, options, function(err){
console.log(err)
})
// res.download(pathName, projectName);
// res.download('')
})
module.exports = router;
<|start_filename|>routes/detail.js<|end_filename|>
var fs = require('fs')
var express = require('express')
var router = express.Router()
var util = require('../common/utils')
var project = require('../models/projects')
var api = require('../models/apis')
router.get('/detail/:projectId', (req, res) => {
var projectId = req.params.projectId
project.selectOneProject(projectId).then(selectedProject => {
api.selectAllApi(projectId).then(list => {
res.render('project_detail', {
haveList: true,
list: list,
projectUrl: selectedProject.url,
projectId: selectedProject.id,
page: 'list'
})
})
})
})
// save the JSON for an API
router.post('/detail/save', (req, res) => {
var name = req.body.name.replace(/\s/g, ''),
url = req.body.url.replace(/\s/g, ''),
projectId = req.body.projectId.replace(/\s/g, ''),
projectName = req.body.projectName,
apiId = req.body.apiId,
desc = req.body.desc,
content = req.body.content
if (url && content) {
if (!apiId) {
api.addApi({
name: name,
desc: desc,
content: content,
projectId: projectId,
url: url
}).then(function() {
res.json({
success: true,
message: "保存成功"
})
})
} else {
api.updateApi({
name: name,
desc: desc,
content: content,
url: url
},{
where: {
id: apiId
}
}).then(function() {
res.json({
success: true,
message: "更新成功"
})
})
}
} else {
res.json({
success: false,
message: "名称或url不能为空"
})
}
})
// edit API page
router.get('/detail/edit/:apiId', (req, res) => {
var apiId = req.params.apiId
if (!apiId) {
res.redirect('/')
} else {
api.selectOneApi(apiId)
.then(api => {
project.selectOneProject(api.project_id).then(project => {
var projectName = project.name
var projectId = project.id
res.render('create', {
isEdit: true,
api: api,
projectName: projectName,
projectId: projectId
})
})
})
}
})
// delete an API
router.post("/detail/delete", (req, res) => {
var id = req.body.id.replace(/\s/g, ""),
del = api.deleteApi(id)
del.then((response) => {
res.json({
code: 0,
msg : '删除成功!'
})
}).catch((e) => {
res.status(500).json({
msg: '删除出错!'
})
})
})
// create API page
router.get('/create/:projectId', (req, res) => {
var projectId = req.params.projectId
project.selectOneProject(projectId).then(project => {
res.render('create', {
isEdit: false,
projectName: project.name,
projectId: project.id,
projectUrl: project.url,
})
})
})
// search APIs by url
router.post('/detail/search', (req, res) => {
var url = req.body.url;
var projectId = req.body.projectId;
api.selectApiByCondiction({
url: {
$like: '%' + url + '%'
},
state: 1,
project_id: projectId
}).then(list => {
res.json(list)
})
})
// fetch a single API's data by id
router.all('/api/:apiId', (req, res) => {
var id = req.params.apiId
api.selectOneApi(id).then(api => {
res.json(JSON.parse(api.content))
}, () => {
res.status(404).end()
})
})
router.get('*', (req, res) => {
if (req.url) {
api.findOneApiByUrl(req.url).then(data => {
if (data) {
res.json(JSON.parse(data.content))
}
})
}
})
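// Usage sketch (hypothetical URL): if an API was saved with url '/users/list',
// any request to that path falls through to the catch-all route above;
// findOneApiByUrl looks it up by req.url and the stored JSON content is
// returned as the mocked response.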
module.exports = router;
| zouyifeng/node-quick-mock
<|start_filename|>ExtentReports/ExtentReports/Model/Log.cs<|end_filename|>
using System;
using AventStack.ExtentReports.MarkupUtils;
using MongoDB.Bson;
namespace AventStack.ExtentReports.Model
{
[Serializable]
public class Log : IAddsMedia<Log>, IRunResult
{
public DateTime Timestamp { get; set; }
public Status Status { get; set; }
public ObjectId ObjectId { get; set; }
public int Sequence = 0;
public IMarkup Markup;
private ExtentTest _parent;
private ScreenCapture _screenCapture;
private string _details;
private Log()
{
Timestamp = DateTime.Now;
}
public Log(Test test) : this()
{
ParentModel = test;
}
public Log(ExtentTest extentTest) : this()
{
_parent = extentTest;
ParentModel = _parent.GetModel();
}
public string Details
{
get
{
if (_screenCapture != null)
return _details + _screenCapture.Source;
return _details;
}
set
{
_details = value;
}
}
public ScreenCapture ScreenCapture
{
get
{
return (ScreenCapture)_screenCapture;
}
set
{
_screenCapture = value;
_screenCapture.TestObjectId = ParentModel.ObjectId;
}
}
public Boolean HasScreenCapture()
{
return _screenCapture != null;
}
public Test ParentModel
{
get; private set;
}
public ExtentTest Parent
{
get
{
return _parent;
}
private set
{
_parent = value;
ParentModel = value.GetModel();
}
}
}
}
| anshooarora/extentreports-.net
<|start_filename|>firmware/examples/MAX17043_Simple.cpp<|end_filename|>
/******************************************************************************
MAX17043_Simple_Serial.cpp
SparkFun MAX17043 Example Code
<NAME> @ SparkFun Electronics
Original Creation Date: June 22, 2015
https://github.com/sparkfun/SparkFun_MAX17043_Particle_Library
This file demonstrates the simple API of the SparkFun MAX17043 Particle library.
Pair the Photon up to a SparkFun Photon Battery Shield
(https://www.sparkfun.com/products/13626), and away you go!
This example will print the gauge's voltage and state-of-charge (SOC) readings
to both serial (9600 baud) and out to the Particle cloud. Navigate to
https://api.particle.io/v1/devices/{DEVICE_ID}/voltage?access_token={ACCESS_TOKEN}
https://api.particle.io/v1/devices/{DEVICE_ID}/soc?access_token={ACCESS_TOKEN}
https://api.particle.io/v1/devices/{DEVICE_ID}/alert?access_token={ACCESS_TOKEN}
And read your Photon's battery charge from the Cloud!
Development environment specifics:
IDE: Particle Build
Hardware Platform: Particle Photon
SparkFun Photon Battery Shield
This code is released under the MIT license.
Distributed as-is; no warranty is given.
******************************************************************************/
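// For example, with a hypothetical device id and access token, the "soc" cloud
// variable could be read from the command line (a sketch, not part of this example):
//   curl "https://api.particle.io/v1/devices/0123456789abcdef/soc?access_token=YOUR_TOKEN"
// The returned JSON typically carries the state-of-charge reading in its "result" field.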
#include "SparkFunMAX17043/SparkFunMAX17043.h" // Include the SparkFun MAX17043 library
double voltage = 0; // Variable to keep track of LiPo voltage
double soc = 0; // Variable to keep track of LiPo state-of-charge (SOC)
bool alert; // Variable to keep track of whether alert has been triggered
void setup()
{
Serial.begin(9600); // Start serial, to output debug data
// Set up Spark variables (voltage, soc, and alert):
Particle.variable("voltage", voltage);
Particle.variable("soc", soc);
Particle.variable("alert", alert);
// To read the values from a browser, go to:
// http://api.particle.io/v1/devices/{DEVICE_ID}/{VARIABLE}?access_token={ACCESS_TOKEN}
// Set up the MAX17043 LiPo fuel gauge:
lipo.begin(); // Initialize the MAX17043 LiPo fuel gauge
// Quick start restarts the MAX17043 in hopes of getting a more accurate
// guess for the SOC.
lipo.quickStart();
// We can set an interrupt to alert when the battery SoC gets too low.
// We can alert at anywhere between 1% - 32%:
lipo.setThreshold(20); // Set alert threshold to 20%.
}
void loop()
{
// lipo.getVoltage() returns a voltage value (e.g. 3.93)
voltage = lipo.getVoltage();
// lipo.getSOC() returns the estimated state of charge (e.g. 79%)
soc = lipo.getSOC();
// lipo.getAlert() returns a 0 or 1 (0=alert not triggered)
alert = lipo.getAlert();
// Those variables will update to the Spark Cloud, but we'll also print them
// locally over serial for debugging:
Serial.print("Voltage: ");
Serial.print(voltage); // Print the battery voltage
Serial.println(" V");
Serial.print("Alert: ");
Serial.println(alert);
Serial.print("Percentage: ");
Serial.print(soc); // Print the battery state of charge
Serial.println(" %");
Serial.println();
delay(500);
}
<|start_filename|>spark.json<|end_filename|>
{
"name": "SparkFunMAX17043",
"author": "<NAME> <<EMAIL>>",
"license": "MIT",
"version": "1.1.3",
"description": "A library to drive the MAX17043 LiPo fuel gauge."
}
| edalquist/SparkFun_MAX17043_Particle_Library |
<|start_filename|>test/ash_postgres_test.exs<|end_filename|>
defmodule AshPostgresTest do
use ExUnit.Case
end
<|start_filename|>test/support/test_repo.ex<|end_filename|>
defmodule AshPostgres.TestRepo do
@moduledoc false
use AshPostgres.Repo,
otp_app: :ash_postgres
def installed_extensions do
["uuid-ossp", "pg_trgm"]
end
def all_tenants do
Code.ensure_compiled(AshPostgres.MultitenancyTest.Org)
AshPostgres.MultitenancyTest.Org
|> AshPostgres.MultitenancyTest.Api.read!()
|> Enum.map(&"org_#{&1.id}")
end
end
<|start_filename|>test/support/repo_case.ex<|end_filename|>
defmodule AshPostgres.RepoCase do
@moduledoc false
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
using do
quote do
alias AshPostgres.TestRepo
import Ecto
import Ecto.Query
import AshPostgres.RepoCase
# and any other stuff
end
end
setup tags do
:ok = Sandbox.checkout(AshPostgres.TestRepo)
unless tags[:async] do
Sandbox.mode(AshPostgres.TestRepo, {:shared, self()})
end
:ok
end
end
<|start_filename|>lib/transformers/verify_repo.ex<|end_filename|>
defmodule AshPostgres.Transformers.VerifyRepo do
@moduledoc "Verifies that the repo is configured correctly"
use Ash.Dsl.Transformer
def transform(resource, dsl) do
repo = AshPostgres.repo(resource)
cond do
match?({:error, _}, Code.ensure_compiled(repo)) ->
{:error, "Could not find repo module #{repo}"}
repo.__adapter__() != Ecto.Adapters.Postgres ->
{:error, "Expected a repo using the postgres adapter `Ecto.Adapters.Postgres`"}
true ->
{:ok, dsl}
end
end
end
| kingshalaby1/ash_postgres |
<|start_filename|>Dockerfile<|end_filename|>
FROM centos:7
MAINTAINER <NAME>
ENV NESSUS_VERSION="7.0.3"
VOLUME ["/opt/nessus"]
RUN set -x \
&& yum update -y \
\
# Find the download-id
&& DOWNLOAD_ID=$(curl -ssl -o - "https://www.tenable.com/downloads/nessus" | sed -n -e 's/.*data-download-id="\([0-9]*\)".*data-file-name="\([a-zA-Z0-9_\.-]\+\-es7\.x86_64\.rpm\).*".*/\1/p') \
\
# Import Tanable's GPG key
&& rpm --import https://static.tenable.com/marketing/RPM-GPG-KEY-Tenable \
\
# Fetch the rpm
&& curl -ssL -o /tmp/Nessus-${NESSUS_VERSION}-es7.x86_64.rpm \
"https://tenable-downloads-production.s3.amazonaws.com/uploads/download/file/${DOWNLOAD_ID}/Nessus-${NESSUS_VERSION}-es7.x86_64.rpm" \
\
# Install the rpm
&& rpm -ivh /tmp/Nessus-${NESSUS_VERSION}-es7.x86_64.rpm \
\
# Redirect logs to stdout
&& for lf in backend.log nessusd.messages www_server.log; do \
ln -s /dev/stdout /opt/nessus/var/nessus/logs/${lf}; done \
\
# Cleanup
&& rm /tmp/Nessus-${NESSUS_VERSION}-es7.x86_64.rpm \
&& yum clean all \
&& rm -rf /var/cache/yum \
&& rm -rf /opt/nessus/var/nessus/{uuid,*.db*,master.key}
EXPOSE 8834
CMD ["/opt/nessus/sbin/nessus-service"]
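# Build/run sketch (the image tag and volume name below are arbitrary examples):
#   docker build -t nessus:7.0.3 .
#   docker run -d --name nessus -p 8834:8834 -v nessus-data:/opt/nessus nessus:7.0.3
# Then browse to https://localhost:8834 to finish the Nessus setup.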
| mikenowak/docker-casa-nessus |
<|start_filename|>test/sequelize-mockingSpec.js<|end_filename|>
/**
* Testing around @{SequelizeMocking}
*
* @module test/sequelize-mocking
* @version 0.1.0
* @since 0.1.0
* @author <NAME>
*/
'use strict';
describe('SequelizeMocking - ', function () {
const expect = require('chai').expect;
const sinon = require('sinon');
const path = require('path');
const EventEmitter = require('events').EventEmitter;
const _ = require('lodash');
const Sequelize = require('sequelize');
const sequelizeFixtures = require('sequelize-fixtures');
const SequelizeMocking = require('../lib/sequelize-mocking');
const defaultMaxListeners = EventEmitter.defaultMaxListeners;
it('shall exist', function () {
expect(SequelizeMocking).to.exist;
expect(_.isPlainObject(SequelizeMocking)).to.be.false;
});
let sinonSandbox;
beforeEach(function () {
sinonSandbox = sinon.sandbox.create();
EventEmitter.defaultMaxListeners = 100; // Avoids an error when we instantiate some dialects (like the MySQL one) too many times in quick succession
});
afterEach(function () {
sinonSandbox.restore();
EventEmitter.defaultMaxListeners = defaultMaxListeners;
});
describe('and the method "adaptSequelizeOptions" should ', function () {
it('exist', function () {
expect(SequelizeMocking.adaptSequelizeOptions).to.exist;
});
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
let sequelizeInstanceOptions = _.cloneDeep(sequelizeInstance.options);
it('returns an extended sequelize configuration', function () {
expect(SequelizeMocking.adaptSequelizeOptions(sequelizeInstance))
.deep
.equals({
'benchmark': false,
'clientMinMessages': 'warning',
'databaseVersion': 0,
'define': {
'engine': 'MYISAM',
'paranoid': false,
'timestamps': false
},
'dialect': 'sqlite',
'dialectModule': null,
'dialectModulePath': null,
'hooks': {},
'host': 'localhost',
'isolationLevel': null,
'logging': console.log,
'native': false,
'omitNull': false,
'pool': {
'idle': 10000,
'max': 5,
'min': 0,
},
'protocol': 'tcp',
'query': {},
'quoteIdentifiers': true,
'replication': false,
'retry': {
'match': [
'SQLITE_BUSY: database is locked'
],
'max': 5
},
'ssl': undefined,
'standardConformingStrings': true,
'storage': ':memory:',
'sync': {},
'timezone': '+00:00',
'transactionType': 'DEFERRED',
'typeValidation': false
});
});
it('does not affect the options of the sequelize instance passed as parameter', function () {
let adaptedSequelizeOptions = SequelizeMocking.adaptSequelizeOptions(sequelizeInstance);
expect(sequelizeInstance.options).deep.equals(sequelizeInstanceOptions);
});
describe('returns, based on options, ', function () {
it('a sequelize options which allows logging', function () {
let adaptedSequelizeOptions = SequelizeMocking.adaptSequelizeOptions(sequelizeInstance, { 'logging': true });
expect(adaptedSequelizeOptions.logging).equals(console.log);
});
it('a sequelize options which disables logging', function () {
let adaptedSequelizeOptions = SequelizeMocking.adaptSequelizeOptions(sequelizeInstance, { 'logging': false });
expect(adaptedSequelizeOptions.logging).to.be.false;
});
});
});
describe('and the method "copyModel" should ', function () {
it('exist', function () {
expect(SequelizeMocking.copyModel).to.exist;
});
it('duplicate a model with the same options', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
let MyModel = sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
let DuplicatedMyModel = SequelizeMocking.copyModel(mockedSequelizeInstance, MyModel);
expect(DuplicatedMyModel.name).equals(MyModel.name);
expect(_.omit(DuplicatedMyModel.options, 'sequelize')).deep.equals(_.omit(MyModel.options, 'sequelize'));
});
it('duplicate a model without keeping the references', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
let MyModel = sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
let DuplicatedMyModel = SequelizeMocking.copyModel(mockedSequelizeInstance, MyModel);
expect(DuplicatedMyModel).not.equals(MyModel);
expect(DuplicatedMyModel.options).not.equals(MyModel.options);
expect(DuplicatedMyModel.rawAttributes).not.equals(MyModel.rawAttributes);
});
it('duplicate a model with upgrading the modelManager of the Sequelize instance', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
let MyModel = sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
expect(mockedSequelizeInstance.modelManager.all.length).equals(0);
let DuplicatedMyModel = SequelizeMocking.copyModel(mockedSequelizeInstance, MyModel);
expect(MyModel.options.sequelize).equals(sequelizeInstance);
expect(DuplicatedMyModel.options.sequelize).equals(mockedSequelizeInstance);
expect(sequelizeInstance.modelManager.all.length).equals(1);
expect(mockedSequelizeInstance.modelManager.all.length).equals(1);
});
});
describe('and the method "create" should ', function () {
it('exist', function () {
expect(SequelizeMocking.create).to.exist;
});
it('should use the copyCurrentModels, modifyModelReferences, modifyConnection and hookNewModel methods', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let stubCopy = sinonSandbox.stub(SequelizeMocking, 'copyCurrentModels').callsFake(_.noop);
let stubModifyModelReferences = sinonSandbox.stub(SequelizeMocking, 'modifyModelReferences').callsFake(_.noop);
let stubModifyConnection = sinonSandbox.stub(SequelizeMocking, 'modifyConnection').callsFake(_.noop);
let stubHook = sinonSandbox.stub(SequelizeMocking, 'hookNewModel').callsFake(_.noop);
SequelizeMocking.create(sequelizeInstance);
expect(stubCopy.called).to.be.true;
expect(stubCopy.calledOnce).to.be.true;
expect(stubCopy.calledWith(sequelizeInstance, sinon.match.instanceOf(Sequelize))).to.be.true;
expect(stubModifyModelReferences.called).to.be.true;
expect(stubModifyModelReferences.calledOnce).to.be.true;
expect(stubModifyModelReferences.calledWith(sequelizeInstance, sinon.match.instanceOf(Sequelize))).to.be.true;
expect(stubModifyConnection.called).to.be.true;
expect(stubModifyConnection.calledOnce).to.be.true;
expect(stubModifyConnection.calledWith(sequelizeInstance, sinon.match.instanceOf(Sequelize))).to.be.true;
expect(stubHook.called).to.be.true;
expect(stubHook.calledOnce).to.be.true;
expect(stubHook.calledWith(sequelizeInstance, sinon.match.instanceOf(Sequelize))).to.be.true;
});
it('should return a "mocked" sequelize instance', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let stubCopy = sinonSandbox.stub(SequelizeMocking, 'copyCurrentModels').callsFake(_.noop);
let stubModify = sinonSandbox.stub(SequelizeMocking, 'modifyModelReferences').callsFake(_.noop);
let stubHook = sinonSandbox.stub(SequelizeMocking, 'hookNewModel').callsFake(_.noop);
return SequelizeMocking
.create(sequelizeInstance)
.then(function (mockedSequelize) {
expect(mockedSequelize).to.be.instanceof(Sequelize);
expect(mockedSequelize).not.equals(sequelizeInstance);
});
});
it('should associate onto the "mocked" sequelize instance the original one', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let stubCopy = sinonSandbox.stub(SequelizeMocking, 'copyCurrentModels').callsFake(_.noop);
let stubModify = sinonSandbox.stub(SequelizeMocking, 'modifyModelReferences').callsFake(_.noop);
let stubHook = sinonSandbox.stub(SequelizeMocking, 'hookNewModel').callsFake(_.noop);
return SequelizeMocking
.create(sequelizeInstance)
.then(function (mockedSequelize) {
expect(mockedSequelize.__originalSequelize).not.to.be.undefined;
expect(mockedSequelize.__originalSequelize).to.be.instanceof(Sequelize);
expect(mockedSequelize.__originalSequelize).equals(sequelizeInstance);
});
});
it('should pass through the options', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let stubCopy = sinonSandbox.stub(SequelizeMocking, 'copyCurrentModels').callsFake(_.noop);
let stubModify = sinonSandbox.stub(SequelizeMocking, 'modifyModelReferences').callsFake(_.noop);
let stubHook = sinonSandbox.stub(SequelizeMocking, 'hookNewModel').callsFake(_.noop);
return SequelizeMocking
.create(sequelizeInstance, { 'logging': false })
.then(function (mockedSequelize) {
expect(stubHook.called).to.be.true;
expect(stubHook.calledOnce).to.be.true;
expect(stubHook.calledWith(sequelizeInstance, sinon.match.instanceOf(Sequelize), { 'logging': false })).to.be.true;
});
});
});
describe('and the method "createAndLoadFixtureFile" should ', function () {
it('exist', function () {
expect(SequelizeMocking.createAndLoadFixtureFile).to.exist;
});
it('call the "create" function', function () {
let stub = sinonSandbox.stub(SequelizeMocking, 'create').callsFake(() => Promise.reject());
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
SequelizeMocking.createAndLoadFixtureFile(sequelizeInstance, 'a/path', { 'logging': false });
expect(stub.called).to.be.true;
expect(stub.calledOnce).to.be.true;
expect(stub.calledWith(sequelizeInstance, { 'logging': false })).to.be.true;
});
it('call the "loadFixtureFile" function for the created mocked sequelize instance', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let mockedSequelizeInstance = new Sequelize('mocked-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let stub = sinonSandbox.stub(SequelizeMocking, 'create').callsFake(() => Promise.resolve(mockedSequelizeInstance));
let stub2 = sinonSandbox.stub(SequelizeMocking, 'loadFixtureFile').callsFake(() => Promise.resolve());
return SequelizeMocking
.createAndLoadFixtureFile(sequelizeInstance, 'a/path', { 'logging': false })
.then(function () {
expect(stub2.called).to.be.true;
expect(stub2.calledOnce).to.be.true;
expect(stub2.calledWith(mockedSequelizeInstance, 'a/path', { 'logging': false })).to.be.true;
});
});
it('return a Promise with the mocked sequelize instance', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let mockedSequelizeInstance = new Sequelize('mocked-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let stub = sinonSandbox.stub(SequelizeMocking, 'create').callsFake(() => Promise.resolve(mockedSequelizeInstance));
let stub2 = sinonSandbox.stub(SequelizeMocking, 'loadFixtureFile').callsFake(() => Promise.resolve());
return SequelizeMocking
.createAndLoadFixtureFile(sequelizeInstance, 'a/path', { 'logging': false })
.then(function (mockedSequelize) {
expect(mockedSequelize).equals(mockedSequelizeInstance);
});
});
});
describe('and the method "copyCurrentModels" should ', function (){
it('exist', function () {
expect(SequelizeMocking.copyCurrentModels).to.exist;
});
it('copy the models of the first sequelize instance into the second one', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
expect(sequelizeInstance.modelManager.all.length).equals(1);
expect(mockedSequelizeInstance.modelManager.all.length).equals(0);
SequelizeMocking.copyCurrentModels(sequelizeInstance, mockedSequelizeInstance);
expect(sequelizeInstance.modelManager.all.length).equals(1);
expect(mockedSequelizeInstance.modelManager.all.length).equals(1);
expect(sequelizeInstance.modelManager.all[0]).not.equals(mockedSequelizeInstance.modelManager.all[0]);
});
it('use the "copyModel" function', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
let MyModel = sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
let spyCopyModel = sinonSandbox.spy(SequelizeMocking, 'copyModel');
SequelizeMocking.copyCurrentModels(sequelizeInstance, mockedSequelizeInstance);
spyCopyModel.restore();
expect(spyCopyModel.called).to.be.true;
expect(spyCopyModel.calledOnce).to.be.true;
expect(spyCopyModel.calledWith(mockedSequelizeInstance, MyModel)).to.be.true;
});
});
describe('and the method "hookNewModel" should ', function () {
it('exist', function () {
expect(SequelizeMocking.hookNewModel).to.exist;
});
it('listen the "afterDefine" event', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let mockedSequelizeInstance = new Sequelize('mocked-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let spy = sinonSandbox.spy(sequelizeInstance, 'addHook');
SequelizeMocking.hookNewModel(sequelizeInstance, mockedSequelizeInstance);
expect(spy.called).to.be.true;
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith('afterDefine', 'sequelizeMockAfterDefine', sinon.match.func)).to.be.true;
});
it('should call "copyModel" when a new model is added', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let mockedSequelizeInstance = new Sequelize('mocked-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let spy = sinonSandbox.spy(SequelizeMocking, 'copyModel');
SequelizeMocking.hookNewModel(sequelizeInstance, mockedSequelizeInstance);
let MyModel = sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
expect(spy.called).to.be.true;
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith(mockedSequelizeInstance, MyModel)).to.be.true;
});
it('should call "modifyModelReference" when a new model is added', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let mockedSequelizeInstance = new Sequelize('mocked-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let spy = sinonSandbox.spy(SequelizeMocking, 'modifyModelReference');
SequelizeMocking.hookNewModel(sequelizeInstance, mockedSequelizeInstance);
sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
expect(spy.called).to.be.true;
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith(mockedSequelizeInstance, sinon.match.any)).to.be.true;
});
it('should use the "logging" option', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let mockedSequelizeInstance = new Sequelize('mocked-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let fakeObject = {
'sync': () => Promise.resolve()
};
let stub = sinonSandbox.stub(SequelizeMocking, 'modifyModelReference').callsFake(() => fakeObject);
let spy = sinonSandbox.stub(console, 'log');
SequelizeMocking.hookNewModel(sequelizeInstance, mockedSequelizeInstance, { 'logging': false });
sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
expect(spy.called).to.be.false;
});
});
describe('and the method "loadFixtureFile" should ', function () {
it('exist', function () {
expect(SequelizeMocking.loadFixtureFile).to.exist;
});
it('call the map models function', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
let stub = sinonSandbox.stub(sequelizeFixtures, 'loadFile').callsFake(() => Promise.resolve());
let spy = sinonSandbox.spy(SequelizeMocking, 'mapModels');
SequelizeMocking.loadFixtureFile(sequelizeInstance, '/a/path/for/json/file');
expect(spy.called).to.be.true;
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith(sequelizeInstance)).to.be.true;
});
it('load the fixture models file and return into the Promise the sequelize instance', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
return sequelizeInstance
.sync()
.then(function () {
return SequelizeMocking
.loadFixtureFile(sequelizeInstance, path.resolve(path.join(__dirname, './my-model-database.json')));
})
.then(function (sequelize) {
expect(sequelize).equals(sequelizeInstance);
});
});
it('Should detect load the fixture models files from array and return into the Promise the sequelize instance', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
return sequelizeInstance
.sync()
.then(function () {
return SequelizeMocking
.loadFixtureFile(sequelizeInstance, [
path.resolve(path.join(__dirname, './my-model-database.json')),
path.resolve(path.join(__dirname, './my-model-1-database.json'))
]);
})
.then(function (sequelize) {
expect(sequelize).equals(sequelizeInstance);
});
});
it('should not log if the logging option is false', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
let spy = sinonSandbox.spy(sequelizeFixtures, 'loadFile');
let filePath = path.resolve(path.join(__dirname, './my-model-database.json'));
return sequelizeInstance
.sync()
.then(function () {
return SequelizeMocking
.loadFixtureFile(sequelizeInstance, filePath, { 'logging': false });
})
.then(function () {
expect(spy.firstCall.args).deep.equals([
filePath,
{
'myModel': sequelizeInstance.modelManager.all[0]
},
{
'encoding': 'utf8',
'log': _.noop
}
]);
});
});
it('should allow transform the data if specified', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
let spy = sinonSandbox.spy(sequelizeFixtures, 'loadFile');
let filePath = path.resolve(path.join(__dirname, './my-model-database.json'));
function transformFixtureDataFn(data) {
// Fixtures with negative numbers allow creating data objects
// relative to the time of the import.
if(data.createdAt
&& data.createdAt < 0) {
data.createdAt = new Date((new Date()).getTime() + parseFloat(data.createdAt) * 1000 * 60);
}
return data;
}
return sequelizeInstance
.sync()
.then(function () {
return SequelizeMocking
.loadFixtureFile(sequelizeInstance, filePath, { 'logging': false, 'transformFixtureDataFn': transformFixtureDataFn });
})
.then(function () {
expect(spy.firstCall.args).deep.equals([
filePath,
{
'myModel': sequelizeInstance.modelManager.all[0]
},
{
'encoding': 'utf8',
'log': _.noop,
'transformFixtureDataFn': transformFixtureDataFn
}
]);
});
});
});
describe('and the method "mapModels" should ', function () {
it('exist', function () {
expect(SequelizeMocking.mapModels).to.exist;
});
it('return an empty map if no Sequelize models were defined', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
let mapModels = SequelizeMocking.mapModels(sequelizeInstance);
expect(mapModels).not.to.be.undefined;
expect(mapModels).to.be.empty;
});
it('return a map with the defined Sequelize model', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
let mapModels = SequelizeMocking.mapModels(sequelizeInstance);
expect(mapModels).not.to.be.undefined;
expect(mapModels).deep.equals({
'myModel': sequelizeInstance.modelManager.all[0]
});
});
it('return a map with the defined Sequelize models', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
sequelizeInstance.define('myModel1', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
sequelizeInstance.define('myModel2', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
let mapModels = SequelizeMocking.mapModels(sequelizeInstance);
expect(mapModels).not.to.be.undefined;
expect(mapModels).deep.equals({
'myModel1': sequelizeInstance.modelManager.all[0],
'myModel2': sequelizeInstance.modelManager.all[1]
});
});
});
describe('and the method "modifyConnection" should ', function () {
it('exist', function () {
expect(SequelizeMocking.modifyConnection).to.exist;
});
it('should override the dialect and the connectionManafer', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
let usedDialect = sequelizeInstance.dialect;
let usedQueryInterface = sequelizeInstance.queryInterface;
let usedConnectionManager = sequelizeInstance.connectionManager;
let sequelizeInstance2 = new Sequelize('my-database2', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
SequelizeMocking.modifyConnection(sequelizeInstance, sequelizeInstance2);
expect(sequelizeInstance.__dialect).to.exist;
expect(sequelizeInstance.__dialect).equals(usedDialect);
expect(sequelizeInstance.__queryInterface).to.exist;
expect(sequelizeInstance.__queryInterface).equals(usedQueryInterface);
expect(sequelizeInstance.__connectionManager).to.exist;
expect(sequelizeInstance.__connectionManager).equals(usedConnectionManager);
expect(sequelizeInstance.dialect === sequelizeInstance2.dialect).to.be.true;
expect(sequelizeInstance.queryInterface === sequelizeInstance2.queryInterface).to.be.true;
expect(sequelizeInstance.connectionManager === sequelizeInstance2.connectionManager).to.be.true;
});
});
describe('and the method "modifyModelReference" should ', function () {
it('exist', function () {
expect(SequelizeMocking.modifyModelReference).to.exist;
});
it('should override the sequelize property of the specified model with the specified sequelize instance', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let sequelizeInstance2 = new Sequelize('my-database2', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let MyModel = sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
expect(MyModel.sequelize).equals(sequelizeInstance);
SequelizeMocking.modifyModelReference(sequelizeInstance2, MyModel);
expect(MyModel.sequelize).equals(sequelizeInstance2);
});
it('should override the model manager based on the specified sequelize instance', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let sequelizeInstance2 = new Sequelize('my-database2', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
let MyModel = sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
expect(MyModel.sequelize).equals(sequelizeInstance);
SequelizeMocking.modifyModelReference(sequelizeInstance2, MyModel);
expect(MyModel.sequelize).equals(sequelizeInstance2);
});
});
describe('and the method "modifyModelReferences" should ', function (){
it('exist', function () {
expect(SequelizeMocking.modifyModelReferences).to.exist;
});
it('override the models of the first sequelize instance', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'mysql',
'define': {
'engine': 'MYISAM',
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
let MyModel = sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
SequelizeMocking.modifyModelReferences(sequelizeInstance, mockedSequelizeInstance);
expect(MyModel.sequelize).equals(mockedSequelizeInstance);
});
it('use the "modifyModelReference" function', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
},
'pool': {
'max': 5,
'min': 0,
'idle': 10000
}
});
let MyModel = sequelizeInstance.define('myModel', {
'id': {
'type': Sequelize.INTEGER,
'autoIncrement': true,
'primaryKey': true
},
'description': Sequelize.TEXT
});
let spyCopyModel = sinonSandbox.spy(SequelizeMocking, 'modifyModelReference');
SequelizeMocking.modifyModelReferences(sequelizeInstance, mockedSequelizeInstance);
expect(spyCopyModel.called).to.be.true;
expect(spyCopyModel.calledOnce).to.be.true;
expect(spyCopyModel.calledWith(mockedSequelizeInstance, MyModel)).to.be.true;
});
});
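// restore undoes a mock: it unhooks the "afterDefine" hook, points models and the connection
// back at the original instance when the "__" backup properties are present, removes those
// backups, and drops all tables on the mocked database.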
describe('and the method "restore" should ', function () {
it('exist', function () {
expect(SequelizeMocking.restore).to.exist;
});
it('should call "unhookNewModel" method', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let spy = sinonSandbox.spy(SequelizeMocking, 'unhookNewModel');
SequelizeMocking.restore(mockedSequelizeInstance);
expect(spy.called).to.be.true;
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith(mockedSequelizeInstance)).to.be.true;
});
it('should call "modifyModelReferences" method if the sequelize instance is a mocked one', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let sequelizeInstance = new Sequelize('my-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
mockedSequelizeInstance.__originalSequelize = sequelizeInstance;
let spy = sinonSandbox.spy(SequelizeMocking, 'modifyModelReferences');
SequelizeMocking.restore(mockedSequelizeInstance);
expect(spy.called).to.be.true;
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith(mockedSequelizeInstance, sequelizeInstance)).to.be.true;
});
it('should call "modifyConnection" method if the sequelize instance is a mocked one', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let sequelizeInstance = new Sequelize('my-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
mockedSequelizeInstance.__originalSequelize = sequelizeInstance;
mockedSequelizeInstance.__dialect = sequelizeInstance.dialect;
mockedSequelizeInstance.__queryInterface = sequelizeInstance.queryInterface;
mockedSequelizeInstance.__connectionManager = sequelizeInstance.connectionManager;
let spy = sinonSandbox.spy(SequelizeMocking, 'modifyConnection');
SequelizeMocking.restore(mockedSequelizeInstance);
expect(spy.called).to.be.true;
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith(mockedSequelizeInstance, sequelizeInstance)).to.be.true;
});
it('should remove "__originalSequelize" property', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
mockedSequelizeInstance.__originalSequelize = new Sequelize('my-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
SequelizeMocking.restore(mockedSequelizeInstance);
expect(mockedSequelizeInstance.__originalSequelize).not.to.exist;
});
it('should remove "__dialect" and "__connectionManager" properties', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
mockedSequelizeInstance.__originalSequelize = new Sequelize('my-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
SequelizeMocking.restore(mockedSequelizeInstance);
expect(mockedSequelizeInstance.__dialect).not.to.exist;
expect(mockedSequelizeInstance.__connectionManager).not.to.exist;
});
it('flush the mocked sequelize database', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let spyGetQueryInterface = sinonSandbox.spy(mockedSequelizeInstance, 'getQueryInterface');
let spyDropAllTables = sinonSandbox.spy(mockedSequelizeInstance.getQueryInterface(), 'dropAllTables');
return SequelizeMocking
.restore(mockedSequelizeInstance)
.then(function () {
expect(spyGetQueryInterface.called).to.be.true;
expect(spyDropAllTables.called).to.be.true;
expect(spyDropAllTables.calledWith({ 'logging': true })).to.be.true;
});
});
it('should use the "logging" option', function () {
let mockedSequelizeInstance = new Sequelize('mocked-database', null, null, {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:'
});
let spyDropAllTables = sinonSandbox.spy(mockedSequelizeInstance.getQueryInterface(), 'dropAllTables');
return SequelizeMocking
.restore(mockedSequelizeInstance, { 'logging': false })
.then(function () {
expect(spyDropAllTables.called).to.be.true;
expect(spyDropAllTables.calledOnce).to.be.true;
expect(spyDropAllTables.calledWith({ 'logging': false })).to.be.true;
});
});
});
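// unhookNewModel removes the "afterDefine" hook from the original sequelize instance and is
// expected to be a no-op when the instance was never mocked.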
describe('and the method "unhookNewModel" should ', function () {
it('exist', function () {
expect(SequelizeMocking.unhookNewModel).to.exist;
});
it('do nothing if the sequelize was not mocked', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
expect(function () {
SequelizeMocking.unhookNewModel(sequelizeInstance);
}).not.to.throw();
});
it('remove the hook on the original sequelize on the mocked sequelize', function () {
let sequelizeInstance = new Sequelize('my-database', 'mysqlUserName', 'mysqlUserPassword', {
'host': 'localhost',
'dialect': 'sqlite',
'storage': ':memory:',
'define': {
'timestamps': false,
'paranoid': false
}
});
sequelizeInstance.__originalSequelize = {
'removeHook': function (eventName) {
}
};
let spy = sinonSandbox.spy(sequelizeInstance.__originalSequelize, 'removeHook');
SequelizeMocking.unhookNewModel(sequelizeInstance);
expect(spy.called).to.be.true;
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith('afterDefine')).to.be.true;
});
});
});
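// A rough composition sketch built only from the calls exercised above. Variable names are
// illustrative, and real projects usually reach these helpers through the library's own
// higher-level setup (an assumption here, not shown in this file):
//
//     let mocked = new Sequelize('mocked-database', null, null,
//         { 'host': 'localhost', 'dialect': 'sqlite', 'storage': ':memory:' });
//     SequelizeMocking.modifyModelReferences(realSequelize, mocked); // models now use the mock
//     SequelizeMocking.modifyConnection(realSequelize, mocked);      // swap connection objects
//     // ... run the code under test against the in-memory database ...
//     SequelizeMocking.restore(mocked, { 'logging': false });        // drop all tables on the mock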
| RafaelMonteiroExaud/sequelize-mocking |
<|start_filename|>docs/_data/P100.json<|end_filename|>
{"label": "P100", "n_articles": "1734", "top_author_name": "<NAME>", "top_author_count": "26", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "83", "top_keywords": ["visual evoked potentials", "visual evoked potential", "n170", "emotion", "multiple sclerosis"], "first_publication": "1976", "name": "P100", "top_cog_assocs": ["vision", "somatosensory", "face"], "top_dis_assocs": ["multiple sclerosis", "migraine", "schizophrenia"]}
<|start_filename|>docs/_data/PINV.json<|end_filename|>
{"label": "PINV", "n_articles": "33", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Psychological medicine", "top_journal_count": "4", "top_keywords": ["control", "emotion", "event-related potential (erp)", "international affective picture system (iaps)", "late positive potential (lpp)"], "first_publication": "1976", "name": "post imperative negative variation", "top_cog_assocs": ["arousal", "auditory", "attention"], "top_dis_assocs": ["depression", "schizophrenia", "migraine"]}
<|start_filename|>docs/_data/CPS.json<|end_filename|>
{"label": "CPS", "n_articles": "37", "top_author_name": "<NAME>", "top_author_count": "14", "top_journal_name": "Journal of cognitive neuroscience", "top_journal_count": "8", "top_keywords": ["closure positive shift (cps)", "prosody", "event-related potentials (erp)", "prosodic boundary cues", "prosody processing"], "first_publication": "2001", "name": "closure positive shift", "top_cog_assocs": ["speech", "perception", "language"], "top_dis_assocs": ["dyslexia", "autism", "language impairment"]}
<|start_filename|>docs/_data/N180.json<|end_filename|>
{"label": "N180", "n_articles": "39", "top_author_name": "<NAME>", "top_author_count": "7", "top_journal_name": "Zhurnal vysshei nervnoi deiatelnosti imeni I P Pavlova", "top_journal_count": "6", "top_keywords": ["n400", "auditory word categorization", "embodied cognition", "language perception", "multimodal representation"], "first_publication": "1981", "name": "N180", "top_cog_assocs": ["vision", "spatial", "recognition"], "top_dis_assocs": ["schizophrenia", "migraine", "TBI"]}
<|start_filename|>docs/_data/P350.json<|end_filename|>
{"label": "P350", "n_articles": "68", "top_author_name": "<NAME>", "top_author_count": "6", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "6", "top_keywords": ["eye gaze", "joint attention", "social interaction", "virtual reality", "agency"], "first_publication": "1977", "name": "P350", "top_cog_assocs": ["auditory", "attention", "vision"], "top_dis_assocs": ["ADHD", "alcoholism", "schizophrenia"]}
<|start_filename|>docs/_data/N130.json<|end_filename|>
{"label": "N130", "n_articles": "34", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "7", "top_keywords": ["affordances", "attention", "grasp", "mirror neuron", "culture"], "first_publication": "1975", "name": "N130", "top_cog_assocs": ["vision", "somatosensory", "auditory"], "top_dis_assocs": ["alzheimer", "epilepsy", "psychosis"]}
<|start_filename|>docs/_data/VAN.json<|end_filename|>
{"label": "VAN", "n_articles": "31", "top_author_name": "<NAME>", "top_author_count": "6", "top_journal_name": "Consciousness and cognition", "top_journal_count": "6", "top_keywords": ["consciousness", "awareness", "inattentional blindness", "late positivity", "attention"], "first_publication": "2003", "name": "visual awareness negativity", "top_cog_assocs": ["conscious", "vision", "perception"], "top_dis_assocs": ["schizophrenia", "TBI", "dyslexia"]}
<|start_filename|>docs/_data/N450.json<|end_filename|>
{"label": "N450", "n_articles": "174", "top_author_name": "<NAME>", "top_author_count": "7", "top_journal_name": "Psychophysiology", "top_journal_count": "16", "top_keywords": ["cognitive control", "stroop", "n2", "stroop task", "conflict monitoring"], "first_publication": "1984", "name": "N450", "top_cog_assocs": ["conflict", "cognitive control", "attention"], "top_dis_assocs": ["depression", "schizophrenia", "ADHD"]}
<|start_filename|>docs/_data/N170.json<|end_filename|>
{"label": "N170", "n_articles": "1381", "top_author_name": "<NAME>", "top_author_count": "43", "top_journal_name": "Neuropsychologia", "top_journal_count": "96", "top_keywords": ["face processing", "face perception", "emotion", "faces", "facial expression"], "first_publication": "1978", "name": "N170", "top_cog_assocs": ["face", "vision", "emotion"], "top_dis_assocs": ["autism", "schizophrenia", "anxiety"]}
<|start_filename|>docs/_data/N240.json<|end_filename|>
{"label": "N240", "n_articles": "20", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Medicina (Kaunas, Lithuania)", "top_journal_count": "1", "top_keywords": ["nociception", "touch", "intravenous infusion", "laparoscopic cholecystectomy", "nefopam"], "first_publication": "1987", "name": "N240", "top_cog_assocs": ["somatosensory", "pain", "perception"], "top_dis_assocs": ["TBI", "stroke", "alzheimer"]}
<|start_filename|>docs/_data/N300.json<|end_filename|>
{"label": "N300", "n_articles": "194", "top_author_name": "<NAME>", "top_author_count": "7", "top_journal_name": "Neuropsychologia", "top_journal_count": "21", "top_keywords": ["n400", "prospective memory", "object recognition", "emotion", "n170"], "first_publication": "1980", "name": "N300", "top_cog_assocs": ["vision", "semantic", "recognition"], "top_dis_assocs": ["schizophrenia", "dyslexia", "anxiety"]}
<|start_filename|>docs/_data/P170.json<|end_filename|>
{"label": "P170", "n_articles": "32", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Human brain mapping", "top_journal_count": "2", "top_keywords": ["attention", "hearing loss", "neural mechanism", "visual compensatory", "dft"], "first_publication": "1975", "name": "P170", "top_cog_assocs": ["vision", "somatosensory", "attention"], "top_dis_assocs": ["epilepsy", "parkinson", "dyslexia"]}
<|start_filename|>docs/_data/P230.json<|end_filename|>
{"label": "P230", "n_articles": "18", "top_author_name": "<NAME>", "top_author_count": "6", "top_journal_name": "Zhurnal vysshei nervnoi deiatelnosti imeni I P Pavlova", "top_journal_count": "3", "top_keywords": ["confabulation", "desynchronization", "face recognition", "memory schema", "theta coherence"], "first_publication": "1979", "name": "P230", "top_cog_assocs": ["vision", "face", "emotion"], "top_dis_assocs": ["TBI", "stroke", "addiction"]}
<|start_filename|>docs/_data/P550.json<|end_filename|>
{"label": "P550", "n_articles": "15", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "Neuropsychologia", "top_journal_count": "1", "top_keywords": ["common region", "extrinsic grouping", "loreta", "perceptual grouping", "aging"], "first_publication": "1983", "name": "P550", "top_cog_assocs": ["semantic", "vision", "memory"], "top_dis_assocs": ["epilepsy", "stroke", "addiction"]}
<|start_filename|>docs/_data/FRN.json<|end_filename|>
{"label": "FRN", "n_articles": "594", "top_author_name": "<NAME>", "top_author_count": "32", "top_journal_name": "Psychophysiology", "top_journal_count": "61", "top_keywords": ["feedback-related negativity", "feedback-related negativity (frn)", "p300", "p3", "outcome evaluation"], "first_publication": "2003", "name": "feedback related negativity", "top_cog_assocs": ["reward", "learning", "error"], "top_dis_assocs": ["anxiety", "depression", "addiction"]}
<|start_filename|>docs/_data/N280.json<|end_filename|>
{"label": "N280", "n_articles": "21", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "Journal of speech, language, and hearing research : JSLHR", "top_journal_count": "2", "top_keywords": ["transcranial magnetic stimulation", "cortical reactivity", "motor cortex excitability", "source localization", "transcranial direct current stimulation"], "first_publication": "1996", "name": "N280", "top_cog_assocs": ["semantic", "reading", "motor"], "top_dis_assocs": ["multiple sclerosis", "TBI", "dyslexia"]}
<|start_filename|>docs/_data/P150.json<|end_filename|>
{"label": "P150", "n_articles": "157", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Brain research", "top_journal_count": "5", "top_keywords": ["aging", "congenital hypothyroidism", "iodine diet", "psychiatric diseases", "thyroid hormones"], "first_publication": "1971", "name": "P150", "top_cog_assocs": ["vision", "auditory", "attention"], "top_dis_assocs": ["anxiety", "schizophrenia", "ADHD"]}
<|start_filename|>docs/_data/ADAN.json<|end_filename|>
{"label": "ADAN", "n_articles": "29", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Brain research", "top_journal_count": "5", "top_keywords": ["attention", "edan", "spatial attention", "n2pc", "alpha oscillations"], "first_publication": "2003", "name": "anterior directing attention negativity", "top_cog_assocs": ["attention", "spatial", "vision"], "top_dis_assocs": ["autism", "TBI", "stroke"]}
<|start_filename|>docs/_data/N700.json<|end_filename|>
{"label": "N700", "n_articles": "18", "top_author_name": "<NAME>", "top_author_count": "6", "top_journal_name": "Clinical neurophysiology : official journal of the International Federation of Clinical Neurophysiology", "top_journal_count": "4", "top_keywords": ["imageability", "n400", "autism spectrum disorder", "infancy", "statistical learning"], "first_publication": "1999", "name": "N700", "top_cog_assocs": ["vision", "memory", "semantic"], "top_dis_assocs": ["autism", "TBI", "stroke"]}
<|start_filename|>docs/_data/N220.json<|end_filename|>
{"label": "N220", "n_articles": "10", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "Journal of vision", "top_journal_count": "1", "top_keywords": ["emotion", "facial expression", "humour", "misfortune"], "first_publication": "1981", "name": "N220", "top_cog_assocs": ["vision", "perception", "attention"], "top_dis_assocs": ["alzheimer", "dementia", "TBI"]}
<|start_filename|>docs/_data/pN.json<|end_filename|>
{"label": "pN", "n_articles": "23", "top_author_name": "<NAME>", "top_author_count": "20", "top_journal_name": "Neuroscience", "top_journal_count": "4", "top_keywords": ["bereitschaftspotential", "motor behavior", "decision making", "proactive control", "bereitschaftspotential (bp)"], "first_publication": "2009", "name": "prefrontal negativity", "top_cog_assocs": ["motor", "anticipation", "attention"], "top_dis_assocs": ["MCI", "TBI", "dyslexia"]}
<|start_filename|>docs/_data/MFN.json<|end_filename|>
{"label": "MFN", "n_articles": "97", "top_author_name": "<NAME>", "top_author_count": "6", "top_journal_name": "Psychophysiology", "top_journal_count": "13", "top_keywords": ["ultimatum game", "cognitive control", "p3", "performance monitoring", "p300"], "first_publication": "1999", "name": "medial frontal negativity", "top_cog_assocs": ["social", "error", "conflict"], "top_dis_assocs": ["depression", "addiction", "alzheimer"]}
<|start_filename|>docs/_data/P110.json<|end_filename|>
{"label": "P110", "n_articles": "26", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "Brain topography", "top_journal_count": "2", "top_keywords": [], "first_publication": "1981", "name": "P110", "top_cog_assocs": ["vision", "somatosensory", "auditory"], "top_dis_assocs": ["stroke", "dyslexia", "TBI"]}
<|start_filename|>docs/_data/N200.json<|end_filename|>
{"label": "N200", "n_articles": "780", "top_author_name": "<NAME>", "top_author_count": "22", "top_journal_name": "International journal of psychophysiology : official journal of the International Organization of Psychophysiology", "top_journal_count": "30", "top_keywords": ["p300", "attention", "cognitive control", "n400", "emotion"], "first_publication": "1976", "name": "N200", "top_cog_assocs": ["auditory", "attention", "vision"], "top_dis_assocs": ["schizophrenia", "alzheimer", "depression"]}
<|start_filename|>docs/_data/P300.json<|end_filename|>
{"label": "P300", "n_articles": "5839", "top_author_name": "<NAME>", "top_author_count": "116", "top_journal_name": "Psychophysiology", "top_journal_count": "248", "top_keywords": ["attention", "schizophrenia", "brain-computer interface", "working memory", "frn"], "first_publication": "1971", "name": "P300", "top_cog_assocs": ["auditory", "vision", "attention"], "top_dis_assocs": ["schizophrenia", "depression", "dementia"]}
<|start_filename|>docs/_data/SPCN.json<|end_filename|>
{"label": "SPCN", "n_articles": "41", "top_author_name": "<NAME>", "top_author_count": "17", "top_journal_name": "Psychophysiology", "top_journal_count": "11", "top_keywords": ["attention", "n2pc", "visual working memory", "visual search", "visual short-term memory"], "first_publication": "2006", "name": "sustained posterior contralateral negativity", "top_cog_assocs": ["vision", "memory", "attention"], "top_dis_assocs": ["anxiety", "depression", "TBI"]}
<|start_filename|>docs/_data/P160.json<|end_filename|>
{"label": "P160", "n_articles": "29", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "The Journal of biological chemistry", "top_journal_count": "4", "top_keywords": ["c1", "higher-order cortical processing", "perceptual learning", "primary visual cortex"], "first_publication": "1981", "name": "P160", "top_cog_assocs": ["attention", "auditory", "face"], "top_dis_assocs": ["anxiety", "depression", "TBI"]}
<|start_filename|>docs/_data/FSP.json<|end_filename|>
{"label": "FSP", "n_articles": "20", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Psychophysiology", "top_journal_count": "5", "top_keywords": ["attention-deficit/hyperactivity disorder", "p300", "contingent negative variation", "theta activity", "time reproduction"], "first_publication": "1993", "name": "frontal selection positivity", "top_cog_assocs": ["attention", "vision", "representation"], "top_dis_assocs": ["ADHD", "alcoholism", "stroke"]}
<|start_filename|>docs/_data/N400.json<|end_filename|>
{"label": "N400", "n_articles": "2594", "top_author_name": "<NAME>", "top_author_count": "91", "top_journal_name": "Neuropsychologia", "top_journal_count": "217", "top_keywords": ["p600", "semantics", "language", "prediction", "semantic processing"], "first_publication": "1980", "name": "N400", "top_cog_assocs": ["semantic", "language", "reading"], "top_dis_assocs": ["schizophrenia", "language impairment", "alzheimer"]}
<|start_filename|>docs/_data/N190.json<|end_filename|>
{"label": "N190", "n_articles": "24", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "3", "top_keywords": ["p3", "readiness potential", "self-other distinction", "early posterior negativity (epn)", "visual structural encoding"], "first_publication": "1983", "name": "N190", "top_cog_assocs": ["vision", "motor", "perception"], "top_dis_assocs": ["epilepsy", "autism", "stroke"]}
<|start_filename|>docs/_data/EPN.json<|end_filename|>
{"label": "EPN", "n_articles": "244", "top_author_name": "<NAME>", "top_author_count": "20", "top_journal_name": "Biological psychology", "top_journal_count": "25", "top_keywords": ["emotion", "early posterior negativity (epn)", "attention", "lpp", "eeg/erp"], "first_publication": "2003", "name": "early posterior negativity", "top_cog_assocs": ["emotion", "attention", "valence"], "top_dis_assocs": ["anxiety", "schizophrenia", "depression"]}
<|start_filename|>docs/_data/N320.json<|end_filename|>
{"label": "N320", "n_articles": "23", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Biological psychology", "top_journal_count": "2", "top_keywords": ["category learning", "n250", "orthography", "perceptual expertise", "selection negativity"], "first_publication": "1985", "name": "N320", "top_cog_assocs": ["recognition", "reading", "vision"], "top_dis_assocs": ["dyslexia", "TBI", "stroke"]}
<|start_filename|>docs/_data/P180.json<|end_filename|>
{"label": "P180", "n_articles": "75", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "Brain stimulation", "top_journal_count": "5", "top_keywords": ["transcranial magnetic stimulation", "tms-eeg", "tms-evoked potentials", "dorsolateral prefrontal cortex", "schizophrenia"], "first_publication": "1975", "name": "P180", "top_cog_assocs": ["motor", "vision", "somatosensory"], "top_dis_assocs": ["epilepsy", "schizophrenia", "depression"]}
<|start_filename|>docs/_data/LAN.json<|end_filename|>
{"label": "LAN", "n_articles": "57", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Frontiers in psychology", "top_journal_count": "6", "top_keywords": ["p600", "n400", "syntax", "language", "morphology"], "first_publication": "1995", "name": "left anterior negativity", "top_cog_assocs": ["syntax", "reading", "language"], "top_dis_assocs": ["language impairment", "TBI", "dyslexia"]}
<|start_filename|>docs/_data/N140.json<|end_filename|>
{"label": "N140", "n_articles": "177", "top_author_name": "<NAME>", "top_author_count": "26", "top_journal_name": "Clinical neurophysiology : official journal of the International Federation of Clinical Neurophysiology", "top_journal_count": "14", "top_keywords": ["p300", "somatosensory-evoked potentials", "somatosensory evoked potentials", "somatosensory cortex", "tactile"], "first_publication": "1979", "name": "N140", "top_cog_assocs": ["somatosensory", "attention", "vision"], "top_dis_assocs": ["parkinson", "alzheimer", "anxiety"]}
<|start_filename|>docs/_data/Pd.json<|end_filename|>
{"label": "Pd", "n_articles": "24", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "Journal of cognitive neuroscience", "top_journal_count": "5", "top_keywords": ["n2pc", "suppression", "visual search", "visual attention", "working memory"], "first_publication": "2009", "name": "distractor positivity", "top_cog_assocs": ["attention", "vision", "spatial"], "top_dis_assocs": ["ADHD", "schizophrenia", "TBI"]}
<|start_filename|>docs/_data/BP.json<|end_filename|>
{"label": "BP", "n_articles": "527", "top_author_name": "<NAME>", "top_author_count": "31", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "44", "top_keywords": ["volition", "free will", "voluntary action", "intention", "libet"], "first_publication": "1965", "name": "readiness potential", "top_cog_assocs": ["motor", "anticipation", "attention"], "top_dis_assocs": ["parkinson", "epilepsy", "stroke"]}
<|start_filename|>docs/_data/PSW.json<|end_filename|>
{"label": "PSW", "n_articles": "112", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Psychophysiology", "top_journal_count": "10", "top_keywords": ["working memory", "slow wave", "affective priming", "development", "p300"], "first_publication": "1985", "name": "positive slow wave", "top_cog_assocs": ["memory", "attention", "vision"], "top_dis_assocs": ["anxiety", "schizophrenia", "alzheimer"]}
<|start_filename|>docs/_data/LPN.json<|end_filename|>
{"label": "LPN", "n_articles": "29", "top_author_name": "<NAME>", "top_author_count": "7", "top_journal_name": "Neuroscience", "top_journal_count": "3", "top_keywords": ["source memory", "recognition memory", "item memory", "episodic memory", "familiarity"], "first_publication": "1989", "name": "late posterior negativity", "top_cog_assocs": ["memory", "recognition", "semantic"], "top_dis_assocs": ["autism", "TBI", "stroke"]}
<|start_filename|>docs/_data/ERN.json<|end_filename|>
{"label": "ERN", "n_articles": "1034", "top_author_name": "<NAME>", "top_author_count": "71", "top_journal_name": "Psychophysiology", "top_journal_count": "107", "top_keywords": ["error-related negativity", "performance monitoring", "error processing", "cognitive control", "anxiety"], "first_publication": "1995", "name": "error related negativity", "top_cog_assocs": ["error", "cognitive control", "attention"], "top_dis_assocs": ["anxiety", "depression", "OCD"]}
<|start_filename|>docs/_data/N110.json<|end_filename|>
{"label": "N110", "n_articles": "21", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "3", "top_keywords": ["empathy", "pain", "brassica napus", "s haplotype", "self-compatibility"], "first_publication": "1982", "name": "N110", "top_cog_assocs": ["somatosensory", "pain", "auditory"], "top_dis_assocs": ["schizophrenia", "addiction", "PTSD"]}
<|start_filename|>docs/_data/N100.json<|end_filename|>
{"label": "N100", "n_articles": "1006", "top_author_name": "<NAME>", "top_author_count": "33", "top_journal_name": "Clinical neurophysiology : official journal of the International Federation of Clinical Neurophysiology", "top_journal_count": "55", "top_keywords": ["schizophrenia", "attention", "p300", "transcranial magnetic stimulation", "p50"], "first_publication": "1980", "name": "N100", "top_cog_assocs": ["auditory", "attention", "vision"], "top_dis_assocs": ["schizophrenia", "alzheimer", "psychosis"]}
<|start_filename|>docs/_data/P220.json<|end_filename|>
{"label": "P220", "n_articles": "37", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "5", "top_keywords": ["complexity", "fraser spiral illusion", "illusion", "shape perception"], "first_publication": "1982", "name": "P220", "top_cog_assocs": ["vision", "somatosensory", "pain"], "top_dis_assocs": ["stroke", "TBI", "addiction"]}
<|start_filename|>docs/_data/PMN.json<|end_filename|>
{"label": "PMN", "n_articles": "11", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Neuropsychologia", "top_journal_count": "3", "top_keywords": ["n400", "bilingualism", "phonological competition", "second language processing", "spoken word recognition"], "first_publication": "2009", "name": "phonological mapping negativity", "top_cog_assocs": ["phonology", "language", "speech"], "top_dis_assocs": ["language impairment", "stroke", "TBI"]}
<|start_filename|>docs/_data/EDAN.json<|end_filename|>
{"label": "EDAN", "n_articles": "22", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Neuropsychologia", "top_journal_count": "5", "top_keywords": ["adan", "spatial attention", "attention", "n2pc", "spatial neglect"], "first_publication": "2003", "name": "early directing attention negativity", "top_cog_assocs": ["attention", "spatial", "vision"], "top_dis_assocs": ["autism", "TBI", "stroke"]}
<|start_filename|>docs/_data/NSW.json<|end_filename|>
{"label": "NSW", "n_articles": "102", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Psychophysiology", "top_journal_count": "12", "top_keywords": ["working memory", "attention", "p300", "n400", "emotion"], "first_publication": "1966", "name": "negative slow wave", "top_cog_assocs": ["memory", "vision", "auditory"], "top_dis_assocs": ["schizophrenia", "epilepsy", "ADHD"]}
<|start_filename|>docs/_data/P250.json<|end_filename|>
{"label": "P250", "n_articles": "86", "top_author_name": "<NAME>", "top_author_count": "6", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "7", "top_keywords": ["n170", "p100", "brain-computer interface", "categorization of images", "component p300"], "first_publication": "1985", "name": "P250", "top_cog_assocs": ["vision", "auditory", "attention"], "top_dis_assocs": ["ADHD", "anxiety", "depression"]}
<|start_filename|>docs/_data/CDA.json<|end_filename|>
{"label": "CDA", "n_articles": "158", "top_author_name": "<NAME>", "top_author_count": "22", "top_journal_name": "Journal of cognitive neuroscience", "top_journal_count": "20", "top_keywords": ["visual working memory", "working memory", "attention", "contralateral delay activity (cda)", "visual short-term memory"], "first_publication": "2006", "name": "contralateral delay activity", "top_cog_assocs": ["memory", "working memory", "vision"], "top_dis_assocs": ["anxiety", "ADHD", "MCI"]}
<|start_filename|>docs/_data/N150.json<|end_filename|>
{"label": "N150", "n_articles": "117", "top_author_name": "<NAME>", "top_author_count": "9", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "7", "top_keywords": ["visual evoked potentials", "c1", "evoked potentials", "somatosensory evoked potentials", "absolute reliability"], "first_publication": "1979", "name": "N150", "top_cog_assocs": ["vision", "pain", "somatosensory"], "top_dis_assocs": ["epilepsy", "schizophrenia", "migraine"]}
<|start_filename|>docs/_includes/sidebar.html<|end_filename|>
</section>
<aside id="sidebar">
<p class="site-link"><a href={{ site.baseurl }}/index.html>Home</a></p>
<p class="site-link"><a href={{ site.baseurl }}/methods.html>Methods</a></p>
<p class="site-link"><a href={{ site.baseurl }}/network.html>Network</a></p>
<p class="site-link"><a href={{ site.baseurl }}/cognitive.html>Cognition</a></p>
<p class="site-link"><a href={{ site.baseurl }}/disorders.html>Disorders</a></p>
<p class="site-link"><a href={{ site.baseurl }}/words.html>Components</a></p>
{% for post in site.posts %}
<p class="site-link-sub"><a href={{ site.baseurl }}{{ post.url }}>{{ post.title }}</a></p>
{% endfor %}
<br>
<br>
{% if site.github.is_project_page %}
<p class="repo-owner"><a href="{{ site.github.repository_url }}">{{ site.github.repository_name }}</a> is maintained by <a href="https://tomdonoghue.github.io/"><NAME></a>.</p>
{% endif %}
<br>
<br>
</aside>
<br>
<br>
</div>
</div>
</body>
</html>
<|start_filename|>docs/_data/PCN.json<|end_filename|>
{"label": "PCN", "n_articles": "62", "top_author_name": "<NAME>", "top_author_count": "19", "top_journal_name": "Psychophysiology", "top_journal_count": "12", "top_keywords": ["attention", "n2pc", "spcn", "visual search", "visual working memory"], "first_publication": "2002", "name": "posterior contralateral negativity", "top_cog_assocs": ["vision", "attention", "memory"], "top_dis_assocs": ["anxiety", "ADHD", "depression"]}
<|start_filename|>docs/_layouts/assoc.html<|end_filename|>
{% include header.html %}
{{ content }}
<h3> The highest associated components for each association term are: </h3>
<ul>
{% for data in site.data[page.title] %}
{% for label in data %}
{% if label[0] == null %}
{% continue %}
{% endif %}
<li> {{ data[0] }}: {{ label[0] }}, {{ label[1] }}, {{ label[2] }} </li>
{% endfor %}
{% endfor %}
</ul>
{% include sidebar.html %}
<|start_filename|>docs/_data/SP.json<|end_filename|>
{"label": "SP", "n_articles": "33", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Psychophysiology", "top_journal_count": "7", "top_keywords": ["visual attention", "attention-deficit/hyperactivity disorder", "p300", "contingent negative variation", "theta activity"], "first_publication": "1993", "name": "selection positivity", "top_cog_assocs": ["attention", "vision", "spatial"], "top_dis_assocs": ["ADHD", "alcoholism", "schizophrenia"]}
<|start_filename|>docs/_data/P3b.json<|end_filename|>
{"label": "P3b", "n_articles": "1061", "top_author_name": "<NAME>", "top_author_count": "34", "top_journal_name": "Psychophysiology", "top_journal_count": "83", "top_keywords": ["p300", "attention", "p3a", "n2", "p3"], "first_publication": "1978", "name": "P3b", "top_cog_assocs": ["attention", "auditory", "vision"], "top_dis_assocs": ["schizophrenia", "depression", "ADHD"]}
<|start_filename|>docs/_data/CRN.json<|end_filename|>
{"label": "CRN", "n_articles": "96", "top_author_name": "<NAME>", "top_author_count": "16", "top_journal_name": "Psychophysiology", "top_journal_count": "12", "top_keywords": ["error-related negativity", "performance monitoring", "ern", "error positivity", "error-related negativity (ern)"], "first_publication": "2002", "name": "correct related negativity", "top_cog_assocs": ["error", "conflict", "cognitive control"], "top_dis_assocs": ["anxiety", "OCD", "schizophrenia"]}
<|start_filename|>docs/_data/N350.json<|end_filename|>
{"label": "N350", "n_articles": "82", "top_author_name": "<NAME>", "top_author_count": "11", "top_journal_name": "International journal of psychophysiology : official journal of the International Organization of Psychophysiology", "top_journal_count": "10", "top_keywords": ["sleep", "brain-computer interface", "categorization of images", "component p300", "oddball paradigm"], "first_publication": "1990", "name": "N350", "top_cog_assocs": ["vision", "auditory", "semantic"], "top_dis_assocs": ["TBI", "depression", "alcoholism"]}
<|start_filename|>docs/_data/LRP.json<|end_filename|>
{"label": "LRP", "n_articles": "305", "top_author_name": "<NAME>", "top_author_count": "22", "top_journal_name": "Psychophysiology", "top_journal_count": "52", "top_keywords": ["attention", "n2", "lateralized readiness potential (lrp)", "inhibitory control", "aging"], "first_publication": "1988", "name": "lateralized readiness potential", "top_cog_assocs": ["motor", "anticipation", "attention"], "top_dis_assocs": ["parkinson", "schizophrenia", "ADHD"]}
<|start_filename|>docs/_data/P190.json<|end_filename|>
{"label": "P190", "n_articles": "26", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "British journal of haematology", "top_journal_count": "2", "top_keywords": ["n/p190", "n400", "masked conceptual priming", "masked repetition priming", "visual object processing"], "first_publication": "1981", "name": "P190", "top_cog_assocs": ["somatosensory", "semantic", "vision"], "top_dis_assocs": ["dyslexia", "bipolar", "intellectual disability"]}
<|start_filename|>docs/_data/N250.json<|end_filename|>
{"label": "N250", "n_articles": "269", "top_author_name": "<NAME>", "top_author_count": "23", "top_journal_name": "Neuropsychologia", "top_journal_count": "27", "top_keywords": ["n170", "face recognition", "n400", "face processing", "attention"], "first_publication": "1983", "name": "N250", "top_cog_assocs": ["recognition", "face", "vision"], "top_dis_assocs": ["schizophrenia", "dyslexia", "autism"]}
<|start_filename|>docs/_data/N230.json<|end_filename|>
{"label": "N230", "n_articles": "11", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "The International journal of neuroscience", "top_journal_count": "2", "top_keywords": ["alzheimer\u2019s disease", "erp n170", "erp n230", "erp p100", "erp vpp"], "first_publication": "1975", "name": "N230", "top_cog_assocs": ["vision", "attention", "emotion"], "top_dis_assocs": ["alzheimer", "autism", "TBI"]}
<|start_filename|>docs/_data/P400.json<|end_filename|>
{"label": "P400", "n_articles": "167", "top_author_name": "<NAME>", "top_author_count": "12", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "9", "top_keywords": ["infancy", "face processing", "autism", "infant", "faces"], "first_publication": "1977", "name": "P400", "top_cog_assocs": ["face", "vision", "attention"], "top_dis_assocs": ["autism", "epilepsy", "anxiety"]}
<|start_filename|>docs/_data/LPC.json<|end_filename|>
{"label": "LPC", "n_articles": "649", "top_author_name": "<NAME>", "top_author_count": "29", "top_journal_name": "Psychophysiology", "top_journal_count": "50", "top_keywords": ["n400", "emotion", "p300", "aging", "attention"], "first_publication": "1969", "name": "late positive component", "top_cog_assocs": ["memory", "vision", "semantic"], "top_dis_assocs": ["schizophrenia", "depression", "alzheimer"]}
<|start_filename|>docs/_data/LDAP.json<|end_filename|>
{"label": "LDAP", "n_articles": "15", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "Journal of cognitive neuroscience", "top_journal_count": "3", "top_keywords": ["attention", "spatial neglect", "beta-band", "cnv", "erl"], "first_publication": "2004", "name": "late directing attention positivity", "top_cog_assocs": ["attention", "vision", "spatial"], "top_dis_assocs": ["TBI", "stroke", "addiction"]}
<|start_filename|>docs/_data/N2b.json<|end_filename|>
{"label": "N2b", "n_articles": "236", "top_author_name": "<NAME>", "top_author_count": "13", "top_journal_name": "Psychophysiology", "top_journal_count": "21", "top_keywords": ["p3", "attention", "principal components analysis (pca)", "n2", "novelty"], "first_publication": "1983", "name": "N2b", "top_cog_assocs": ["attention", "auditory", "vision"], "top_dis_assocs": ["schizophrenia", "depression", "psychosis"]}
<|start_filename|>docs/_data/P500.json<|end_filename|>
{"label": "P500", "n_articles": "51", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "3", "top_keywords": ["asia", "developing countries", "philippines", "southeastern asia", "age"], "first_publication": "1980", "name": "P500", "top_cog_assocs": ["vision", "attention", "reading"], "top_dis_assocs": ["depression", "ADHD", "schizophrenia"]}
<|start_filename|>docs/_data/Pe.json<|end_filename|>
{"label": "Pe", "n_articles": "307", "top_author_name": "<NAME>", "top_author_count": "19", "top_journal_name": "Biological psychology", "top_journal_count": "30", "top_keywords": ["error-related negativity", "performance monitoring", "error processing", "cognitive control", "ern"], "first_publication": "1998", "name": "error related positivity", "top_cog_assocs": ["error", "conscious", "attention"], "top_dis_assocs": ["depression", "ADHD", "schizophrenia"]}
<|start_filename|>docs/_data/MMN.json<|end_filename|>
{"label": "MMN", "n_articles": "3685", "top_author_name": "N\u00e4\u00e4t\u0<NAME>", "top_author_count": "258", "top_journal_name": "Neuroreport", "top_journal_count": "180", "top_keywords": ["mismatch negativity (mmn)", "schizophrenia", "predictive coding", "multifocal motor neuropathy", "attention"], "first_publication": "1970", "name": "MMN", "top_cog_assocs": ["auditory", "attention", "memory"], "top_dis_assocs": ["schizophrenia", "psychosis", "dyslexia"]}
<|start_filename|>docs/_data/N290.json<|end_filename|>
{"label": "N290", "n_articles": "51", "top_author_name": "<NAME>", "top_author_count": "10", "top_journal_name": "Developmental cognitive neuroscience", "top_journal_count": "7", "top_keywords": ["infants", "face processing", "infancy", "faces", "emotion"], "first_publication": "1991", "name": "N290", "top_cog_assocs": ["face", "attention", "social"], "top_dis_assocs": ["autism", "epilepsy", "anxiety"]}
<|start_filename|>docs/_data/P340.json<|end_filename|>
{"label": "P340", "n_articles": "10", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "PloS one", "top_journal_count": "2", "top_keywords": ["auditory processing", "late discriminative negativity (ldn)", "mismatch negativity (mmn)", "n250", "t-complex"], "first_publication": "1989", "name": "P340", "top_cog_assocs": ["somatosensory", "pain", "perception"], "top_dis_assocs": ["stroke", "language impairment", "autism"]}
<|start_filename|>docs/_data/P240.json<|end_filename|>
{"label": "P240", "n_articles": "27", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "Clinical neurophysiology : official journal of the International Federation of Clinical Neurophysiology", "top_journal_count": "2", "top_keywords": ["somatosensory cortex", "touch", "chinese three-character verb\u2013object metaphor", "contextual effect", "n400"], "first_publication": "1984", "name": "P240", "top_cog_assocs": ["somatosensory", "reading", "pain"], "top_dis_assocs": ["depression", "dyslexia", "TBI"]}
<|start_filename|>docs/_data/N2a.json<|end_filename|>
{"label": "N2a", "n_articles": "95", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Neuroscience letters", "top_journal_count": "4", "top_keywords": ["p3", "autophagy", "horse", "apoptosis", "axon degeneration"], "first_publication": "1976", "name": "N2a", "top_cog_assocs": ["auditory", "attention", "memory"], "top_dis_assocs": ["alzheimer", "depression", "epilepsy"]}
<|start_filename|>docs/_data/SN.json<|end_filename|>
{"label": "SN", "n_articles": "76", "top_author_name": "<NAME>", "top_author_count": "8", "top_journal_name": "Neuropsychologia", "top_journal_count": "6", "top_keywords": ["attention", "selective attention", "category learning", "swloreta", "selection negativity (sn)"], "first_publication": "1993", "name": "selection negativity", "top_cog_assocs": ["attention", "vision", "spatial"], "top_dis_assocs": ["schizophrenia", "ADHD", "alcoholism"]}
<|start_filename|>docs/_data/ELAN.json<|end_filename|>
{"label": "ELAN", "n_articles": "33", "top_author_name": "<NAME>", "top_author_count": "18", "top_journal_name": "Journal of cognitive neuroscience", "top_journal_count": "5", "top_keywords": ["language", "auditory system", "disorders of consciousness", "grammatical tone", "morphosyntax"], "first_publication": "1996", "name": "early left anterior negativity", "top_cog_assocs": ["syntax", "language", "comprehension"], "top_dis_assocs": ["language impairment", "schizophrenia", "TBI"]}
<|start_filename|>docs/_data/N160.json<|end_filename|>
{"label": "N160", "n_articles": "55", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Clinical neurophysiology : official journal of the International Federation of Clinical Neurophysiology", "top_journal_count": "6", "top_keywords": ["dorsal attention network", "effective connectivity", "executive control network", "neuroplasticity", "visual working memory training"], "first_publication": "1984", "name": "N160", "top_cog_assocs": ["vision", "motor", "memory"], "top_dis_assocs": ["schizophrenia", "parkinson", "autism"]}
<|start_filename|>docs/_data/SPeN.json<|end_filename|>
{"label": "SPeN", "n_articles": "128", "top_author_name": "<NAME>", "top_author_count": "13", "top_journal_name": "Psychophysiology", "top_journal_count": "28", "top_keywords": ["stimulus-preceding negativity", "feedback-related negativity", "reward", "anticipation", "spn"], "first_publication": "1988", "name": "stimulus preceding negativity", "top_cog_assocs": ["anticipation", "reward", "attention"], "top_dis_assocs": ["anxiety", "depression", "schizophrenia"]}
<|start_filename|>docs/_data/EFN.json<|end_filename|>
{"label": "EFN", "n_articles": "10", "top_author_name": "<NAME>", "top_author_count": "1", "top_journal_name": "Psychophysiology", "top_journal_count": "2", "top_keywords": ["cognition", "learning", "adolescence", "attention", "distraction"], "first_publication": "2003", "name": "early frontal negativity", "top_cog_assocs": ["auditory", "vision", "attention"], "top_dis_assocs": ["TBI", "stroke", "addiction"]}
<|start_filename|>docs/_data/FN400.json<|end_filename|>
{"label": "FN400", "n_articles": "170", "top_author_name": "<NAME>", "top_author_count": "16", "top_journal_name": "Brain research", "top_journal_count": "17", "top_keywords": ["familiarity", "recollection", "recognition memory", "episodic memory", "lpc"], "first_publication": "1999", "name": "FN400", "top_cog_assocs": ["recognition", "memory", "semantic"], "top_dis_assocs": ["schizophrenia", "MCI", "alzheimer"]}
<|start_filename|>docs/_data/N550.json<|end_filename|>
{"label": "N550", "n_articles": "49", "top_author_name": "<NAME>", "top_author_count": "20", "top_journal_name": "Sleep", "top_journal_count": "8", "top_keywords": ["k-complex", "consciousness", "down state", "gamma activity", "sensory processing"], "first_publication": "1992", "name": "N550", "top_cog_assocs": ["auditory", "motor", "arousal"], "top_dis_assocs": ["alcoholism", "stroke", "alzheimer"]}
<|start_filename|>docs/_data/LDN.json<|end_filename|>
{"label": "LDN", "n_articles": "37", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "International journal of psychophysiology : official journal of the International Organization of Psychophysiology", "top_journal_count": "4", "top_keywords": ["mismatch negativity", "children", "mmn", "p3a", "dyslexia"], "first_publication": "2001", "name": "late discriminative negativity", "top_cog_assocs": ["auditory", "speech", "language"], "top_dis_assocs": ["dyslexia", "ADHD", "autism"]}
<|start_filename|>docs/_data/ORN.json<|end_filename|>
{"label": "ORN", "n_articles": "37", "top_author_name": "<NAME>", "top_author_count": "16", "top_journal_name": "Journal of cognitive neuroscience", "top_journal_count": "4", "top_keywords": ["auditory scene analysis", "object-related negativity (orn)", "concurrent sound segregation", "binaural processing", "object-related negativity"], "first_publication": "2001", "name": "object related negativity", "top_cog_assocs": ["auditory", "perception", "attention"], "top_dis_assocs": ["autism", "schizophrenia", "TBI"]}
<|start_filename|>docs/_data/P260.json<|end_filename|>
{"label": "P260", "n_articles": "37", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "Psychophysiology", "top_journal_count": "3", "top_keywords": ["pain", "cpm", "dnic", "selective attention", "moral decision-making"], "first_publication": "1981", "name": "P260", "top_cog_assocs": ["pain", "somatosensory", "attention"], "top_dis_assocs": ["anxiety", "migraine", "TBI"]}
<|start_filename|>docs/_data/P900.json<|end_filename|>
{"label": "P900", "n_articles": "21", "top_author_name": "<NAME>", "top_author_count": "5", "top_journal_name": "Sleep", "top_journal_count": "3", "top_keywords": ["k-complex", "consciousness", "down state", "gamma activity", "sensory processing"], "first_publication": "1992", "name": "P900", "top_cog_assocs": ["motor", "attention", "auditory"], "top_dis_assocs": ["alcoholism", "stroke", "addiction"]}
<|start_filename|>docs/_data/N120.json<|end_filename|>
{"label": "N120", "n_articles": "51", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "5", "top_keywords": ["attention", "movement", "cortical reactivity", "transcranial direct current stimulation", "evoked potentials"], "first_publication": "1975", "name": "N120", "top_cog_assocs": ["vision", "attention", "somatosensory"], "top_dis_assocs": ["schizophrenia", "depression", "parkinson"]}
<|start_filename|>docs/_data/P3a.json<|end_filename|>
{"label": "P3a", "n_articles": "979", "top_author_name": "<NAME>", "top_author_count": "41", "top_journal_name": "Psychophysiology", "top_journal_count": "53", "top_keywords": ["attention", "p300", "mismatch negativity", "p3b", "schizophrenia"], "first_publication": "1975", "name": "P3a", "top_cog_assocs": ["attention", "auditory", "vision"], "top_dis_assocs": ["schizophrenia", "depression", "psychosis"]}
<|start_filename|>docs/_data/N270.json<|end_filename|>
{"label": "N270", "n_articles": "67", "top_author_name": "<NAME>", "top_author_count": "29", "top_journal_name": "Neuroscience letters", "top_journal_count": "9", "top_keywords": ["n400", "event-related potential (erp)", "change detection", "brand extension", "stereotype"], "first_publication": "1990", "name": "N270", "top_cog_assocs": ["vision", "conflict", "attention"], "top_dis_assocs": ["MCI", "dementia", "epilepsy"]}
<|start_filename|>docs/_data/N2pc.json<|end_filename|>
{"label": "N2pc", "n_articles": "507", "top_author_name": "<NAME>", "top_author_count": "69", "top_journal_name": "Psychophysiology", "top_journal_count": "84", "top_keywords": ["attention", "visual search", "attentional capture", "visual attention", "working memory"], "first_publication": "1994", "name": "N2pc", "top_cog_assocs": ["attention", "vision", "spatial"], "top_dis_assocs": ["anxiety", "MCI", "ADHD"]}
<|start_filename|>docs/_data/P270.json<|end_filename|>
{"label": "P270", "n_articles": "14", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "4", "top_keywords": ["directed transfer function/adaptive directed transfer function", "stereoscopic depth", "visual discomfort", "visual evoked potential", "face"], "first_publication": "1978", "name": "P270", "top_cog_assocs": ["vision", "somatosensory", "auditory"], "top_dis_assocs": ["schizophrenia", "alzheimer", "bipolar"]}
<|start_filename|>docs/_data/LPoP.json<|end_filename|>
{"label": "LPoP", "n_articles": "24", "top_author_name": "<NAME>", "top_author_count": "3", "top_journal_name": "Brain research. Cognitive brain research", "top_journal_count": "3", "top_keywords": ["n400", "eeg oscillations", "empathy", "information processing", "pain"], "first_publication": "2005", "name": "late posterior positivity", "top_cog_assocs": ["language", "reading", "comprehension"], "top_dis_assocs": ["coma", "TBI", "stroke"]}
<|start_filename|>docs/_data/P600.json<|end_filename|>
{"label": "P600", "n_articles": "716", "top_author_name": "<NAME>", "top_author_count": "42", "top_journal_name": "Brain research", "top_journal_count": "60", "top_keywords": ["n400", "syntax", "sentence processing", "semantics", "language"], "first_publication": "1976", "name": "P600", "top_cog_assocs": ["language", "syntax", "semantic"], "top_dis_assocs": ["schizophrenia", "language impairment", "epilepsy"]}
<|start_filename|>docs/_data/P120.json<|end_filename|>
{"label": "P120", "n_articles": "61", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "4", "top_keywords": ["phosphorus application", "photosynthetic characteristics", "population structure", "uniform seeding pattern", "winter wheat"], "first_publication": "1980", "name": "P120", "top_cog_assocs": ["vision", "emotion", "face"], "top_dis_assocs": ["epilepsy", "depression", "schizophrenia"]}
<|start_filename|>docs/_data/RN.json<|end_filename|>
{"label": "RN", "n_articles": "11", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "Psychophysiology", "top_journal_count": "2", "top_keywords": ["bistable perception", "necker cube", "perceptual multistability", "reversible figures", "sensation/perception"], "first_publication": "2007", "name": "reversal negativity", "top_cog_assocs": ["perception", "vision", "attention"], "top_dis_assocs": ["TBI", "stroke", "addiction"]}
<|start_filename|>docs/_data/SPN.json<|end_filename|>
{"label": "SPN", "n_articles": "19", "top_author_name": "<NAME>", "top_author_count": "14", "top_journal_name": "The European journal of neuroscience", "top_journal_count": "3", "top_keywords": ["symmetry", "holographic model", "perceptual goodness", "reflection", "luminance polarity"], "first_publication": "2003", "name": "sustained posterior negativity", "top_cog_assocs": ["vision", "perception", "attention"], "top_dis_assocs": ["TBI", "stroke", "addiction"]}
<|start_filename|>docs/_data/N135.json<|end_filename|>
{"label": "N135", "n_articles": "45", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "Prze<NAME>", "top_journal_count": "5", "top_keywords": ["visual evoked potential", "visual evoked potentials", "neural conductivity", "acute lymphoblastic leukemia", "children"], "first_publication": "1982", "name": "N135", "top_cog_assocs": ["vision", "auditory", "spatial"], "top_dis_assocs": ["multiple sclerosis", "migraine", "epilepsy"]}
<|start_filename|>docs/_data/LPP.json<|end_filename|>
{"label": "LPP", "n_articles": "953", "top_author_name": "<NAME>", "top_author_count": "67", "top_journal_name": "Biological psychology", "top_journal_count": "80", "top_keywords": ["emotion", "emotion regulation", "attention", "late positive potential (lpp)", "depression"], "first_publication": "1976", "name": "late positive potential", "top_cog_assocs": ["emotion", "attention", "valence"], "top_dis_assocs": ["anxiety", "depression", "schizophrenia"]}
<|start_filename|>docs/_data/P200.json<|end_filename|>
{"label": "P200", "n_articles": "834", "top_author_name": "<NAME>", "top_author_count": "22", "top_journal_name": "Clinical neurophysiology : official journal of the International Federation of Clinical Neurophysiology", "top_journal_count": "35", "top_keywords": ["n400", "p300", "n170", "n100", "attention"], "first_publication": "1974", "name": "P200", "top_cog_assocs": ["auditory", "attention", "vision"], "top_dis_assocs": ["schizophrenia", "depression", "alzheimer"]}
<|start_filename|>docs/_data/VPP.json<|end_filename|>
{"label": "VPP", "n_articles": "58", "top_author_name": "<NAME>", "top_author_count": "4", "top_journal_name": "PloS one", "top_journal_count": "6", "top_keywords": ["n170", "face processing", "face", "encoding", "working memory"], "first_publication": "1999", "name": "vertex positive potential", "top_cog_assocs": ["face", "vision", "emotion"], "top_dis_assocs": ["depression", "PTSD", "parkinson"]}
<|start_filename|>docs/_layouts/erp.html<|end_filename|>
{% include header.html %}
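{% comment %}
Renders a single ERP component page. Every field comes from the matching JSON file in
docs/_data, looked up via page.title (for example, docs/_data/P300.json for the P300 page).
{% endcomment %}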
<!-- <h1>{{ page.title }} - {{ page.label }} {{ site.data[page.title].name }} </h1> -->
<h1> {{ site.data[page.title].name }} </h1>
<h2> Words Collection Summary </h2>
<img src="{{ site.baseurl }}/assets/ERPs/{{ page.title }}/wc.svg" alt="wordcloud" align="middle">
<h3> The most common keywords for this ERP are: </h3>
<ul>
{% for kw in site.data[page.title].top_keywords %}
<li> {{ kw }} </li>
{% endfor %}
</ul>
<h3> Articles Summary </h3>
<p>
Number of Papers: {{ site.data[page.title].n_articles }} <br/>
First Paper: {{ site.data[page.title].first_publication }} <br/>
Most Common Journal: {{ site.data[page.title].top_journal_name }} ({{ site.data[page.title].top_journal_count }} papers) <br/>
Most Common Author: {{ site.data[page.title].top_author_name }} ({{ site.data[page.title].top_author_count }} papers)
</p>
<h3> Publication History </h3>
<img src="{{ site.baseurl }}/assets/ERPs/{{ page.title }}/hist.svg" alt="publication_years">
<h2> Co-occurrence Collection Summary </h2>
<p> The following information is collected from the co-occurrence analysis. </p>
<h3> The highest cognitive associations are: </h3>
<ul>
{% for assoc in site.data[page.title].top_cog_assocs %}
<li> {{ assoc }} </li>
{% endfor %}
</ul>
<h3> The highest disorder-related associations are: </h3>
<ul>
{% for assoc in site.data[page.title].top_dis_assocs %}
<li> {{ assoc }} </li>
{% endfor %}
</ul>
{% include sidebar.html %}
<|start_filename|>docs/_data/N2c.json<|end_filename|>
{"label": "N2c", "n_articles": "25", "top_author_name": "<NAME>", "top_author_count": "6", "top_journal_name": "Psychophysiology", "top_journal_count": "6", "top_keywords": ["principal components analysis (pca)", "cognitive control", "equiprobable go/no-go task", "decision making", "sequential processing"], "first_publication": "1996", "name": "N2c", "top_cog_assocs": ["attention", "vision", "motor"], "top_dis_assocs": ["schizophrenia", "TBI", "dyslexia"]}
<|start_filename|>docs/_data/CNV.json<|end_filename|>
{"label": "CNV", "n_articles": "1011", "top_author_name": "<NAME>", "top_author_count": "35", "top_journal_name": "Electroencephalography and clinical neurophysiology", "top_journal_count": "65", "top_keywords": ["attention", "contingent negative variation (cnv)", "reward", "p3", "adhd"], "first_publication": "1964", "name": "contingent negative variation", "top_cog_assocs": ["motor", "anticipation", "attention"], "top_dis_assocs": ["migraine", "ADHD", "depression"]}
<|start_filename|>docs/_data/N600.json<|end_filename|>
{"label": "N600", "n_articles": "13", "top_author_name": "<NAME>", "top_author_count": "2", "top_journal_name": "International journal of psychophysiology : official journal of the International Organization of Psychophysiology", "top_journal_count": "2", "top_keywords": ["emotional valence", "global-local processing", "insight problem solving", "africa", "nigeria"], "first_publication": "1985", "name": "N600", "top_cog_assocs": ["emotion", "valence", "auditory"], "top_dis_assocs": ["parkinson", "autism", "depression"]} | TomDonoghue/ERP_SCANR |
<|start_filename|>static/js/documentcloud-visualsearch-605acb3/build/visualsearch.js<|end_filename|>
// This is the annotated source code for
// [VisualSearch.js](http://documentcloud.github.com/visualsearch/),
// a rich search box for real data.
//
// The annotated source HTML is generated by
// [Docco](http://jashkenas.github.com/docco/).
/** @license VisualSearch.js 0.2.1
* (c) 2011 <NAME>, @samuelclay, DocumentCloud Inc.
* VisualSearch.js may be freely distributed under the MIT license.
* For all details and documentation:
* http://documentcloud.github.com/visualsearch
*/
(function() {
var $ = jQuery; // Handle namespaced jQuery
// Setting up VisualSearch globals. These will eventually be made instance-based.
if (!window.VS) window.VS = {};
if (!VS.app) VS.app = {};
if (!VS.ui) VS.ui = {};
if (!VS.model) VS.model = {};
if (!VS.utils) VS.utils = {};
// Sets the version for VisualSearch to be used programmatically elsewhere.
VS.VERSION = '0.2.1';
VS.VisualSearch = function(options) {
var defaults = {
container : '',
query : '',
unquotable : [],
callbacks : {
search : $.noop,
focus : $.noop,
blur : $.noop,
facetMatches : $.noop,
valueMatches : $.noop
}
};
this.options = _.extend({}, defaults, options);
this.options.callbacks = _.extend({}, defaults.callbacks, options.callbacks);
VS.app.hotkeys.initialize();
this.searchQuery = new VS.model.SearchQuery();
this.searchBox = new VS.ui.SearchBox({app: this});
if (options.container) {
var searchBox = this.searchBox.render().el;
$(this.options.container).html(searchBox);
}
this.searchBox.value(this.options.query || '');
// Disable page caching for browsers that incorrectly cache the visual search inputs.
// This forces the browser to re-render the page when it is retrieved from its history.
$(window).bind('unload', function(e) {});
// Gives the user back a reference to the `searchBox` so they
// can use public methods.
return this;
};
// Entry-point used to tie all parts of VisualSearch together. It will either attach
// itself to `options.container`, or pass back the `searchBox` so it can be rendered
// at will.
VS.init = function(options) {
return new VS.VisualSearch(options);
};
})();
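// Illustrative usage sketch (not part of the library, never executed here).
// It assumes a host page with an element matching '.visual_search'; the facet
// names 'account' and 'filter' are placeholders for whatever categories the
// host application actually supports.
//
//   var visualSearch = VS.init({
//     container : $('.visual_search'),
//     query     : '',
//     callbacks : {
//       search       : function(query, searchCollection) { /* run the search */ },
//       facetMatches : function(callback) { callback(['account', 'filter']); },
//       valueMatches : function(category, searchTerm, callback) { callback([]); }
//     }
//   });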
(function() {
var $ = jQuery; // Handle namespaced jQuery
// The search box is responsible for managing the many facet views and input views.
VS.ui.SearchBox = Backbone.View.extend({
id : 'search',
events : {
'click .VS-cancel-search-box' : 'clearSearch',
'mousedown .VS-search-box' : 'maybeFocusSearch',
'dblclick .VS-search-box' : 'highlightSearch',
'click .VS-search-box' : 'maybeTripleClick'
},
// Creating a new SearchBox registers handlers for re-rendering facets when necessary,
// as well as handling typing when a facet is selected.
initialize : function() {
this.app = this.options.app;
this.flags = {
allSelected : false
};
this.facetViews = [];
this.inputViews = [];
_.bindAll(this, 'renderFacets', '_maybeDisableFacets', 'disableFacets',
'deselectAllFacets');
this.app.searchQuery.bind('reset', this.renderFacets);
$(document).bind('keydown', this._maybeDisableFacets);
},
// Renders the search box, but requires placement on the page through `this.el`.
render : function() {
$(this.el).append(JST['search_box']({}));
$(document.body).setMode('no', 'search');
return this;
},
// # Querying Facets #
// Either gets a serialized query string or sets the faceted query from a query string.
value : function(query) {
if (query == null) return this.serialize();
return this.setQuery(query);
},
// Uses the VS.app.searchQuery collection to serialize the current query from the various
// facets that are in the search box.
serialize : function() {
var query = [];
var inputViewsCount = this.inputViews.length;
this.app.searchQuery.each(_.bind(function(facet, i) {
query.push(this.inputViews[i].value());
query.push(facet.serialize());
}, this));
if (inputViewsCount) {
query.push(this.inputViews[inputViewsCount-1].value());
}
return _.compact(query).join(' ');
},
// Takes a query string and uses the SearchParser to parse and render it. Note that
// `VS.app.SearchParser` refreshes the `VS.app.searchQuery` collection, which is bound
// here to call `this.renderFacets`.
setQuery : function(query) {
this.currentQuery = query;
VS.app.SearchParser.parse(this.app, query);
},
// Returns the position of a facet/input view. Useful when moving between facets.
viewPosition : function(view) {
var views = view.type == 'facet' ? this.facetViews : this.inputViews;
var position = _.indexOf(views, view);
if (position == -1) position = 0;
return position;
},
// Used to launch a search. Hitting enter or clicking the search button.
searchEvent : function(e) {
var query = this.value();
this.focusSearch(e);
this.value(query);
this.app.options.callbacks.search(query, this.app.searchQuery);
},
// # Rendering Facets #
// Add a new facet. Facet will be focused and ready to accept a value. Can also
// specify position, in the case of adding facets from an inbetween input.
addFacet : function(category, initialQuery, position) {
category = VS.utils.inflector.trim(category);
initialQuery = VS.utils.inflector.trim(initialQuery || '');
if (!category) return;
var model = new VS.model.SearchFacet({
category : category,
value : initialQuery || '',
app : this.app
});
this.app.searchQuery.add(model, {at: position});
this.renderFacets();
var facetView = _.detect(this.facetViews, function(view) {
if (view.model == model) return true;
});
_.defer(function() {
facetView.enableEdit();
});
},
// Renders each facet as a searchFacet view.
renderFacets : function() {
this.facetViews = [];
this.inputViews = [];
this.$('.VS-search-inner').empty();
this.app.searchQuery.each(_.bind(function(facet, i) {
this.renderFacet(facet, i);
}, this));
// Add on an n+1 empty search input on the very end.
this.renderSearchInput();
},
// Render a single facet, using its category and query value.
renderFacet : function(facet, position) {
var view = new VS.ui.SearchFacet({
app : this.app,
model : facet,
order : position
});
// Input first, facet second.
this.renderSearchInput();
this.facetViews.push(view);
this.$('.VS-search-inner').children().eq(position*2).after(view.render().el);
view.calculateSize();
_.defer(_.bind(view.calculateSize, view));
return view;
},
// Render a single input, used to create and autocomplete facets
renderSearchInput : function() {
var input = new VS.ui.SearchInput({position: this.inputViews.length, app: this.app});
this.$('.VS-search-inner').append(input.render().el);
this.inputViews.push(input);
},
// # Modifying Facets #
// Clears out the search box. Command+A + delete can trigger this, as can a cancel button.
//
// If a `clearSearch` callback was provided, the callback is invoked and
// provided with a function that performs the actual removal of the data. This
// allows third-party developers to either clear data asynchronously, or
// prior to performing their custom "clear" logic.
clearSearch : function(e) {
var actualClearSearch = _.bind(function() {
this.disableFacets();
this.value('');
this.flags.allSelected = false;
this.searchEvent(e);
this.focusSearch(e);
}, this);
if (this.app.options.callbacks.clearSearch) {
this.app.options.callbacks.clearSearch(actualClearSearch);
} else {
actualClearSearch();
}
},
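// Illustrative sketch of an asynchronous `clearSearch` callback, supplied by
// the host application through `VS.init({callbacks: {...}})`; `confirmWithUser`
// is a made-up placeholder for application-specific logic:
//
//   clearSearch : function(actuallyClearSearch) {
//     confirmWithUser().then(actuallyClearSearch);
//   }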
// Command+A selects all facets.
selectAllFacets : function() {
this.flags.allSelected = true;
$(document).one('click.selectAllFacets', this.deselectAllFacets);
_.each(this.facetViews, function(facetView, i) {
facetView.selectFacet();
});
_.each(this.inputViews, function(inputView, i) {
inputView.selectText();
});
},
// Used by facets and input to see if all facets are currently selected.
allSelected : function(deselect) {
if (deselect) this.flags.allSelected = false;
return this.flags.allSelected;
},
// After `selectAllFacets` is engaged, this method is bound to the entire document.
// This immediately disables and deselects all facets, but it also checks if the user
// has clicked on either a facet or an input, and properly selects the view.
deselectAllFacets : function(e) {
this.disableFacets();
if (this.$(e.target).is('.category,input')) {
var el = $(e.target).closest('.search_facet,.search_input');
var view = _.detect(this.facetViews.concat(this.inputViews), function(v) {
return v.el == el[0];
});
if (view.type == 'facet') {
view.selectFacet();
} else if (view.type == 'input') {
_.defer(function() {
view.enableEdit(true);
});
}
}
},
// Disables all facets except for the passed in view. Used when switching between
// facets, so as not to have to keep state of active facets.
disableFacets : function(keepView) {
_.each(this.inputViews, function(view) {
if (view && view != keepView &&
(view.modes.editing == 'is' || view.modes.selected == 'is')) {
view.disableEdit();
}
});
_.each(this.facetViews, function(view) {
if (view && view != keepView &&
(view.modes.editing == 'is' || view.modes.selected == 'is')) {
view.disableEdit();
view.deselectFacet();
}
});
this.flags.allSelected = false;
this.removeFocus();
$(document).unbind('click.selectAllFacets');
},
// Resize all inputs to account for extra keystrokes which may be changing the facet
// width incorrectly. This is a safety check to ensure inputs are correctly sized.
resizeFacets : function(view) {
_.each(this.facetViews, function(facetView, i) {
if (!view || facetView == view) {
facetView.resize();
}
});
},
// Handles keydown events on the document. Used to complete the Cmd+A deletion, and
// blurring focus.
_maybeDisableFacets : function(e) {
if (this.flags.allSelected && VS.app.hotkeys.key(e) == 'backspace') {
e.preventDefault();
this.clearSearch(e);
return false;
} else if (this.flags.allSelected && VS.app.hotkeys.printable(e)) {
this.clearSearch(e);
}
},
// # Focusing Facets #
// Move focus between facets and inputs. Takes a direction as well as many options
// for skipping over inputs and only to facets, placement of cursor position in facet
// (i.e. at the end), and selecting the text in the input/facet.
focusNextFacet : function(currentView, direction, options) {
options = options || {};
var viewCount = this.facetViews.length;
var viewPosition = options.viewPosition || this.viewPosition(currentView);
if (!options.skipToFacet) {
// Correct for bouncing between matching text and facet arrays.
if (currentView.type == 'text' && direction > 0) direction -= 1;
if (currentView.type == 'facet' && direction < 0) direction += 1;
} else if (options.skipToFacet && currentView.type == 'text' &&
viewCount == viewPosition && direction >= 0) {
// Special case of looping around to a facet from the last search input box.
viewPosition = 0;
direction = 0;
}
var view, next = Math.min(viewCount, viewPosition + direction);
if (currentView.type == 'text') {
if (next >= 0 && next < viewCount) {
view = this.facetViews[next];
} else if (next == viewCount) {
view = this.inputViews[this.inputViews.length-1];
}
if (view && options.selectFacet && view.type == 'facet') {
view.selectFacet();
} else if (view) {
view.enableEdit();
view.setCursorAtEnd(direction || options.startAtEnd);
}
} else if (currentView.type == 'facet') {
if (options.skipToFacet) {
if (next >= viewCount || next < 0) {
view = _.last(this.inputViews);
view.enableEdit();
} else {
view = this.facetViews[next];
view.enableEdit();
view.setCursorAtEnd(direction || options.startAtEnd);
}
} else {
view = this.inputViews[next];
view.enableEdit();
}
}
if (options.selectText) view.selectText();
this.resizeFacets();
},
maybeFocusSearch : function(e) {
if ($(e.target).is('.VS-search-box') ||
$(e.target).is('.VS-search-inner') ||
e.type == 'keydown') {
this.focusSearch(e);
}
},
// Bring focus to last input field.
focusSearch : function(e, selectText) {
var view = this.inputViews[this.inputViews.length-1];
view.enableEdit(selectText);
if (!selectText) view.setCursorAtEnd(-1);
if (e.type == 'keydown') {
view.keydown(e);
view.box.trigger('keydown');
}
_.defer(_.bind(function() {
if (!this.$('input:focus').length) {
view.enableEdit(selectText);
}
}, this));
},
// Double-clicking on the search wrapper should select the existing text in
// the last search input. Also start the triple-click timer.
highlightSearch : function(e) {
if ($(e.target).is('.VS-search-box') ||
$(e.target).is('.VS-search-inner') ||
e.type == 'keydown') {
var lastinput = this.inputViews[this.inputViews.length-1];
lastinput.startTripleClickTimer();
this.focusSearch(e, true);
}
},
maybeTripleClick : function(e) {
var lastinput = this.inputViews[this.inputViews.length-1];
return lastinput.maybeTripleClick(e);
},
// Used to show the user is focused on some input inside the search box.
addFocus : function() {
this.app.options.callbacks.focus();
this.$('.VS-search-box').addClass('VS-focus');
},
// User is no longer focused on anything in the search box.
removeFocus : function() {
this.app.options.callbacks.blur();
var focus = _.any(this.facetViews.concat(this.inputViews), function(view) {
return view.isFocused();
});
if (!focus) this.$('.VS-search-box').removeClass('VS-focus');
},
// Show a menu which adds pre-defined facets to the search box. This is unused for now.
showFacetCategoryMenu : function(e) {
e.preventDefault();
e.stopPropagation();
if (this.facetCategoryMenu && this.facetCategoryMenu.modes.open == 'is') {
return this.facetCategoryMenu.close();
}
var items = [
{title: 'Account', onClick: _.bind(this.addFacet, this, 'account', '')},
{title: 'Project', onClick: _.bind(this.addFacet, this, 'project', '')},
{title: 'Filter', onClick: _.bind(this.addFacet, this, 'filter', '')},
{title: 'Access', onClick: _.bind(this.addFacet, this, 'access', '')}
];
var menu = this.facetCategoryMenu || (this.facetCategoryMenu = new dc.ui.Menu({
items : items,
standalone : true
}));
this.$('.VS-icon-search').after(menu.render().open().content);
return false;
}
});
})();
(function() {
var $ = jQuery; // Handle namespaced jQuery
// This is the visual search facet that holds the category and its autocompleted
// input field.
VS.ui.SearchFacet = Backbone.View.extend({
type : 'facet',
className : 'search_facet',
events : {
'click .category' : 'selectFacet',
'keydown input' : 'keydown',
'mousedown input' : 'enableEdit',
'mouseover .VS-icon-cancel' : 'showDelete',
'mouseout .VS-icon-cancel' : 'hideDelete',
'click .VS-icon-cancel' : 'remove'
},
initialize : function(options) {
this.flags = {
canClose : false
};
_.bindAll(this, 'set', 'keydown', 'deselectFacet', 'deferDisableEdit');
},
// Rendering the facet sets up autocompletion, events on blur, and populates
// the facet's input with its starting value.
render : function() {
$(this.el).html(JST['search_facet']({
model : this.model
}));
this.setMode('not', 'editing');
this.setMode('not', 'selected');
this.box = this.$('input');
this.box.val(this.model.get('value'));
this.box.bind('blur', this.deferDisableEdit);
// Handle paste events with `propertychange`
this.box.bind('input propertychange', this.keydown);
this.setupAutocomplete();
return this;
},
// This method is used to setup the facet's input to auto-grow.
// This is deferred in the searchBox so it can be attached to the
// DOM to get the correct font-size.
calculateSize : function() {
this.box.autoGrowInput();
this.box.unbind('updated.autogrow');
this.box.bind('updated.autogrow', _.bind(this.moveAutocomplete, this));
},
// Forces a recalculation of this facet's input field's value. Called when
// the facet is focused, removed, or otherwise modified.
resize : function(e) {
this.box.trigger('resize.autogrow', e);
},
// Watches the facet's input field to see if it matches the beginnings of
// words in `autocompleteValues`, which is different for every category.
// If the value, when selected from the autocompletion menu, is different
// than what it was, commit the facet and search for it.
setupAutocomplete : function() {
this.box.autocomplete({
source : _.bind(this.autocompleteValues, this),
minLength : 0,
delay : 0,
autoFocus : true,
position : {offset : "0 5"},
select : _.bind(function(e, ui) {
e.preventDefault();
var originalValue = this.model.get('value');
this.set(ui.item.value);
if (originalValue != ui.item.value || this.box.val() != ui.item.value) {
this.search(e);
}
return false;
}, this),
open : _.bind(function(e, ui) {
var box = this.box;
this.box.autocomplete('widget').find('.ui-menu-item').each(function() {
var $value = $(this);
if ($value.data('item.autocomplete')['value'] == box.val()) {
box.data('autocomplete').menu.activate(new $.Event("mouseover"), $value);
}
});
}, this)
});
this.box.autocomplete('widget').addClass('VS-interface');
},
// As the facet's input field grows, it may move to the next line in the
// search box. `autoGrowInput` triggers an `updated` event on the input
// field, which is bound to this method to move the autocomplete menu.
moveAutocomplete : function() {
var autocomplete = this.box.data('autocomplete');
if (autocomplete) {
autocomplete.menu.element.position({
my : "left top",
at : "left bottom",
of : this.box.data('autocomplete').element,
collision : "flip",
offset : "0 5"
});
}
},
// When a user enters a facet and it is being edited, immediately show
// the autocomplete menu and size it to match the contents.
searchAutocomplete : function(e) {
var autocomplete = this.box.data('autocomplete');
if (autocomplete) {
var menu = autocomplete.menu.element;
autocomplete.search();
// Resize the menu based on the correctly measured width of what's bigger:
// the menu's original size or the menu items' new size.
menu.outerWidth(Math.max(
menu.width('').outerWidth(),
autocomplete.element.outerWidth()
));
}
},
// Closes the autocomplete menu. Called on disabling, selecting, deselecting,
// and anything else that takes focus out of the facet's input field.
closeAutocomplete : function() {
var autocomplete = this.box.data('autocomplete');
if (autocomplete) autocomplete.close();
},
// Search terms used in the autocomplete menu. These are specific to the facet,
// and only match for the facet's category. The values are then matched on the
// first letter of any word in matches, and finally sorted according to the
// value's own category. You can pass `preserveOrder` as an option in the
// `facetMatches` callback to skip any further ordering done client-side.
autocompleteValues : function(req, resp) {
var category = this.model.get('category');
var value = this.model.get('value');
var searchTerm = req.term;
this.options.app.options.callbacks.valueMatches(category, searchTerm, function(matches, options) {
options = options || {};
matches = matches || [];
if (searchTerm && value != searchTerm) {
var re = VS.utils.inflector.escapeRegExp(searchTerm || '');
var matcher = new RegExp('\\b' + re, 'i');
matches = $.grep(matches, function(item) {
return matcher.test(item) ||
matcher.test(item.value) ||
matcher.test(item.label);
});
}
if (options.preserveOrder) {
resp(matches);
} else {
resp(_.sortBy(matches, function(match) {
if (match == value || match.value == value) return '';
else return match;
}));
}
});
},
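// For instance, a host-supplied `valueMatches` callback that wants to keep its
// own ordering can pass the `preserveOrder` flag described above (the values
// shown here are placeholders):
//
//   valueMatches : function(category, searchTerm, callback) {
//     callback(['draft', 'in review', 'published'], {preserveOrder: true});
//   }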
// Sets the facet's model's value.
set : function(value) {
if (!value) return;
this.model.set({'value': value});
},
// Before the searchBox performs a search, we need to close the
// autocomplete menu.
search : function(e, direction) {
if (!direction) direction = 1;
this.closeAutocomplete();
this.options.app.searchBox.searchEvent(e);
_.defer(_.bind(function() {
this.options.app.searchBox.focusNextFacet(this, direction, {viewPosition: this.options.order});
}, this));
},
// Begin editing the facet's input. This is called when the user enters
// the input either from another facet or directly clicking on it.
//
// This method tells all other facets and inputs to disable so it can have
// the sole focus. It also prepares the autocompletion menu.
enableEdit : function() {
if (this.modes.editing != 'is') {
this.setMode('is', 'editing');
this.deselectFacet();
if (this.box.val() == '') {
this.box.val(this.model.get('value'));
}
}
this.flags.canClose = false;
this.options.app.searchBox.disableFacets(this);
this.options.app.searchBox.addFocus();
_.defer(_.bind(function() {
this.options.app.searchBox.addFocus();
}, this));
this.resize();
this.searchAutocomplete();
this.box.focus();
},
// When the user blurs the input, they may either be going to another input
// or off the search box entirely. If they go to another input, this facet
// will be instantly disabled, and the canClose flag will be turned back off.
//
// However, if the user clicks elsewhere on the page, this method starts a timer
// that checks if any of the other inputs are selected or are being edited. If
// not, then it can finally close itself and its autocomplete menu.
deferDisableEdit : function() {
this.flags.canClose = true;
_.delay(_.bind(function() {
if (this.flags.canClose && !this.box.is(':focus') &&
this.modes.editing == 'is' && this.modes.selected != 'is') {
this.disableEdit();
}
}, this), 250);
},
// Called either by other facets receiving focus or by the timer in `deferDisableEdit`,
// this method will turn off the facet, remove any text selection, and close
// the autocomplete menu.
disableEdit : function() {
var newFacetQuery = VS.utils.inflector.trim(this.box.val());
if (newFacetQuery != this.model.get('value')) {
this.set(newFacetQuery);
}
this.flags.canClose = false;
this.box.selectRange(0, 0);
this.box.blur();
this.setMode('not', 'editing');
this.closeAutocomplete();
this.options.app.searchBox.removeFocus();
},
// Selects the facet, which blurs the facet's input and highlights the facet.
// If this is the only facet being selected (and not part of a select all event),
// we attach a mouse/keyboard watcher to check if the next action by the user
// should delete this facet or just deselect it.
selectFacet : function(e) {
if (e) e.preventDefault();
var allSelected = this.options.app.searchBox.allSelected();
if (this.modes.selected == 'is') return;
if (this.box.is(':focus')) {
this.box.setCursorPosition(0);
this.box.blur();
}
this.flags.canClose = false;
this.closeAutocomplete();
this.setMode('is', 'selected');
this.setMode('not', 'editing');
if (!allSelected || e) {
$(document).unbind('keydown.facet', this.keydown);
$(document).unbind('click.facet', this.deselectFacet);
_.defer(_.bind(function() {
$(document).unbind('keydown.facet').bind('keydown.facet', this.keydown);
$(document).unbind('click.facet').one('click.facet', this.deselectFacet);
}, this));
this.options.app.searchBox.disableFacets(this);
this.options.app.searchBox.addFocus();
}
return false;
},
// Turns off highlighting on the facet. Called in a variety of ways, this
// only deselects the facet if it is selected, and then cleans up the
// keyboard/mouse watchers that were created when the facet was first
// selected.
deselectFacet : function(e) {
if (e) e.preventDefault();
if (this.modes.selected == 'is') {
this.setMode('not', 'selected');
this.closeAutocomplete();
this.options.app.searchBox.removeFocus();
}
$(document).unbind('keydown.facet', this.keydown);
$(document).unbind('click.facet', this.deselectFacet);
return false;
},
// Is the user currently focused in this facet's input field?
isFocused : function() {
return this.box.is(':focus');
},
// Hovering over the delete button styles the facet so the user knows that
// the delete button will kill the entire facet.
showDelete : function() {
$(this.el).addClass('search_facet_maybe_delete');
},
// On `mouseout`, the user is no longer hovering on the delete button.
hideDelete : function() {
$(this.el).removeClass('search_facet_maybe_delete');
},
// When switching between facets, depending on the direction the cursor is
// coming from, the cursor in this facet's input field should match the original
// direction.
setCursorAtEnd : function(direction) {
if (direction == -1) {
this.box.setCursorPosition(this.box.val().length);
} else {
this.box.setCursorPosition(0);
}
},
// Deletes the facet and sends the cursor over to the nearest input field.
remove : function(e) {
var committed = this.model.get('value');
this.deselectFacet();
this.disableEdit();
this.options.app.searchQuery.remove(this.model);
if (committed) {
this.search(e, -1);
} else {
this.options.app.searchBox.renderFacets();
this.options.app.searchBox.focusNextFacet(this, -1, {viewPosition: this.options.order});
}
},
// Selects the text in the facet's input field. When the user tabs between
// facets, convention is to highlight the entire field.
selectText: function() {
this.box.selectRange(0, this.box.val().length);
},
// Handles all keyboard inputs when in the facet's input field. This checks
// for movement between facets and inputs, entering a new value that needs
// to be autocompleted, as well as the removal of this facet.
keydown : function(e) {
var key = VS.app.hotkeys.key(e);
if (key == 'enter' && this.box.val()) {
this.disableEdit();
this.search(e);
} else if (key == 'left') {
if (this.modes.selected == 'is') {
this.deselectFacet();
this.options.app.searchBox.focusNextFacet(this, -1, {startAtEnd: -1});
} else if (this.box.getCursorPosition() == 0 && !this.box.getSelection().length) {
this.selectFacet();
}
} else if (key == 'right') {
if (this.modes.selected == 'is') {
e.preventDefault();
this.deselectFacet();
this.setCursorAtEnd(0);
this.enableEdit();
} else if (this.box.getCursorPosition() == this.box.val().length) {
e.preventDefault();
this.disableEdit();
this.options.app.searchBox.focusNextFacet(this, 1);
}
} else if (VS.app.hotkeys.shift && key == 'tab') {
e.preventDefault();
this.options.app.searchBox.focusNextFacet(this, -1, {
startAtEnd : -1,
skipToFacet : true,
selectText : true
});
} else if (key == 'tab') {
e.preventDefault();
this.options.app.searchBox.focusNextFacet(this, 1, {
skipToFacet : true,
selectText : true
});
} else if (VS.app.hotkeys.command && (e.which == 97 || e.which == 65)) {
e.preventDefault();
this.options.app.searchBox.selectAllFacets();
return false;
} else if (VS.app.hotkeys.printable(e) && this.modes.selected == 'is') {
this.options.app.searchBox.focusNextFacet(this, -1, {startAtEnd: -1});
this.remove(e);
} else if (key == 'backspace') {
if (this.modes.selected == 'is') {
e.preventDefault();
this.remove(e);
} else if (this.box.getCursorPosition() == 0 &&
!this.box.getSelection().length) {
e.preventDefault();
this.selectFacet();
}
}
this.resize(e);
// Handle paste events
if (e.which == null) {
this.searchAutocomplete(e);
_.defer(_.bind(this.resize, this, e));
}
}
});
})();
(function() {
var $ = jQuery; // Handle namespaced jQuery
// This is the visual search input that is responsible for creating new facets.
// There is one input placed in between all facets.
VS.ui.SearchInput = Backbone.View.extend({
type : 'text',
className : 'search_input',
events : {
'keypress input' : 'keypress',
'keydown input' : 'keydown',
'click input' : 'maybeTripleClick',
'dblclick input' : 'startTripleClickTimer'
},
initialize : function() {
this.app = this.options.app;
this.flags = {
canClose : false
};
_.bindAll(this, 'removeFocus', 'addFocus', 'moveAutocomplete', 'deferDisableEdit');
},
// Rendering the input sets up autocomplete, events on focusing and blurring
// the input, and the auto-grow of the input.
render : function() {
$(this.el).html(JST['search_input']({}));
this.setMode('not', 'editing');
this.setMode('not', 'selected');
this.box = this.$('input');
this.box.autoGrowInput();
this.box.bind('updated.autogrow', this.moveAutocomplete);
this.box.bind('blur', this.deferDisableEdit);
this.box.bind('focus', this.addFocus);
this.setupAutocomplete();
return this;
},
// Watches the input and presents an autocompleted menu, taking the
// remainder of the input field and adding a separate facet for it.
//
// See `addTextFacetRemainder` for explanation on how the remainder works.
setupAutocomplete : function() {
this.box.autocomplete({
minLength : 1,
delay : 50,
autoFocus : true,
position : {offset : "0 -1"},
source : _.bind(this.autocompleteValues, this),
select : _.bind(function(e, ui) {
e.preventDefault();
e.stopPropagation();
var remainder = this.addTextFacetRemainder(ui.item.value);
var position = this.options.position + (remainder ? 1 : 0);
this.app.searchBox.addFacet(ui.item.value, '', position);
return false;
}, this)
});
// Renders the results grouped by the categories they belong to.
this.box.data('autocomplete')._renderMenu = function(ul, items) {
var category = '';
_.each(items, _.bind(function(item, i) {
if (item.category && item.category != category) {
ul.append('<li class="ui-autocomplete-category">'+item.category+'</li>');
category = item.category;
}
this._renderItem(ul, item);
}, this));
};
this.box.autocomplete('widget').addClass('VS-interface');
},
// Search terms used in the autocomplete menu. The values are matched on the
// first letter of any word in matches, and finally sorted according to the
// value's own category. You can pass `preserveOrder` as an option in the
// `facetMatches` callback to skip any further ordering done client-side.
autocompleteValues : function(req, resp) {
var searchTerm = req.term;
var lastWord = searchTerm.match(/\w+$/); // Autocomplete only last word.
var re = VS.utils.inflector.escapeRegExp(lastWord && lastWord[0] || ' ');
this.app.options.callbacks.facetMatches(function(prefixes, options) {
options = options || {};
prefixes = prefixes || [];
// Only match from the beginning of the word.
var matcher = new RegExp('^' + re, 'i');
var matches = $.grep(prefixes, function(item) {
return item && matcher.test(item.label || item);
});
if (options.preserveOrder) {
resp(matches);
} else {
resp(_.sortBy(matches, function(match) {
if (match.label) return match.category + '-' + match.label;
else return match;
}));
}
});
},
// Closes the autocomplete menu. Called on disabling, selecting, deselecting,
// and anything else that takes focus out of the facet's input field.
closeAutocomplete : function() {
var autocomplete = this.box.data('autocomplete');
if (autocomplete) autocomplete.close();
},
// As the input field grows, it may move to the next line in the
// search box. `autoGrowInput` triggers an `updated` event on the input
// field, which is bound to this method to move the autocomplete menu.
moveAutocomplete : function() {
var autocomplete = this.box.data('autocomplete');
if (autocomplete) {
autocomplete.menu.element.position({
my : "left top",
at : "left bottom",
of : this.box.data('autocomplete').element,
collision : "none",
offset : '0 -1'
});
}
},
// When a user enters a facet and it is being edited, immediately show
// the autocomplete menu and size it to match the contents.
searchAutocomplete : function(e) {
var autocomplete = this.box.data('autocomplete');
if (autocomplete) {
var menu = autocomplete.menu.element;
autocomplete.search();
// Resize the menu based on the correctly measured width of what's bigger:
// the menu's original size or the menu items' new size.
menu.outerWidth(Math.max(
menu.width('').outerWidth(),
autocomplete.element.outerWidth()
));
}
},
// If a user searches for "word word category", the category would be
// matched and autocompleted, and when selected, the "word word" would
// also be caught as the remainder and then added in its own facet.
addTextFacetRemainder : function(facetValue) {
var boxValue = this.box.val();
var lastWord = boxValue.match(/\b(\w+)$/);
// Only strip the trailing word when one actually exists, then trim the
// surrounding whitespace with a real regular expression.
if (lastWord) {
var matcher = new RegExp(lastWord[0], "i");
if (facetValue.search(matcher) == 0) {
boxValue = boxValue.replace(/\b(\w+)$/, '');
}
}
boxValue = boxValue.replace(/^\s+|\s+$/g, '');
if (boxValue) {
this.app.searchBox.addFacet('text', boxValue, this.options.position);
}
return boxValue;
},
// Directly called to focus the input. This is different from `addFocus`
// because this is not called by a focus event. This instead calls a
// focus event causing the input to become focused.
enableEdit : function(selectText) {
this.addFocus();
if (selectText) {
this.selectText();
}
this.box.focus();
},
// Event called on user focus on the input. Tells all other input and facets
// to give up focus, and starts revving the autocomplete.
addFocus : function() {
this.flags.canClose = false;
if (!this.app.searchBox.allSelected()) {
this.app.searchBox.disableFacets(this);
}
this.app.searchBox.addFocus();
this.setMode('is', 'editing');
this.setMode('not', 'selected');
this.searchAutocomplete();
},
// Directly called to blur the input. This is different from `removeFocus`
// because this is not called by a blur event.
disableEdit : function() {
this.box.blur();
this.removeFocus();
},
// Event called when the user blurs the input, either through the keyboard tabbing
// away or the mouse clicking off. Cleans up the input's modes and closes the
// autocomplete menu.
removeFocus : function() {
this.flags.canClose = false;
this.app.searchBox.removeFocus();
this.setMode('not', 'editing');
this.setMode('not', 'selected');
this.closeAutocomplete();
},
// When the user blurs the input, they may either be going to another input
// or off the search box entirely. If they go to another input, this facet
// will be instantly disabled, and the canClose flag will be turned back off.
//
// However, if the user clicks elsewhere on the page, this method starts a timer
// that checks if any of the other inputs are selected or are being edited. If
// not, then it can finally close itself and its autocomplete menu.
deferDisableEdit : function() {
this.flags.canClose = true;
_.delay(_.bind(function() {
if (this.flags.canClose &&
!this.box.is(':focus') &&
this.modes.editing == 'is') {
this.disableEdit();
}
}, this), 250);
},
// Starts a timer that will cause a triple-click, which highlights all facets.
startTripleClickTimer : function() {
this.tripleClickTimer = setTimeout(_.bind(function() {
this.tripleClickTimer = null;
}, this), 500);
},
// Event on click that checks if a triple click is in play. The
// `tripleClickTimer` is counting down, ready to be engaged and intercept
// the click event to force a select all instead.
maybeTripleClick : function(e) {
if (!!this.tripleClickTimer) {
e.preventDefault();
this.app.searchBox.selectAllFacets();
return false;
}
},
// Is the user currently focused in the input field?
isFocused : function() {
return this.box.is(':focus');
},
// When serializing the facets, the inputs need to also have their values represented,
// in case they contain text that is not yet faceted (but will be once the search is
// completed).
value : function() {
return this.box.val();
},
// When switching between facets and inputs, depending on the direction the cursor
// is coming from, the cursor in this facet's input field should match the original
// direction.
setCursorAtEnd : function(direction) {
if (direction == -1) {
this.box.setCursorPosition(this.box.val().length);
} else {
this.box.setCursorPosition(0);
}
},
// Selects the entire range of text in the input. Useful when tabbing between inputs
// and facets.
selectText : function() {
this.box.selectRange(0, this.box.val().length);
if (!this.app.searchBox.allSelected()) {
this.box.focus();
} else {
this.setMode('is', 'selected');
}
},
// Before the searchBox performs a search, we need to close the
// autocomplete menu.
search : function(e, direction) {
if (!direction) direction = 0;
this.closeAutocomplete();
this.app.searchBox.searchEvent(e);
_.defer(_.bind(function() {
this.app.searchBox.focusNextFacet(this, direction);
}, this));
},
// Callback fired on key press in the search box. We search when they hit return.
keypress : function(e) {
var key = VS.app.hotkeys.key(e);
if (key == 'enter') {
return this.search(e, 100);
} else if (VS.app.hotkeys.colon(e)) {
this.box.trigger('resize.autogrow', e);
var query = this.box.val();
var prefixes = [];
if (this.app.options.callbacks.facetMatches) {
this.app.options.callbacks.facetMatches(function(p) {
prefixes = p;
});
}
var labels = _.map(prefixes, function(prefix) {
if (prefix.label) return prefix.label;
else return prefix;
});
if (_.contains(labels, query)) {
e.preventDefault();
var remainder = this.addTextFacetRemainder(query);
var position = this.options.position + (remainder?1:0);
this.app.searchBox.addFacet(query, '', position);
return false;
}
} else if (key == 'backspace') {
if (this.box.getCursorPosition() == 0 && !this.box.getSelection().length) {
e.preventDefault();
e.stopPropagation();
e.stopImmediatePropagation();
this.app.searchBox.resizeFacets();
return false;
}
}
},
// Handles all keyboard inputs when in the input field. This checks
// for movement between facets and inputs, entering a new value that needs
// to be autocompleted, as well as stepping between facets with backspace.
keydown : function(e) {
var key = VS.app.hotkeys.key(e);
if (key == 'left') {
if (this.box.getCursorPosition() == 0) {
e.preventDefault();
this.app.searchBox.focusNextFacet(this, -1, {startAtEnd: -1});
}
} else if (key == 'right') {
if (this.box.getCursorPosition() == this.box.val().length) {
e.preventDefault();
this.app.searchBox.focusNextFacet(this, 1, {selectFacet: true});
}
} else if (VS.app.hotkeys.shift && key == 'tab') {
e.preventDefault();
this.app.searchBox.focusNextFacet(this, -1, {selectText: true});
} else if (key == 'tab') {
e.preventDefault();
var value = this.box.val();
if (value.length) {
var remainder = this.addTextFacetRemainder(value);
var position = this.options.position + (remainder?1:0);
this.app.searchBox.addFacet(value, '', position);
} else {
this.app.searchBox.focusNextFacet(this, 0, {
skipToFacet: true,
selectText: true
});
}
} else if (VS.app.hotkeys.command &&
String.fromCharCode(e.which).toLowerCase() == 'a') {
e.preventDefault();
this.app.searchBox.selectAllFacets();
return false;
} else if (key == 'backspace' && !this.app.searchBox.allSelected()) {
if (this.box.getCursorPosition() == 0 && !this.box.getSelection().length) {
e.preventDefault();
this.app.searchBox.focusNextFacet(this, -1, {backspace: true});
return false;
}
}
this.box.trigger('resize.autogrow', e);
}
});
})();
(function(){
var $ = jQuery; // Handle namespaced jQuery
// Makes the view enter a mode. Modes have both a 'mode' and a 'group',
// and are mutually exclusive with any other modes in the same group.
// Setting will update the view's modes hash, as well as set an HTML class
// of *[mode]_[group]* on the view's element. Convenient way to swap styles
// and behavior.
Backbone.View.prototype.setMode = function(mode, group) {
this.modes || (this.modes = {});
if (this.modes[group] === mode) return;
$(this.el).setMode(mode, group);
this.modes[group] = mode;
};
})();
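// Illustrative sketch of the resulting class changes, assuming a view whose
// element starts out as <div class="search_facet">:
//
//   view.setMode('is',  'selected');  // class becomes "search_facet is_selected"
//   view.setMode('not', 'selected');  // "is_selected" is swapped for "not_selected"
//
// Modes within the same group are mutually exclusive, so at most one
// "*_selected" class is present at any time.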
(function() {
var $ = jQuery; // Handle namespaced jQuery
// DocumentCloud workspace hotkeys. To tell if a key is currently being pressed,
// just ask `VS.app.hotkeys.[key]` on `keypress`, or ask `VS.app.hotkeys.key(e)`
// on `keydown`.
//
// For the most headache-free way to use this utility, check modifier keys,
// like shift and command, with `VS.app.hotkeys.shift`, and check every other
// key with `VS.app.hotkeys.key(e) == 'key_name'`.
VS.app.hotkeys = {
// Keys that will be mapped to the `hotkeys` namespace.
KEYS: {
'16': 'shift',
'17': 'command',
'91': 'command',
'93': 'command',
'224': 'command',
'13': 'enter',
'37': 'left',
'38': 'upArrow',
'39': 'right',
'40': 'downArrow',
'46': 'delete',
'8': 'backspace',
'9': 'tab',
'188': 'comma'
},
// Binds global keydown and keyup events to listen for keys that match `this.KEYS`.
initialize : function() {
_.bindAll(this, 'down', 'up', 'blur');
$(document).bind('keydown', this.down);
$(document).bind('keyup', this.up);
$(window).bind('blur', this.blur);
},
// On `keydown`, turn on all keys that match.
down : function(e) {
var key = this.KEYS[e.which];
if (key) this[key] = true;
},
// On `keyup`, turn off all keys that match.
up : function(e) {
var key = this.KEYS[e.which];
if (key) this[key] = false;
},
// If an input is blurred, all keys need to be turned off, since they are no longer
// able to modify the document.
blur : function(e) {
for (var key in this.KEYS) this[this.KEYS[key]] = false;
},
// Check a key from an event and return the common english name.
key : function(e) {
return this.KEYS[e.which];
},
// Colon is special, since the value is different between browsers.
colon : function(e) {
var charCode = e.which;
return charCode && String.fromCharCode(charCode) == ":";
},
// Check a key from an event and match it against any known characters.
// The `keyCode` is different depending on the event type: `keydown` vs. `keypress`.
//
// These were determined by looping through every `keyCode` and `charCode` that
// resulted from `keydown` and `keypress` events and counting what was printable.
printable : function(e) {
var code = e.which;
if (e.type == 'keydown') {
if (code == 32 || // space
(code >= 48 && code <= 90) || // 0-9a-z
(code >= 96 && code <= 111) || // 0-9+-/*.
(code >= 186 && code <= 192) || // ;=,-./^
(code >= 219 && code <= 222)) { // (\)'
return true;
}
} else {
// [space]!"#$%&'()*+,-.0-9:;<=>?@A-Z[\]^_`a-z{|} and unicode characters
if ((code >= 32 && code <= 126) ||
(code >= 160 && code <= 500) ||
(String.fromCharCode(code) == ":")) {
return true;
}
}
return false;
}
};
})();
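// Illustrative sketch of how the views later in this file consume the helper:
//
//   $(document).bind('keydown', function(e) {
//     if (VS.app.hotkeys.key(e) == 'enter')                       { /* launch the search */ }
//     if (VS.app.hotkeys.shift && VS.app.hotkeys.key(e) == 'tab') { /* move focus backwards */ }
//     if (VS.app.hotkeys.printable(e))                            { /* a visible character was typed */ }
//   });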
(function() {
var $ = jQuery; // Handle namespaced jQuery
// Naive English transformations on words. Only used for a few transformations
// in VisualSearch.js.
VS.utils.inflector = {
// Delegate to the ECMA5 String.prototype.trim function, if available.
trim : function(s) {
return s.trim ? s.trim() : s.replace(/^\s+|\s+$/g, '');
},
// Escape strings that are going to be used in a regex. Escapes punctuation
// that would be incorrect in a regex.
escapeRegExp : function(s) {
return s.replace(/([.*+?^${}()|[\]\/\\])/g, '\\$1');
}
};
})();
(function() {
var $ = jQuery; // Handle namespaced jQuery
$.fn.extend({
// Makes the selector enter a mode. Modes have both a 'mode' and a 'group',
// and are mutually exclusive with any other modes in the same group.
// Setting will update the view's modes hash, as well as set an HTML class
// of *[mode]_[group]* on the view's element. Convenient way to swap styles
// and behavior.
setMode : function(state, group) {
group = group || 'mode';
var re = new RegExp("\\w+_" + group + "(\\s|$)", 'g');
var mode = (state === null) ? "" : state + "_" + group;
this.each(function() {
this.className = (this.className.replace(re, '')+' '+mode)
.replace(/\s\s/g, ' ');
});
return mode;
},
// When attached to an input element, this will cause the width of the input
// to match its contents. This calculates the width of the contents of the input
// by measuring a hidden shadow div that should match the styling of the input.
autoGrowInput: function() {
return this.each(function() {
var $input = $(this);
var $tester = $('<div />').css({
opacity : 0,
top : -9999,
left : -9999,
position : 'absolute',
whiteSpace : 'nowrap'
}).addClass('VS-input-width-tester').addClass('VS-interface');
// Watch for input value changes on all of these events. `resize`
// event is called explicitly when the input has been changed without
// a single keypress.
var events = 'keydown.autogrow keypress.autogrow ' +
'resize.autogrow change.autogrow';
$input.next('.VS-input-width-tester').remove();
$input.after($tester);
$input.unbind(events).bind(events, function(e, realEvent) {
if (realEvent) e = realEvent;
var value = $input.val();
// Watching for the backspace key is tricky because it may not
// actually be deleting the character, but instead the key gets
// redirected to move the cursor from facet to facet.
if (VS.app.hotkeys.key(e) == 'backspace') {
var position = $input.getCursorPosition();
if (position > 0) value = value.slice(0, position-1) +
value.slice(position, value.length);
} else if (VS.app.hotkeys.printable(e) &&
!VS.app.hotkeys.command) {
value += String.fromCharCode(e.which);
}
value = value.replace(/&/g, '&amp;')
.replace(/\s/g,'&nbsp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;');
$tester.html(value);
$input.width($tester.width() + 3);
$input.trigger('updated.autogrow');
});
// Sets the width of the input on initialization.
$input.trigger('resize.autogrow');
});
},
// Cross-browser method used for calculating where the cursor is in an
// input field.
getCursorPosition: function() {
var position = 0;
var input = this.get(0);
if (document.selection) { // IE
input.focus();
var sel = document.selection.createRange();
var selLen = document.selection.createRange().text.length;
sel.moveStart('character', -input.value.length);
position = sel.text.length - selLen;
} else if (input && $(input).is(':visible') &&
input.selectionStart != null) { // Firefox/Safari
position = input.selectionStart;
}
return position;
},
// A simple proxy for `selectRange` that sets the cursor position in an
// input field.
setCursorPosition: function(position) {
return this.each(function() {
return $(this).selectRange(position, position);
});
},
// Cross-browser way to select text in an input field.
selectRange: function(start, end) {
return this.each(function() {
if (this.setSelectionRange) { // FF/Webkit
this.focus();
this.setSelectionRange(start, end);
} else if (this.createTextRange) { // IE
var range = this.createTextRange();
range.collapse(true);
range.moveEnd('character', end);
range.moveStart('character', start);
if (end - start >= 0) range.select();
}
});
},
// Returns an object that contains the text selection range values for
// an input field.
getSelection: function() {
var input = this[0];
if (input.selectionStart != null) { // FF/Webkit
var start = input.selectionStart;
var end = input.selectionEnd;
return {
start : start,
end : end,
length : end-start,
text : input.value.substr(start, end-start)
};
} else if (document.selection) { // IE
var range = document.selection.createRange();
if (range) {
var textRange = input.createTextRange();
var copyRange = textRange.duplicate();
textRange.moveToBookmark(range.getBookmark());
copyRange.setEndPoint('EndToStart', textRange);
var start = copyRange.text.length;
var end = start + range.text.length;
return {
start : start,
end : end,
length : end-start,
text : range.text
};
}
}
return {start: 0, end: 0, length: 0};
}
});
// Debugging in Internet Explorer. This allows you to use
// `console.log(['message', var1, var2, ...])`. Just remove the `false` and
// add your console.logs. This will automatically stringify objects using
// `JSON.stringify', so you can read what's going out. Think of this as a
// *Diet Firebug Lite Zero with Lemon*.
if ($.browser.msie && false) {
window.console = {};
var _$ied;
window.console.log = function(msg) {
if (_.isArray(msg)) {
var message = msg[0];
var vars = _.map(msg.slice(1), function(arg) {
return JSON.stringify(arg);
}).join(' - ');
}
if(!_$ied){
_$ied = $('<div><ol></ol></div>').css({
'position': 'fixed',
'bottom': 10,
'left': 10,
'zIndex': 20000,
'width': $('body').width() - 80,
'border': '1px solid #000',
'padding': '10px',
'backgroundColor': '#fff',
'fontFamily': 'arial,helvetica,sans-serif',
'fontSize': '11px'
});
$('body').append(_$ied);
}
var $message = $('<li>'+message+' - '+vars+'</li>').css({
'borderBottom': '1px solid #999999'
});
_$ied.find('ol').append($message);
_.delay(function() {
$message.fadeOut(500);
}, 5000);
};
}
})();
(function() {
var $ = jQuery; // Handle namespaced jQuery
// Used to extract keywords and facets from the free text search.
VS.app.SearchParser = {
// Matches `category: "free text"`, with and without quotes.
ALL_FIELDS : /('.+?'|".+?"|[^'"\s]{2}\S*):\s*('.+?'|".+?"|[^'"\s]\S*)/g,
// Matches a single category without the text. Used to correctly extract facets.
CATEGORY : /('.+?'|".+?"|[^'"\s]{2}\S*):\s*/,
// Called to parse a query into a collection of `SearchFacet` models.
parse : function(instance, query) {
var searchFacets = this._extractAllFacets(instance, query);
instance.searchQuery.reset(searchFacets);
return searchFacets;
},
// Walks the query and extracts facets, categories, and free text.
_extractAllFacets : function(instance, query) {
var facets = [];
var originalQuery = query;
while (query) {
var category, value;
originalQuery = query;
var field = this._extractNextField(query);
if (!field) {
category = 'text';
value = this._extractSearchText(query);
query = VS.utils.inflector.trim(query.replace(value, ''));
} else if (field.indexOf(':') != -1) {
category = field.match(this.CATEGORY)[1].replace(/(^['"]|['"]$)/g, '');
value = field.replace(this.CATEGORY, '').replace(/(^['"]|['"]$)/g, '');
query = VS.utils.inflector.trim(query.replace(field, ''));
} else if (field.indexOf(':') == -1) {
category = 'text';
value = field;
query = VS.utils.inflector.trim(query.replace(value, ''));
}
if (category && value) {
var searchFacet = new VS.model.SearchFacet({
category : category,
value : VS.utils.inflector.trim(value),
app : instance
});
facets.push(searchFacet);
}
if (originalQuery == query) break;
}
return facets;
},
// Extracts the first field found, capturing any free text that comes
// before the category.
_extractNextField : function(query) {
var textRe = /^\s*(\S+)\s+(?=\w+:\s?(('.+?'|".+?")|([^'"]{2}\S*)))/;
var textMatch = query.match(textRe);
if (textMatch && textMatch.length >= 1) {
return textMatch[1];
} else {
return this._extractFirstField(query);
}
},
// If there is no free text before the facet, extract the category and value.
_extractFirstField : function(query) {
var fields = query.match(this.ALL_FIELDS);
return fields && fields.length && fields[0];
},
// If the found match is not a category and facet, extract the trimmed free text.
_extractSearchText : function(query) {
query = query || '';
var text = VS.utils.inflector.trim(query.replace(this.ALL_FIELDS, ''));
return text;
}
};
})();
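// Illustrative sketch of the parser's output, assuming `app` is a VisualSearch
// instance with an empty `searchQuery` collection:
//
//   VS.app.SearchParser.parse(app, 'country: "South Africa" account: 5-samuel free text');
//
// resets `app.searchQuery` to three facets:
//
//   {category: 'country', value: 'South Africa'}
//   {category: 'account', value: '5-samuel'}
//   {category: 'text',    value: 'free text'}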
(function() {
var $ = jQuery; // Handle namespaced jQuery
// The model that holds individual search facets and their categories.
// Held in a collection by `VS.app.searchQuery`.
VS.model.SearchFacet = Backbone.Model.extend({
// Extract the category and value and serialize it in preparation for
// turning the entire searchBox into a search query that can be sent
// to the server for parsing and searching.
serialize : function() {
var category = this.quoteCategory(this.get('category'));
var value = VS.utils.inflector.trim(this.get('value'));
if (!value) return '';
if (!_.contains(this.get("app").options.unquotable || [], category) && category != 'text') {
value = this.quoteValue(value);
}
if (category != 'text') {
category = category + ': ';
} else {
category = "";
}
return category + value;
},
// Wrap categories that have spaces or any kind of quote with opposite matching
// quotes to preserve the complex category during serialization.
quoteCategory : function(category) {
var hasDoubleQuote = (/"/).test(category);
var hasSingleQuote = (/'/).test(category);
var hasSpace = (/\s/).test(category);
if (hasDoubleQuote && !hasSingleQuote) {
return "'" + category + "'";
} else if (hasSpace || (hasSingleQuote && !hasDoubleQuote)) {
return '"' + category + '"';
} else {
return category;
}
},
// Wrap values that have quotes in opposite matching quotes. If a value has
// both single and double quotes, just use the double quotes.
quoteValue : function(value) {
var hasDoubleQuote = (/"/).test(value);
var hasSingleQuote = (/'/).test(value);
if (hasDoubleQuote && !hasSingleQuote) {
return "'" + value + "'";
} else {
return '"' + value + '"';
}
}
});
})();
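// Illustrative sketch of serialization, assuming `app` is a VisualSearch
// instance with no `unquotable` categories configured:
//
//   new VS.model.SearchFacet({category: 'account', value: 'John Smith', app: app}).serialize()
//     // => 'account: "John Smith"'
//   new VS.model.SearchFacet({category: 'text', value: 'free text', app: app}).serialize()
//     // => 'free text'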
(function() {
var $ = jQuery; // Handle namespaced jQuery
// Collection which holds all of the individual facets (category: value).
// Used for finding and removing specific facets.
VS.model.SearchQuery = Backbone.Collection.extend({
// Model holds the category and value of the facet.
model : VS.model.SearchFacet,
// Turns all of the facets into a single serialized string.
serialize : function() {
return this.map(function(facet){ return facet.serialize(); }).join(' ');
},
facets : function() {
return this.map(function(facet) {
var value = {};
value[facet.get('category')] = facet.get('value');
return value;
});
},
// Find a facet by its category. Multiple facets with the same category
// is fine, but only the first is returned.
find : function(category) {
var facet = this.detect(function(facet) {
return facet.get('category') == category;
});
return facet && facet.get('value');
},
// Counts the number of times a specific category is in the search query.
count : function(category) {
return this.select(function(facet) {
return facet.get('category') == category;
}).length;
},
// Returns an array of extracted values from each facet in a category.
values : function(category) {
var facets = this.select(function(facet) {
return facet.get('category') == category;
});
return _.map(facets, function(facet) { return facet.get('value'); });
},
// Checks all facets for matches of either a category or both category and value.
has : function(category, value) {
return this.any(function(facet) {
var categoryMatched = facet.get('category') == category;
if (!value) return categoryMatched;
return categoryMatched && facet.get('value') == value;
});
},
// Used to temporarily hide a specific category and serialize the search query.
withoutCategory : function(category) {
return this.map(function(facet) {
if (facet.get('category') != category) return facet.serialize();
}).join(' ');
}
});
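// Illustrative queries against a populated collection (continuing the parser
// sketch above):
//
//   searchQuery.find('account')                 // => '5-samuel' (first match only)
//   searchQuery.count('text')                   // => 1
//   searchQuery.values('country')               // => ['South Africa']
//   searchQuery.has('country', 'South Africa')  // => true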
})();(function(){
window.JST = window.JST || {};
window.JST['search_box'] = _.template('<div class="VS-search">\n <div class="VS-search-box-wrapper VS-search-box">\n <div class="VS-icon VS-icon-search"></div>\n <div class="VS-search-inner"></div>\n <div class="VS-icon VS-icon-cancel VS-cancel-search-box" title="clear search"></div>\n </div>\n</div>');
window.JST['search_facet'] = _.template('<% if (model.has(\'category\')) { %>\n <div class="category"><%= model.get(\'category\') %>:</div>\n<% } %>\n\n<div class="search_facet_input_container">\n <input type="text" class="search_facet_input VS-interface" value="" />\n</div>\n\n<div class="search_facet_remove VS-icon VS-icon-cancel"></div>');
window.JST['search_input'] = _.template('<input type="text" />');
})();
<|start_filename|>static/js/jquery.mmenu.js<|end_filename|>
/*
* jQuery mmenu 2.1.0
*
* Copyright (c) 2013 <NAME>
* www.frebsite.nl
*
* Dual licensed under the MIT and GPL licenses.
* http://en.wikipedia.org/wiki/MIT_License
* http://en.wikipedia.org/wiki/GNU_General_Public_License
*/
(function( $ ) {
// Global nodes
var $wndw = null,
$html = null,
$body = null,
$page = null,
$blck = null;
var $allMenus = null,
$scrollTopNode = null;
// Global variables
var _c, _e, _d;
$.fn.mmenu = function( opts )
{
// First time plugin is fired
if ( !$wndw )
{
$wndw = $(window);
$html = $('html');
$body = $('body');
$allMenus = $();
_c = getClasses();
_e = getEvents();
_d = getDatas();
$.fn.mmenu.useOverflowScrollingFallback( _useOverflowScrollingFallback );
}
// Extend options
opts = extendOptions( opts );
opts = $.extend( true, {}, $.fn.mmenu.defaults, opts );
opts = complementOptions( opts );
return this.each(
function()
{
// STORE VARIABLES
var $menu = $(this),
_opened = false,
_direction = ( opts.slidingSubmenus ) ? 'horizontal' : 'vertical';
$allMenus = $allMenus.add( $menu );
_serialnr++;
// INIT PAGE, MENU, LINKS & LABELS
$page = _initPage( $page, opts.configuration );
$blck = _initBlocker( $blck, $menu, opts.configuration );
$menu = _initMenu( $menu, opts.position, opts.configuration );
_initSubmenus( $menu, _direction, _serialnr );
_initLinks( $menu, opts.onClick, opts.configuration );
_initOpenClose( $menu, $page, opts.slidingSubmenus );
$.fn.mmenu.counters( $menu, opts.counters, opts.configuration );
$.fn.mmenu.search( $menu, opts.searchfield );
// BIND EVENTS
var $subs = $menu.find( 'ul' );
$menu.add( $subs )
.bind(
_e.toggle + ' ' + _e.open + ' ' + _e.close,
function( e )
{
e.preventDefault();
e.stopPropagation();
}
);
// menu-events
$menu
.bind(
_e.toggle,
function( e )
{
return $menu.triggerHandler( _opened ? _e.close : _e.open );
}
)
.bind(
_e.open,
function( e )
{
if ( _opened )
{
return false;
}
_opened = true;
return openMenu( $menu, opts );
}
)
.bind(
_e.close,
function( e )
{
if ( !_opened )
{
return false;
}
_opened = false;
return closeMenu( $menu, opts );
}
);
// submenu-events
if ( _direction == 'horizontal' )
{
$subs
.bind(
_e.toggle,
function( e )
{
return $(this).triggerHandler( _e.open );
}
)
.bind(
_e.open,
function( e )
{
return openSubmenuHorizontal( $(this), opts );
}
)
.bind(
_e.close,
function( e )
{
return closeSubmenuHorizontal( $(this), opts );
}
);
}
else
{
$subs
.bind(
_e.toggle,
function( e )
{
var $t = $(this);
return $t.triggerHandler( $t.parent().hasClass( _c.opened ) ? _e.close : _e.open );
}
)
.bind(
_e.open,
function( e )
{
$(this).parent().addClass( _c.opened );
return 'open';
}
)
.bind(
_e.close,
function( e )
{
$(this).parent().removeClass( _c.opened );
return 'close';
}
);
}
}
);
};
$.fn.mmenu.defaults = {
position : 'left',
slidingSubmenus : true,
onClick : {
close : true,
delayPageload : true,
blockUI : false
},
configuration : {
hardwareAcceleration: true,
selectedClass : 'Selected',
labelClass : 'Label',
counterClass : 'Counter',
pageNodetype : 'div',
menuNodetype : 'nav',
slideDuration : 500
}
};
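// Illustrative call sketch (the selector and option values are examples only,
// not taken from this file; any omitted options fall back to the defaults above):
//
//   $(function() {
//       $('nav#menu').mmenu({
//           position        : 'right',
//           slidingSubmenus : false,
//           onClick         : { close: true, blockUI: false },
//           counters        : true,
//           searchfield     : true
//       });
//   });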
$.fn.mmenu.search = function( $m, opts )
{
// Extend options
if ( typeof opts == 'boolean' )
{
opts = {
add : opts,
search : opts
};
}
else if ( typeof opts == 'string' )
{
opts = {
add : true,
search : true,
placeholder : opts
};
}
if ( typeof opts != 'object' )
{
opts = {};
}
opts = $.extend( true, {}, $.fn.mmenu.search.defaults, opts );
// Add the field
if ( opts.add )
{
var $s = $( '<div class="' + _c.search + '" />' ).prependTo( $m );
$s.append( '<input placeholder="' + opts.placeholder + '" type="text" autocomplete="off" />' );
if ( opts.noResults )
{
$('ul', $m).not( '.' + _c.submenu ).append( '<li class="' + _c.noresults + '">' + opts.noResults + '</li>' );
}
}
// Bind custom events
if ( opts.search )
{
var $s = $('div.' + _c.search, $m),
$i = $('input', $s);
var $labels = $('li.' + _c.label, $m),
$counters = $('em.' + _c.counter, $m),
$items = $('li', $m)
.not( '.' + _c.subtitle )
.not( '.' + _c.label )
.not( '.' + _c.noresults );
var _searchText = '> a';
if ( !opts.showLinksOnly )
{
_searchText += ', > span';
}
$i.bind(
_e.keyup,
function( e )
{
$i.trigger( _e.search );
}
);
$m.bind(
_e.reset + ' ' + _e.search,
function( e )
{
e.preventDefault();
e.stopPropagation();
}
);
$m.bind(
_e.reset,
function( e )
{
$i.val( '' );
$m.trigger( _e.search );
}
);
$m.bind(
_e.search,
function( e, query )
{
if ( typeof query == 'string' )
{
$i.val( query );
}
else
{
query = $i.val().toLowerCase();
}
// search through items
$items.add( $labels ).addClass( _c.noresult );
$items.each(
function()
{
var $t = $(this);
if ( $(_searchText, $t).text().toLowerCase().indexOf( query ) > -1 )
{
$t.add( $t.prevAll( '.' + _c.label ).first() ).removeClass( _c.noresult );
}
}
);
// update parent for submenus
$( $('ul.' + _c.submenu, $m).get().reverse() ).each(
function()
{
var $t = $(this),
$p = null,
id = $t.attr( 'id' ),
$i = $t.find( 'li' )
.not( '.' + _c.subtitle )
.not( '.' + _c.label )
.not( '.' + _c.noresult );
if ( id && id.length )
{
$p = $('a.' + _c.subopen, $m).filter( '[href="#' + id + '"]' ).parent();
}
if ( $i.length )
{
if ( $p )
{
$p.removeClass( _c.noresult );
$p.removeClass( _c.nosubresult );
}
}
else
{
$t.trigger( _e.close );
if ( $p )
{
$p.addClass( _c.nosubresult );
}
}
}
);
// show/hide no results message
$m[ $items.not( '.' + _c.noresult ).length ? 'removeClass' : 'addClass' ]( _c.noresults );
// update counters
$counters.trigger( _e.count );
}
);
}
};
$.fn.mmenu.search.defaults = {
add : false,
search : true,
showLinksOnly : true,
placeholder : 'Search',
noResults : 'No results found.'
};
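// Illustrative sketch: the searchfield option of $.fn.mmenu is passed through to
// this function, so a string enables the field and doubles as its placeholder,
// while an object gives full control (placeholder text below is an example only):
//
//   $('nav#menu').mmenu({ searchfield: 'Find a song' });
//   // or, equivalently:
//   $('nav#menu').mmenu({ searchfield: { add: true, search: true, placeholder: 'Find a song' } });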
$.fn.mmenu.counters = function( $m, opts, conf )
{
// Extend options
if ( typeof opts == 'boolean' )
{
opts = {
add : opts,
count : opts
};
}
if ( typeof opts != 'object' )
{
opts = {};
}
opts = $.extend( true, {}, $.fn.mmenu.counters.defaults, opts );
// Refactor counter class
$('em.' + conf.counterClass, $m).removeClass( conf.counterClass ).addClass( _c.counter );
// Add the counters
if ( opts.add )
{
$('.' + _c.submenu, $m).each(
function()
{
var $s = $(this),
id = $s.attr( 'id' );
if ( id && id.length )
{
var $c = $( '<em class="' + _c.counter + '" />' ),
$a = $('a.' + _c.subopen, $m).filter( '[href="#' + id + '"]' );
if ( !$a.parent().find( 'em.' + _c.counter ).length )
{
$a.before( $c );
}
}
}
);
}
// Bind custom events
if ( opts.count )
{
$('em.' + _c.counter, $m).each(
function()
{
var $c = $(this),
$s = $('ul' + $c.next().attr( 'href' ), $m);
$c.bind(
_e.count,
function( e )
{
e.preventDefault();
e.stopPropagation();
var $lis = $s.children()
.not( '.' + _c.label )
.not( '.' + _c.subtitle )
.not( '.' + _c.noresult )
.not( '.' + _c.noresults );
$c.html( $lis.length );
}
);
}
).trigger( _e.count );
}
};
$.fn.mmenu.counters.defaults = {
add : false,
count : true
};
$.fn.mmenu.useOverflowScrollingFallback = function( use )
{
if ( $html )
{
if ( typeof use == 'boolean' )
{
$html[ use ? 'addClass' : 'removeClass' ]( _c.nooverflowscrolling );
}
return $html.hasClass( _c.nooverflowscrolling );
}
else
{
_useOverflowScrollingFallback = use;
return use;
}
};
$.fn.mmenu.support = {
touch: (function() {
return 'ontouchstart' in window.document;
})(),
overflowscrolling: (function() {
return 'WebkitOverflowScrolling' in window.document.documentElement.style;
})(),
oldAndroid: (function() {
var ua = navigator.userAgent;
if ( ua.indexOf( 'Android' ) >= 0 )
{
return 2.4 > parseFloat( ua.slice( ua.indexOf( 'Android' ) +8 ) );
}
return false;
})()
};
$.fn.mmenu.debug = function( msg )
{
if ( typeof console != 'undefined' && typeof console.log != 'undefined' )
{
console.log( 'MMENU: ' + msg );
}
};
$.fn.mmenu.deprecated = function( depr, repl )
{
if ( typeof console != 'undefined' && typeof console.warn != 'undefined' )
{
console.warn( 'MMENU: ' + depr + ' is deprecated, use ' + repl + ' instead.' );
}
};
// Global vars
var _serialnr = 0,
_useOverflowScrollingFallback = $.fn.mmenu.support.touch && !$.fn.mmenu.support.overflowscrolling;
function extendOptions( o )
{
if ( typeof o == 'string' )
{
switch( o )
{
case 'top':
case 'right':
case 'bottom':
case 'left':
o = {
position: o
};
break;
}
}
if ( typeof o != 'object' )
{
o = {};
}
// DEPRECATED
if ( typeof o.addCounters != 'undefined' )
{
$.fn.mmenu.deprecated( 'addCounters-option', 'counters.add-option' );
o.counters = {
add: o.addCounters
};
}
if ( typeof o.closeOnClick != 'undefined' )
{
$.fn.mmenu.deprecated( 'closeOnClick-option', 'onClick.close-option' );
o.onClick = {
close: o.closeOnClick
};
}
// /DEPRECATED
// OnClick
if ( typeof o.onClick == 'boolean' )
{
o.onClick = {
close : o.onClick
};
}
else if ( typeof o.onClick != 'object' )
{
o.onClick = {};
}
return o;
}
function complementOptions( o )
{
if ( typeof o.onClick.delayPageload == 'boolean' )
{
o.onClick.delayPageload = ( o.onClick.delayPageload ) ? o.configuration.slideDuration : 0;
}
if ( $.fn.mmenu.useOverflowScrollingFallback() )
{
switch( o.position )
{
case 'top':
case 'bottom':
$.fn.mmenu.debug( 'position: "' + o.position + '" not possible when using the overflowScrolling-fallback.' );
o.position = 'left';
break;
}
}
return o;
}
function _initPage( $p, conf )
{
if ( !$p )
{
$p = $('> ' + conf.pageNodetype, $body);
if ( $p.length > 1 )
{
$p = $p.wrapAll( '<' + conf.pageNodetype + ' />' ).parent();
}
$p.addClass( _c.page );
}
return $p;
}
function _initMenu( $m, position, conf )
{
if ( !$m.is( conf.menuNodetype ) )
{
$m = $( '<' + conf.menuNodetype + ' />' ).append( $m );
}
// $_dummy = $( '<div class="mmenu-dummy" />' ).insertAfter( $m ).hide();
$m.prependTo( 'body' )
.addClass( cls( '' ).slice( 0, -1 ) )
.addClass( cls( position ) );
// Refactor selected class
$('li.' + conf.selectedClass, $m).removeClass( conf.selectedClass ).addClass( _c.selected );
// Refactor label class
$('li.' + conf.labelClass, $m).removeClass( conf.labelClass ).addClass( _c.label );
return $m;
}
function _initSubmenus( $m, direction, serial )
{
$m.addClass( cls( direction ) );
$( 'ul ul', $m )
.addClass( _c.submenu )
.each(
function( i )
{
var $t = $(this),
$l = $t.parent(),
$a = $l.find( '> a, > span' ),
$p = $l.parent(),
id = $t.attr( 'id' ) || cls( 's' + serial + '-' + i );
$t.data( _d.parent, $p );
$l.data( _d.sub, $t );
$t.attr( 'id', id );
var $btn = $( '<a class="' + _c.subopen + '" href="#' + id + '" />' ).insertBefore( $a );
if ( !$a.is( 'a' ) )
{
$btn.addClass( _c.fullsubopen );
}
if ( direction == 'horizontal' )
{
var id = $p.attr( 'id' ) || cls( 'p' + serial + '-' + i );
$p.attr( 'id', id );
$t.prepend( '<li class="' + _c.subtitle + '"><a class="' + _c.subclose + '" href="#' + id + '">' + $a.text() + '</a></li>' );
}
}
);
if ( direction == 'horizontal' )
{
// Add opened-classes
$('li.' + _c.selected, $m)
.parents( 'li.' + _c.selected ).removeClass( _c.selected )
.end().each(
function()
{
var $t = $(this),
$u = $t.find( '> ul' );
if ( $u.length )
{
$t.parent().addClass( _c.subopened ).addClass( _c.subopening );
$u.addClass( _c.opened );
}
}
)
.parent().addClass( _c.opened )
.parents( 'ul' ).addClass( _c.subopened ).addClass( _c.subopening );
if ( !$('ul.' + _c.opened, $m).length )
{
$('ul', $m).not( '.' + _c.submenu ).addClass( _c.opened );
}
// Rearrange markup
$('ul ul', $m).appendTo( $m );
}
else
{
// Replace Selected-class with opened-class in parents from .Selected
$('li.' + _c.selected, $m)
.addClass( _c.opened )
.parents( '.' + _c.selected ).removeClass( _c.selected );
}
}
function _initBlocker( $b, $m, conf )
{
if ( !$b )
{
$b = $( '<div id="' + _c.blocker + '" />' ).appendTo( $body );
}
click( $b,
function()
{
$m.trigger( _e.close );
}, true
);
return $b;
}
function _initLinks( $m, onClick, conf )
{
if ( onClick.close )
{
var $a = $('a', $m)
.not( '.' + _c.subopen )
.not( '.' + _c.subclose );
click( $a,
function()
{
var $t = $(this),
href = $t.attr( 'href' );
$m.trigger( _e.close );
$a.parent().removeClass( _c.selected );
$t.parent().addClass( _c.selected );
if ( onClick.blockUI && href.slice( 0, 1 ) != '#' )
{
$html.addClass( _c.blocking );
}
if ( href != '#' )
{
setTimeout(
function()
{
window.location.href = href;
}, onClick.delayPageload
);
}
}
);
}
}
function _initOpenClose( $m, $p, horizontal )
{
// toggle menu
var id = $m.attr( 'id' );
if ( id && id.length )
{
click( 'a[href="#' + id + '"]',
function()
{
$m.trigger( _e.toggle );
}
);
}
// close menu
var id = $p.attr( 'id' );
if ( id && id.length )
{
click( 'a[href="#' + id + '"]',
function()
{
$m.trigger( _e.close );
}
);
}
// open/close horizontal submenus
if ( horizontal )
{
click( $('a.' + _c.subopen, $m),
function()
{
var $submenu = $(this).parent().data( _d.sub );
if ( $submenu )
{
$submenu.trigger( _e.open );
}
}
);
click( $('a.' + _c.subclose, $m),
function()
{
$(this).parent().parent().trigger( _e.close );
}
);
}
// open/close vertical submenus
else
{
click( $('a.' + _c.subopen, $m),
function()
{
var $submenu = $(this).parent().data( _d.sub );
if ( $submenu )
{
$submenu.trigger( _e.toggle );
}
}
);
}
}
function openMenu( $m, o )
{
var _scrollTop = findScrollTop();
$allMenus.not( $m ).trigger( _e.close );
// store style and position
$page
.data( _d.style, $page.attr( 'style' ) || '' )
.data( _d.scrollTop, _scrollTop );
// resize page to window width
var _w = 0;
$wndw.bind(
_e.resize,
function( e )
{
var nw = $wndw.width();
if ( nw != _w )
{
_w = nw;
$page.width( nw );
}
}
).trigger( _e.resize );
// prevent tabbing out of the menu...
$wndw.bind(
_e.keydown,
function( e )
{
if ( e.keyCode == 9 )
{
e.preventDefault();
return false;
}
}
);
// open
$m.addClass( _c.opened );
if ( o.configuration.hardwareAcceleration )
{
$html.addClass( _c.accelerated );
}
$html
.addClass( _c.opened )
.addClass( cls( o.position ) );
$page.scrollTop( _scrollTop );
// small timeout to ensure the "opened" class did its job
setTimeout(
function()
{
// opening
$m.trigger( _e.opening );
$html.addClass( _c.opening );
setTimeout(
function()
{
// opened
$m.trigger( _e.opened );
}, o.configuration.slideDuration
);
}, 25
);
return 'open';
}
function closeMenu( $m, o )
{
// closing
$m.trigger( _e.closing );
$html.removeClass( _c.opening );
$wndw.unbind( _e.keydown );
setTimeout(
function()
{
// closed
$m.trigger( _e.closed )
.removeClass( _c.opened );
$html.removeClass( _c.opened )
.removeClass( cls( o.position ) )
.removeClass( _c.accelerated );
// restore style and position
$page.attr( 'style', $page.data( _d.style ) );
$wndw.unbind( _e.resize );
if ( $scrollTopNode )
{
$scrollTopNode.scrollTop( $page.data( _d.scrollTop ) );
}
}, o.configuration.slideDuration + 25
);
return 'close';
}
function openSubmenuHorizontal( $submenu, o )
{
$body.scrollTop( 0 );
$html.scrollTop( 0 );
$submenu
.removeClass( _c.subopening )
.addClass( _c.opened );
var $parent = $submenu.data( _d.parent );
if ( $parent )
{
$parent.addClass( _c.subopening );
}
return 'open';
}
function closeSubmenuHorizontal( $submenu, o )
{
var $parent = $submenu.data( _d.parent );
if ( $parent )
{
$parent.removeClass( _c.subopening );
}
setTimeout(
function()
{
$submenu.removeClass( _c.opened );
}, o.configuration.slideDuration + 25
);
return 'close';
}
function findScrollTop()
{
if ( !$scrollTopNode )
{
if ( $html.scrollTop() != 0 )
{
$scrollTopNode = $html;
}
else if ( $body.scrollTop() != 0 )
{
$scrollTopNode = $body;
}
}
return ( $scrollTopNode ) ? $scrollTopNode.scrollTop() : 0;
}
function click( $b, fn, onTouchStart )
{
if ( typeof $b == 'string' )
{
$b = $( $b );
}
var event = ( onTouchStart )
? $.fn.mmenu.support.touch
? _e.touchstart
: _e.mousedown
: _e.click;
$b.bind(
event,
function( e )
{
e.preventDefault();
e.stopPropagation();
fn.call( this, e );
}
);
}
function cls( c )
{
return 'mmenu-' + c;
}
function getClasses()
{
return {
page : cls( 'page' ),
blocker : cls( 'blocker' ),
blocking : cls( 'blocking' ),
opened : cls( 'opened' ),
opening : cls( 'opening' ),
submenu : cls( 'submenu' ),
subopen : cls( 'subopen' ),
fullsubopen : cls( 'fullsubopen' ),
subclose : cls( 'subclose' ),
subopened : cls( 'subopened' ),
subopening : cls( 'subopening' ),
subtitle : cls( 'subtitle' ),
selected : cls( 'selected' ),
label : cls( 'label' ),
noresult : cls( 'noresult' ),
noresults : cls( 'noresults' ),
nosubresult : cls( 'nosubresult' ),
search : cls( 'search' ),
counter : cls( 'counter' ),
accelerated : cls( 'accelerated' ),
nooverflowscrolling : cls( 'no-overflowscrolling' )
};
}
function evt( e )
{
return e + '.mmenu';
}
function getEvents()
{
return {
toggle : evt( 'toggle' ),
open : evt( 'open' ),
close : evt( 'close' ),
search : evt( 'search' ),
reset : evt( 'reset' ),
keyup : evt( 'keyup' ),
keydown : evt( 'keydown' ),
count : evt( 'count' ),
resize : evt( 'resize' ),
opening : evt( 'opening' ),
opened : evt( 'opened' ),
closing : evt( 'closing' ),
closed : evt( 'closed' ),
touchstart : evt( 'touchstart' ),
mousedown : evt( 'mousedown' ),
click : evt( 'click' )
};
}
function dta( d )
{
return 'mmenu-' + d;
}
function getDatas()
{
return {
parent : dta( 'parent' ),
sub : dta( 'sub' ),
style : dta( 'style' ),
scrollTop : dta( 'scrollTop' )
};
}
})( jQuery );
<|start_filename|>otto.zeroconf.coffee<|end_filename|>
os = require 'os'
mdns = require 'mdns'
otto = global.otto
global.otto.zeroconf = do -> # note 'do' calls the function
zeroconf = {}
# we should use node_mdns for osx, and this avahi module under linux:
# https://github.com/D1plo1d/node_avahi_pub
# FIXME
zeroconf.createMDNSAdvertisement = ->
try
console.log 'advertising on mdns'
ad_otto = mdns.createAdvertisement mdns.tcp('otto'), 8778, { name: 'Otto Audio Jukebox @ ' + os.hostname() }
ad_otto.on 'error', handleMDNSError
ad_otto.start()
ad_http = mdns.createAdvertisement mdns.tcp('http'), 8778, { name: 'Otto Audio Jukebox @ ' + os.hostname() }
ad_http.on 'error', handleMDNSError
ad_http.start()
catch ex
handleMDNSError(ex)
handleMDNSError = (error) ->
switch (error.errorCode)
when mdns.kDNSServiceErr_Unknown
console.warn(error)
otto.misc.timeoutSet(5000, zeroconf.createMDNSAdvertisement)
else throw error
return zeroconf
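# Illustrative counterpart sketch (not part of this module; the variable names
# are assumptions for the example): another node_mdns client could discover the
# '_otto._tcp' advertisement created above like so:
#
#   browser = mdns.createBrowser mdns.tcp('otto')
#   browser.on 'serviceUp', (service) ->
#     console.log 'found otto jukebox at', service.host, service.port
#   browser.start()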
<|start_filename|>otto.client.cubes.coffee<|end_filename|>
####
#### client side (body of otto.client.cubes.coffee served as /otto.cubes.js)
####
#### inspiration and code from http://beta.theexpressiveweb.com/
global.otto.client.cubes = ->
window.otto.client.cubes = do -> # note the 'do' causes the function to be called
$('head').append '<link rel="stylesheet" href="static/css/otto.cubes.css" />'
cubes = {}
$cubescontainer = false
$scene = false
$current = false
$progress = false
nothing = true
cubesstacked = false
maxheight = 0
height = []
reset_heights = ->
for x in [0..27]
height[x] = []
for y in [0..26]
height[x][y] = 0
reset = ->
$cubescontainer = false
$scene = false
$current = false
$progress = false
nothing = true
cubesstacked = false
maxheight = 0
height = []
reset_heights()
cubes.loader_event = (data) ->
if data.count? and data.total?
nothing = false
$progress = $progress || $('.loadingprogress')
$progress.html otto.templates.countprogress count: data.count, total: data.total
$('.loadingstatus').removeClass('searching begin nonefound finished')
$('.loadmusic2').html('scan')
else if data.album?
nothing = false
console.log 'loader says album:', data
if not data.artist and data.fileunder?[0]?.name
data.artist = data.fileunder?[0]?.name
console.log '$current', $current
if not $current
$current = $('.loadingcurrent')
$('.loadingstatus').removeClass('searching begin nonefound finished').addClass('loading')
$current.html otto.templates.album item: data, nolazy: true, noexpand: true, nostars: true
if cubesstacked
html = place_one_cube(data)
$scene.append html
adjust_parent_height()
else if data is 'finished'
$('.loadmusic2').html('scan')
if $('.loadingstatus').parent().is '.cubeswithload-container'
if $progress then $progress.empty()
if $current then $current.empty()
$('.loadingstatus').removeClass('loading searching finished nonefound').addClass('begin')
else
if not nothing
$('.loadingstatus').removeClass('loading searching begin nonefound').addClass('finished')
else
$('.loadingstatus').removeClass('loading searching begin nonefound').addClass('nonefound')
else if data is 'error'
$('.loadmusic2').html('scan')
if $('.loadingstatus').parent().is '.cubeswithload-container'
if $progress then $progress.empty()
if $current then $current.empty()
$('.loadingstatus').removeClass('loading searching finished nonefound').addClass('begin')
else
$('.loadingstatus').removeClass('loading searching').addClass('error')
hash_code = (str) ->
hash = 0
for char in str
hash = ((hash<<5)-hash)+char.charCodeAt(0)
hash = hash & hash # Convert to 32bit integer
return hash
scene_transform = (x, y, z) ->
z = z || 0
top = (-x*7-y*7-z*6)
left = (x*14-y*14)
if top < maxheight
maxheight = top
return {'top': top, 'left': left}
adjust_parent_height = ->
if not $cubescontainer
$cubescontainer = $('.cubes-container')
howhigh = -maxheight + 95
if $cubescontainer.parent().height() < howhigh
$cubescontainer.parent().height(howhigh)
create_cube = (x, y, z, color, shade, z_index, rowclass, title, id) ->
color = color || 0
shade = shade || 0
pos = scene_transform x, y, z
bg_x = -((shade%7)*28)
bg_y = -((color%9)*28)
style = "top: #{pos.top}px;"
style += "left: #{pos.left}px;"
style += "z-index: #{z_index};"
style += "visibility: visible;" # why is this here?
style += "background-position: #{bg_x}px #{bg_y}px;"
return otto.templates.cubelink id: id, rowclass: rowclass, title: title, style: style
# being wrapped in a link affects the opacity rendering effect we want
# discovered that it doesn't need to be a link, just another element. need to test this on other non-chrome browsers.
# let's try to do it with half the elements
#return "<a href='' class='cube cubelink #{rowclass}' title='#{title}' style='top: #{pos.top}px; left: #{pos.left}px; z-index: #{z_index}; visibility: visible; background-position: #{bg_x}px #{bg_y}px;'></a>"
# ha! that affects the transparency effect too. seems like we need two elements.
# that only saved about 12% in time anyways
place_one_cube = (item) ->
id = item._id
fileunderkey = item.key || item.fileunder?[0].key || ''
fileundername = item.name || item.fileunder?[0].name || ''
albumname = item.album
if not fileunderkey
console.log 'missing fileunder key for', id
fileunderkey = 'Unknown'
namehash = hash_code(fileunderkey.substr(0,9)) # why do we limit it to the first 9 chars??
color = namehash % 5
if color < 0
color = -color
c0 = fileunderkey.toUpperCase().charCodeAt(0) || 0
x = c0 - 65
if x > 25 then x = 27
if x < 0 then x = 26
y = hash_code(fileunderkey) # could we just use namehash from above?
if y < 0 then y = -y
y = y % 26
z = height[x][y]++
#z_index = (1000-(x*30+(y-29)))
z_index = (1000-(x*30+y))
title = fileundername
shade = 0
if albumname
title = title + ' - ' + albumname
albumhash = hash_code(albumname.substr(0,9)) # again with the 0,9
else
albumhash = hash_code('')
shade = albumhash % 7
if shade < 0 then shade = -shade
#title = title + ' (' + x + ',' + y + ',' + z + ',' + z_index + ',' + color + ',' + shade + ')'
if x < 26
rowclass = fileunderkey.toUpperCase()[0]
else if x is 26
rowclass = 'num'
else if x is 27
rowclass = 'other'
return create_cube(x, y, z, color, shade, z_index, rowclass, title, id)
stackupthecubes = (data) ->
console.log 'data received'
maxheight = 0
x = 0
y = -2
top_adjust = +12 # nudge it into place
left_adjust = -9
html = ''
for letter in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ#⋯'
pos = scene_transform(x, y)
pos.top += top_adjust
pos.left += left_adjust
style = "top: #{pos.top}px;"
style += "left: #{pos.left}px;"
html += otto.templates.stacklabel letter: letter, style: style
x += 1
$scene.html html
html = ''
console.log 'starting to loop through the data...'
for item in data
html += place_one_cube(item)
console.log 'done.'
console.log 'inserting html string into the dom'
$scene.append html
console.log 'done.'
console.log 'adjusting the height'
adjust_parent_height()
## this one seems to trigger the dom rendering:
#console.log 'scrolling the window'
## scroll the bottom into view if it is not already
#container_bottom = $landscape.offset().top + $landscape.height()
#scroll_bottom = $cubescontainer.parent().scrollTop() + $(window).height()
#if scroll_bottom < container_bottom-100
# $cubescontainer.parent().scrollTop(container_bottom - $(window).height() - 100)
cubesstacked = true
console.log 'all done!'
cubes.show = ->
reset()
html = otto.templates.cubes()
$html = $(html)
#$cubescontainer = $html.find('.cubes-container') # nope
# don't want to set $cubescontainer until after the html is
# inserted into the dom as the jQuery object caches the parent
# link and the parent link is currently null
$scene = $html.find('.scene')
$html.on 'click', cubes.click_handler
##$html.on mouseenter: cubes.hover_handler, mouseleave: cubes.hover_handler
#$html.on 'mouseenter', '.cubelink', cubes.hover_handler
#$html.on 'mouseleave', '.cubelink', cubes.hover_handler
#$.getJSON '/all_albums_by_fileunder', {}, stackupthecubes # useful for debugging
$.getJSON '/all_albums_by_fileunder', {}, (data) ->
try
stackupthecubes(data)
catch error
console.error "cubes.show caught error #{error}"
#console.error error.stack # not useful
return $html
cubes.showall = (opacity='1') ->
console.log 'cubes.showall'
all = ''
for letter in 'A B C D E F G H I J K L M N O P Q R S T U V W X Y Z num other'.split(' ')
if all
all += ','
all += ".cubelink.#{letter}"
if document.styleSheets[0].addRule
document.styleSheets[0].addRule(all, "opacity: #{opacity}")
else
document.styleSheets[0].insertRule("#{all} {opacity: #{opacity}}", document.styleSheets[0].cssRules.length)
cubes.highlight = ($target) ->
cubes.showall('0.3')
console.log 'cubes.highlight'
val = $target.text()
if val is '#'
val = 'num'
if val is '⋯'
val = 'other'
if document.styleSheets[0].addRule
document.styleSheets[0].addRule(".cubelink.#{val}", "opacity: 1")
else
document.styleSheets[0].insertRule(".cubelink.#{val} {opacity: 1}", document.styleSheets[0].cssRules.length)
$target.parent().find('.stacklabel').removeClass 'active'
$target.addClass 'active'
cubes.click_handler = (e) ->
$target = $(e.target)
console.log 'cubes.click_handler', $target.parent()
if $target.is '.stacklabel'
if $target.is '.active'
cubes.showall()
$target.removeClass 'active'
else
cubes.highlight($target)
else if $target.is '.landscape,.cubes-container'
cubes.showall()
$target.parent().find('.stacklabel').removeClass 'active'
else if $target.is '.resort' or $target.parent().is '.resort' # this isn't working FIXME
$.getJSON '/all_albums_by_fileunder', {}, stackupthecubes
else if $target.is '.cube'
console.log '4'
# this is only needed if you use <a> tags around the cubes
console.log 'ignoring cube click'
e.cancelBubble = true
if e.stopPropagation
e.stopPropagation();
if (e.preventDefault)
e.preventDefault()
e.returnValue = false
return false
cubes.hover_handler = (e) ->
console.log 'cubes.hover_handler'
$target = $(e.target)
#if $target.is '.cube'
# $target = $target.parent()
if $target.is '.cubelink'
id = $target.data 'id'
console.log id
return cubes
######### aborted D3 attempt
#$('head').append '<script src="http://d3js.org/d3.v2.js">'
# #data = d3.range(10).map(Math.random)
# data = []
# for x in [27..0] by -1
# for y in [26..0] by -1
# pile = height[x][y].length-1
# for z in [0..pile] by 1
# item = height[x][y][z]
# pos = scene_transform x, y, z
# item.ox = item.x
# item.x = pos.left
# item.oy = item.y
# item.y = pos.top
# data.push item
# console.log data.length
# #data = data[0..20000]
# width = 1200
# height = 1000
# outerRadius = Math.min(width, height) / 2
# innerRadius = outerRadius * .6
# color = d3.scale.category20()
# donut = d3.layout.pie()
# arc = d3.svg.arc().innerRadius(innerRadius).outerRadius(outerRadius)
# vis = d3.select(".scene").append("svg").data([data]).attr("width", width).attr("height", height)
# arcs = vis.selectAll("g.rect").data(donut).enter().append("rect").attr('x', (d) ->
# return d.data.x
# ).attr('y', (d, i) ->
# return 1000+d.data.y
# ).attr('fill', (d, i) ->
# return color(Math.random())
# ).attr('height', 10).attr('width', 10)
# #arcs = vis.selectAll("g.rect").data(donut).enter().append("g").attr("class", "rect").attr("transform", "translate(" + outerRadius + "," + outerRadius + ")")
# #arcs.append("path").attr("fill", (d, i) ->
# # return color(i)
# # ).attr("d", arc)
# #arcs.append("text").attr("transform", (d) ->
# # return "translate(" + arc.centroid(d) + ")"
# # ).attr("dy", ".35em").attr("text-anchor", "middle").attr("display", (d) ->
# # return d.value > .15 ? null : "none"
# # ).text (d, i) ->
# # return d.value.toFixed(2)
# just saving these style sheet hacking attempts here for now
#$('head').append($ '<script src="static/js/jquery.rule-min.js">')
#$('head').append($ '<script src="static/js/jss.js">')
# now we try to make our own style sheet for dynamically adding rules
# from https://developer.mozilla.org/en-US/docs/DOM/CSSStyleSheet/insertRule
##style = document.createElement('style')
##$('head').append(style)
##if not window.createPopup # for Safari
## style.appendChild document.createTextNode('')
# give the last line a chance to manipulate the dom and then grab the last stylesheet created
# this is risky as other stylesheet loads might be pending but i don't know what else to do.
# it should probably be ok as long as we don't assume we can do things like remove 'our' stylesheet
##setTimeout(->
## cubes.style = document.styleSheets[document.styleSheets.length - 1]
## console.log 'cubes.style', cubes.style
##,0)
# let's try it the jQuery.rule way
# it said "we must append to get a stylesheet":
#storageNode = $('<style rel="alternate stylesheet" type="text/css" />').appendTo('head')[0]
#if storageNode.sheet
# cubes.style = storageNode.sheet
#else
# cubes.style = storageNode.styleSheet
#cubes.style = $ cubes.style
#cubes.style = $ storageNode
# let's try it this way (http://stackoverflow.com/questions/5618742):
#style = document.createElement('style')
#text = ''
#style.setAttribute("type", "text/css")
#if style.styleSheet # for IE
# style.styleSheet.cssText = text
#else # others
# textnode = document.createTextNode(text)
# style.appendChild(textnode)
#document.getElementsByTagName('head')[0].appendChild(style)
#cubes.style = document.styleSheets[document.styleSheets.length - 1]
#console.log 'cubes.style', cubes.style
# i give up! i don't know where $.rule...('style') is appending the rules
# but it works and i don't care enough to keep the dynamic css rules in their own sheet
# ... arrrrgh! doesn't work in FF. sigh.
# read the 'link' worked better in FF than 'script'. didn't help.
#if cubes.style.addRule
# console.log 'addRule'
# cubes.style.addRule(".cubelink."+val, "opacity: 1")
#else
# console.log 'insertRule:', cubes.style.insertRule
# cubes.style.insertRule(".cubelink."+val+" {opacity: 1}", cubes.style.cssRules.length)
#$.rule('.cubelink.'+val+'{ opacity: 1; }').append(cubes.style)
#$.rule(".cubelink.#{val}", cubes.style).add "{ opacity: 1}"
#$.rule('.cubelink.'+val+'{ opacity: 1; }').appendTo('link')
#$(".cubelink.#{val}").css('opacity', 1)
#jss ".cubelink.#{val}", opacity: 1
#jss ".cubelink.#{val}", display: 'block'
#jss ".cubelink.A", display: 'block'
#document.styleSheets[0].addRule(all, "opacity: " + opacity)
#$.rule('#content ul','style').remove();
#$.rule(all + '{ opacity: '+opacity+'; }').appendTo(cubes.style);
#if cubes.style.addRule
# cubes.style.addRule(all, "opacity: #{opacity}")
#else
# cubes.style.insertRule("#{all} {opacity: #{opacity}}", cubes.style.cssRules.length)
#$.rule(all, cubes.style).add "{ opacity: #{opacity}}"
#$.rule(all + '{ opacity: '+opacity+'; }').appendTo('link');
#$(all).css('opacity', opacity)
#jss all, opacity: opacity
#jss '.cublink.A', display: 'none'
<|start_filename|>static/css/miniAlert.css<|end_filename|>
/*
body {
font-family: "Helvetica Neue", helvetica,'Georgia',arial,sans-serif;
background-color: #fafbf1;
color: #3C3C3C;
}
h1 {
font-size: 24px;
line-height: 48px;
font-weight: 100;
letter-spacing: 1px;
border-bottom: 1px solid #3c3c3c;
}
h1 strong {
text-transform: uppercase;
font-weight: bold;
}
h2 {
font-size: 14px;
line-height: 18px;
font-weight: 0;
}
p {
line-height: 16px;
font-size: 12px;
}
#container {
width: 980px;
margin: 0 auto;
}
footer {
text-align: right;
color: #3C3C3C;
font-size: 12px;
}
footer a{
color: #3C3C3C;
}
*/
.mini-alert {
padding: 5px 20px;
border-radius: 8px;
margin: 20px 0;
}
.mini-alert .close {
float: right;
background: transparent;
cursor: pointer;
border: 0;
color: #808080;
font-size: 14px;
position: relative;
}
.mini-alert .close:hover {
color: #1a1a1a;
}
#alert-1 {
background-color: #DAEDF8;
color: #4085AE;
border: 1px solid #4085AE;
}
#alert-1 .close {
top: 5px;
}
#alert-2 {
background-color: #FCF9E2;
color: #BE9A59;
border: 1px solid #BE9A59;
}
#alert-2 .close {
top: -20px;
font-size: 12px;
text-decoration: underline;
}
#alert-3 {
background-color: #F1DEDE;
color: #CD5445;
border: 1px solid #CD5445;
}
#alert-3 .close {
top: 5px;
}
<|start_filename|>otto.listeners.coffee<|end_filename|>
require './otto.misc'
require './otto.events'
otto = global.otto
otto.listeners = do -> # note the 'do' causes the function to be called
listeners = {}
# we could just use [] for our arrays, but then
# the object type would be Array instead of Object
# and JSON would only stringify the Array items
# and would miss our awful abuse of the poor object
# (we are adding attributes like [key].attribute)
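# a quick illustration of that JSON point (example values only):
#   a = [1]; a.extra = 'x'
#   JSON.stringify a                     # -> "[1]"  (the extra key is dropped)
#   JSON.stringify {0: 1, extra: 'x'}    # -> '{"0":1,"extra":"x"}'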
listeners.ListenerList = class ListenerList extends Array
constructor: -> # is this necessary? (oddly, it seems it is)
add: (id, user='', address='', channelname='') ->
if id not in @
@push id
@[id] =
socketids: {}
user: user
address: address
channelname: channelname
streams: 0
setUser: (id, user, address='', channelname='') ->
if id not in @
@add(id, user, address, channelname)
return yes
return no
changeUser: (id, user, address='', channelname='') ->
if id not in @
return @setUser(id, user, address, channelname)
@[id].user = user
@[id].address = address
@[id].channelname = channelname
return no
changeChannel: (id, channelname) ->
@[id].channelname = channelname
remove: (id) ->
if id in @
return removefromarray(@, id)
return no
empty: (id) ->
if Object.keys(@[id].socketids).length == 0 and @[id].streams == 0
return yes
return no
addSocket: (id, socketid) ->
@add(id)
@[id].socketids[socketid] =
inchat: 0
typing: 0
focus: 1
idle: 0
getSockets: (id) ->
return @[id].socketids
removeSocket: (id, socketid) ->
#removefromarray(@[id].socketids, socketid)
delete @[id].socketids[socketid]
if @empty(id)
return @remove(id)
return no
addStream: (id) ->
@[id].streams++
removeStream: (id) ->
if @[id].streams > 0
@[id].streams--
set: (id, socketid, k, v) ->
return no if not @[id].socketids[socketid]
oldv = @[id].socketids[socketid][k]
#console.log 'k', k, 'oldv', oldv, 'v', v
if not (v is oldv)
@[id].socketids[socketid][k] = v
return yes
return no
##### end of class ListenersList
listeners.Listeners = class Listeners extends otto.events.EventEmitter
constructor: ->
# valid events:
super [ '*'
'update'
'userjoin'
'userchange'
'userleft'
'streamingstart'
'streamingstop'
]
@list = new ListenerList
# hey! i just introduced a race condition! cool! FIXME
# (not sure what this ^^^ means anymore)
@timeouts = {}
otto.ll = @list # ugh.
# on the 'update' event, we should consider making a copy of ourselves while
# skipping any incomplete connections and send that instead
update: ->
@trigger 'update', @list
hysteresis: (type, sessionID, callback) ->
@timeouts[sessionID] = {} if not @timeouts[sessionID]?
if not @timeouts[sessionID][type]
@timeouts[sessionID][type] = otto.misc.timeoutSet 5000, =>
@timeouts[sessionID][type] = false
callback()
set_user: (session) ->
if @list.setUser session.sessionID, session.user, session.address
@hysteresis 'join', session.sessionID, =>
if not @list.empty(session.sessionID)
@trigger 'userjoin', @list[session.sessionID]
change_user: (session) ->
if @list.changeUser session.sessionID, session.user, session.address
@hysteresis 'join', session.sessionID, =>
if not @list.empty(session.sessionID)
@trigger 'userjoin', @list[session.sessionID]
else
@trigger 'userchange', @list[session.sessionID]
change_channel: (session) ->
console.log 'listeners.change_channel', session.channelname
@list.changeChannel session.sessionID, session.channelname
add_socket: (session, socket) ->
console.log 'add_socket sessionID', session.sessionID
@list.setUser session.sessionID, session.user, session.address, session.channelname
@list.addSocket session.sessionID, socket.id
@update()
get_sockets: (session) ->
@list.getSockets session.sessionID
remove_socket: (session, socket) ->
console.log 'remove_socket sessionID', session.sessionID, 'socket.id', socket.id
left = @list.removeSocket session.sessionID, socket.id
if left
@hysteresis 'join', session.sessionID, =>
if @list.empty(session.sessionID)
@trigger 'userleft', left
@update()
add_stream: (session) ->
console.log 'add_stream for sessionID', session.sessionID
@list.setUser session.sessionID, session.user, session.address
@list.addStream session.sessionID
@update()
if @list[session.sessionID].streams == 1
@hysteresis 'stream', session.sessionID, =>
if @list[session.sessionID].streams > 0
@trigger 'streamingstart', @list[session.sessionID]
remove_stream: (session) ->
console.log 'remove_stream for sessionID', session.sessionID
@list.removeStream session.sessionID
@update()
if @list[session.sessionID].streams == 0
@hysteresis 'stream', session.sessionID, =>
if @list[session.sessionID].streams == 0
@trigger 'streamingstop', @list[session.sessionID]
set_state: (sessionID, socketid, state, value) ->
#console.log 'set_state', sessionID, socketid, state, value
if @list.set sessionID, socketid, state, value
@update()
list_socketids: (sessionID) ->
return @list[sessionID].socketids
get_list: ->
return @list
##### end of class Listeners
removefromarray = (array, item) ->
for victim, i in array
if item == victim
return array.splice(i, 1)
return null
return listeners
# a previous attempt, after which i said "let's dial that back a bit"
## ref http://www.bennadel.com/blog/2292-\
# Extending-JavaScript-Arrays-While-Keeping-Native-Bracket-Notation-Functionality.htm
#ListenersX = (->
# # the construction function
# Listeners = ->
# listeners = Object.create( Array:: )
# listeners = (Array.apply( listeners, arguments ) || listeners)
# Listeners.injectClassMethods( listeners )
# return listeners
#
# Listeners.injectClassMethods = (listeners) ->
# for method of Listeners::
# # Make sure this is a local method
# #if Listeners::hasOwnProperty(method)
# listeners[method] = Listeners::[method];
# return listeners
#
# Listeners:: =
# add: (id) ->
#
# return Listeners
#).call( {} ) # magic
<|start_filename|>otto.events.coffee<|end_filename|>
otto = global.otto
##### parent class for adding events to other classes
global.otto.events = do -> # note 'do' calls the function
events = {}
events.EventEmitter = class EventEmitter
constructor: (@validevents) ->
# very inefficient ^^^ that the static valid events list is stored with each object FIXME
@eventhandlers = {}
on: (eventname, callback) ->
if eventname not in @validevents
throw new Error "object {@constructor.name} doesn't have an event named #{eventname}"
if not callback
throw new Error "on method for {@constructor.name} missing callback"
if not @eventhandlers[eventname]
@eventhandlers[eventname] = []
@eventhandlers[eventname].push callback
trigger: (eventname, args...) ->
#console.log "trigger '#{eventname}' for #{@name}" if eventname is not 'time'
if eventname not in @validevents
throw new Error "object {@constructor.name} invalid event name #{eventname}"
for name in [eventname, '*']
if @eventhandlers[name]
for handler in @eventhandlers[name]
handler eventname, @, args...
return events
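# Illustrative usage sketch (the class and event names below are examples, not
# taken from this module): subclasses pass their valid event names to super,
# and handlers receive (eventname, emitter, args...) as triggered above.
#
#   class Player extends otto.events.EventEmitter
#     constructor: -> super ['*', 'play', 'stop']
#
#   player = new Player
#   player.on 'play', (eventname, emitter, track) -> console.log 'now playing', track
#   player.trigger 'play', 'some track name'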
<|start_filename|>static/js/miniAlert.js<|end_filename|>
jQuery(function() {
$.miniAlert = function(element, options) {
var addButton, bindButtonEvent, init, removeElement, setState,
_this = this;
this.defaults = {
text: 'x',
cssClass: 'close',
position: 'before',
effect: 'basic',
duration: 100,
onLoad: function() {},
onHide: function() {},
onHidden: function() {}
};
this.state = '';
this.settings = {};
this.$element = $(element);
setState = function(state) {
// track plugin state on the instance via the captured _this
_this.state = state;
};
this.getState = function() {
return _this.state;
};
this.getSetting = function(settingKey) {
return this.settings[settingKey];
};
this.callSettingFunction = function(functionName, args) {
if (args == null) {
args = [this.$element, this.$button];
}
return this.settings[functionName].apply(this, args);
};
removeElement = function() {
_this.$element.remove();
setState('hidden');
return _this.callSettingFunction('onHidden', []);
};
addButton = function() {
options = {
"class": _this.settings.cssClass,
text: _this.settings.text
};
_this.$button = $('<button />', options);
if (_this.settings.position === 'after') {
return _this.$button.appendTo(_this.$element);
} else {
return _this.$button.prependTo(_this.$element);
}
};
bindButtonEvent = function() {
return _this.$button.bind('click', function(e) {
e.preventDefault();
setState('hiding');
_this.callSettingFunction('onHide');
if (_this.settings.effect === 'fade') {
return _this.$element.fadeOut(_this.settings.duration, removeElement);
} else if (_this.settings.effect === 'slide') {
return _this.$element.slideUp(_this.settings.duration, removeElement);
} else {
return removeElement();
}
});
};
init = function() {
setState('loading');
_this.settings = $.extend({}, _this.defaults, options);
addButton();
bindButtonEvent();
setState('loaded');
return _this.callSettingFunction('onLoad');
};
init();
return this;
};
return $.fn.miniAlert = function(options) {
return this.each(function() {
var plugin;
if (void 0 === ($(this)).data('miniAlert')) {
plugin = new $.miniAlert(this, options);
return ($(this)).data('miniAlert', plugin);
}
});
};
});
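// Illustrative usage sketch (assumes markup like <div class="mini-alert">...</div>
// somewhere on the page; option values are examples only, anything omitted falls
// back to the defaults above):
//
//   $(function() {
//       $('.mini-alert').miniAlert({
//           effect   : 'fade',
//           duration : 300,
//           onHidden : function() { console.log('alert dismissed'); }
//       });
//   });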
<|start_filename|>otto.livedev.coffee<|end_filename|>
##### live dev module. only loaded in 'development' mode
fs = require 'fs'
glob = require 'glob'
otto = global.otto
module.exports = global.otto.livedev = do -> # note the 'do' causes the function to be called
livedev = {}
#otto.development = true
reload_module = (name) ->
# remove from require cache
for own path of require.cache
if otto.misc.endsWith path, '/' + name
console.log "removing #{path} from require.cache"
delete require.cache[path]
break
console.log "reloading module #{name}"
require './' + name
fs.watchFile 'otto.client.coffee', interval: 200, => # 200ms drains batteries
filename = 'otto.client.coffee'
#code = fs.readFileSync filename, 'utf8'
#CoffeeScript.run(code.toString(), {filename: file})
# save names of loaded client modules so we can reload them too
client_modules = []
for own modulename of otto.client
client_modules.push modulename
console.log "#{filename} changed..."
reload_module filename
# since we wiped otto.client, we also need to reload
# all the other client modules since they bind to it
for modulename in client_modules
reload_module "otto.client.#{modulename}.coffee"
otto.zappa.io.sockets.emit 'reloadself', filename
glob 'otto.client.*.coffee', (err, filenames) =>
for filename in filenames
do (filename) => # to each his own
console.log "watching #{filename}"
fs.watchFile filename, interval: 200, => # 200ms drains batteries
console.log "#{filename} changed..."
reload_module filename
otto.zappa.io.sockets.emit 'reloadmodule', filename
glob 'static/css/*.css', (err, filenames) =>
for filename in filenames
do (filename) => # to each his own
console.log "watching #{filename}"
fs.watchFile filename, interval: 200, => # 200ms drains batteries
console.log "#{filename} changed..."
css = fs.readFileSync filename, 'utf8'
sheetname = filename.match(/([^\/]*.css)$/)[1]
otto.zappa.io.sockets.emit 'restyle', filename: filename, sheetname: sheetname, css: css
return livedev
<|start_filename|>static/css/mmenu-theme-light.css<|end_filename|>
/*
Menu, submenus, items
- Styling (light background)
*/
.mmenu
{
background: #f6f6f6;
}
.mmenu *
{
text-shadow: none;
}
.mmenu li
{
border-top: 1px solid rgba( 255, 255, 255, 0.7 );
border-bottom: 1px solid rgba( 0, 0, 0, 0.1 );
}
.mmenu li,
.mmenu li > a,
.mmenu li > a:hover,
.mmenu li > span
{
color: rgba( 0, 0, 0, 0.5 );
text-decoration: none;
}
.mmenu li.mmenu-selected > a,
.mmenu li.mmenu-selected > span
{
background: rgba( 255, 255, 255, 0.6 );
}
.mmenu li.mmenu-selected > a.mmenu-subopen
{
background: transparent;
}
/* subopen/close */
.mmenu a.mmenu-subopen
{
border-left-color: rgba( 0, 0, 0, 0.1 );
}
.mmenu a.mmenu-subclose
{
background: rgba( 255, 255, 255, 0.6 );
}
/* vertical submenu */
.mmenu.mmenu-vertical li.mmenu-opened > a.mmenu-subopen,
.mmenu.mmenu-vertical li.mmenu-opened > ul
{
background: rgba( 0, 0, 0, 0.05 );
}
/* page + blocker */
html.mmenu-opened .mmenu-page
{
box-shadow: 0 0 10px rgba( 0, 0, 0, 0.6 );
}
/*
Labels
- Styling
*/
.mmenu li.mmenu-label
{
background: rgba( 0, 0, 0, 0.05 );
color: rgba( 0, 0, 0, 0.4 );
}
/*
Counters
- Styling
*/
.mmenu em.mmenu-counter
{
background: rgba( 0, 0, 0, 0.2 );
box-shadow: none;
color: rgba( 255, 255, 255, 0.8 );
}
/*
Arrows
- Styling
*/
.mmenu a.mmenu-subopen:after,
.mmenu a.mmenu-subclose:before
{
border-color: rgba( 0, 0, 0, 0.2 );
}
/*
Search
- Styling
*/
.mmenu div.mmenu-search
{
background: rgba( 0, 0, 0, 0.2 );
border-bottom: 1px solid rgba( 0, 0, 0, 0.1 );
}
.mmenu div.mmenu-search input
{
background: rgba( 255, 255, 255, 0.8 );
border: none;
color: rgba( 0, 0, 0, 0.7 );
}
.mmenu li.mmenu-noresults
{
color: rgba( 0, 0, 0, 0.5 );
}
<|start_filename|>static/css/mmenu-widescreen.css<|end_filename|>
/*
CSS for having the menu always opened.
To use on widescreens only, include it using a mediaquery:
<link type="text/css" href="mmenu-widescreen.css" media="all and (min-width: 900px)"/>
*/
/* hide open-button */
a[href="#menu"]
{
display: none !important;
}
/* positioning and sizing */
body
{
position: relative;
}
.mmenu-page
{
box-shadow: none !important;
background-color: inherit;
width: 70% !important;
min-height: 100%;
margin: 0 !important;
left: 30% !important;
top: 0 !important;
position: fixed;
z-index: 1;
overflow-y: auto;
-webkit-overflow-scrolling: touch;
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
}
.mmenu
{
position: fixed;
z-index: 0;
}
.mmenu > ul
{
padding-right: 0 !important;
padding-left: 0 !important;
}
.mmenu.mmenu-vertical
{
width: 30% !important;
}
.mmenu.mmenu-horizontal
{
width: 150% !important;
}
.mmenu.mmenu-right.mmenu-horizontal
{
left: 0;
right: auto;
}
.mmenu a.mmenu-subopen
{
right: 0;
}
.mmenu.mmenu-right a.mmenu-subclose:before
{
left: 20px;
}
.mmenu em.mmenu-counter
{
right: 40px;
}
.mmenu div.mmenu-search
{
width: 20% !important;
padding: 10px !important;
}
.mmenu:first-child,
.mmenu.mmenu-opened
{
display: block;
}
<|start_filename|>otto.client.coffee<|end_filename|>
####
#### client side (body of global.otto.client served by zappa as /otto.client.js)
####
global.otto.client = ->
window.otto = window.otto || {}
otto.socketconnected = false
otto.serverproceed = false
otto.clientstarted = false
otto.clientready = false
otto.salutations = false
@on 'connect': ->
console.log 'sio connect'
otto.socketconnected = true
# now we wait for the server to say 'proceed' or ask us to 'resession'
@on 'proceed': ->
console.log 'sio proceed'
otto.serverproceed = true
otto.sayhello()
# now we wait for the server to say 'welcome' and give us data
@on 'resession': ->
console.log 'sio resession, sir, yes sir!'
$.get '/resession', =>
console.log '/resession, sir, done sir!'
otto.serverproceed = true
otto.sayhello()
@on 'disconnect': ->
console.log 'sio disconnect'
$('body').addClass 'disconnected'
otto.socketconnected = false
otto.serverproceed = false
#otto.clientstarted = false
otto.clientready = false
otto.salutations = false
otto.saygoodbye()
@on 'error': ->
console.log 'sio error, reloading'
window.location.reload()
# note: connect doesn't work when moved under $ or nextTick!
# it appears you have to call @connect inside zappa.run's initial call
# or else the context.socket isn't created inside zappa.run() in
# time for it to be used internally. i think this also means it's going
# to be very difficult to rig things so we can call @connect again to connect
# to a different server. -jon
# first arg is the url to connect to, undefined connects back to where we were served from
@connect undefined, 'reconnection limit': 3000, 'max reconnection attempts': Infinity
# this might be in a race condition with the rest of this file being parsed (move it to end?)
# i think i fixed ^^ this with the added otto.clientstarted logic
# using nextTick here so the function, and all the functions it calls, are finished being defined
nextTick -> otto.start_client()
otto.start_client = =>
console.log 'start_client'
otto.clientstate = {}
otto.myusername = no
otto.mychannel = no
otto.current_track_qid = no
otto.channel_list = []
otto.current_channel = no
otto.play_state = 'unknown'
otto.connect_state = 'disconnected'
otto.ignore_reload = false
otto.cache = { queue: [], list: [], stars: [] }
otto.current_volume = 80
otto.soundfx = no
otto.notifications = no
# no notifications in the app
if /Otto_OSX/.test(navigator.userAgent)
delete Notification
otto.touch_init()
otto.showdown_converter = new Showdown.converter()
$(window).on 'scrollstop', otto.results_lazyload_handler
$(window).smartresize otto.results_lazyload_handler
$(window).smartresize otto.autosize_adjust
$(window).on 'focus blur', otto.window_focus_handler
otto.ouroboros_ajax_hooks()
otto.window_idle_handler_init()
$('body').on 'click', otto.button_click_handler
$('body').on 'click', otto.logo_click_handler
$('body').on 'click', otto.letterbar_click_handler
$('body').on 'change', otto.checkbox_click_handler
$('body').on 'submit', otto.form_submit_handler
$(window).on 'unload', ->
otto.lastnotification.close() if otto.lastnotification
$(window).on 'beforeunload', ->
otto.lastnotification.close() if otto.lastnotification
# hmmm, was hoping beforeunload would help with refresh, but alas
if /^#/.test location.hash
params = location.hash.slice(1).split('&')
$.each params, ->
if @ != ''
kv = @.split '='
k = kv[0]
v = kv[1]
switch k
when 'connect' then if v == '1' then otto.connect_player()
when 'chat' then if v == '1' then $('.chattoggle').click()
when 'ignorereload' then if v == '1' then otto.ignore_reload = true
otto.clientstarted = true
otto.sayhello()
otto.sayhello = =>
if otto.socketconnected and otto.serverproceed and otto.clientstarted and not otto.salutations
otto.salutations = true
console.log 'well, hello server!'
@emit 'hello', otto.clientstate # causes the server to welcome us and tell us our state
otto.saygoodbye = =>
console.log 'ok lady, goodbye!'
otto.salutations = false
otto.myusername = false
otto.mychannel = false
otto.current_track_qid = false
@on 'welcome': ->
console.log 'welcome data', @data
$('body').removeClass 'disconnected'
otto.localhost = @data.localhost
otto.emptydatabase = @data.emptydatabase
otto.largedatabase = @data.largedatabase
otto.haslineout = @data.haslineout
otto.musicroot = @data.musicroot
if otto.emptydatabase
otto.create_hellopage()
otto.channel_list = @data.channellist
otto.myusername = @data.myusername
otto.mychannel = @data.mychannel
else
if $('.mmenu-page').length
otto.templates.body_reset()
else
$(document.body).html otto.templates.body()
$('.channellist-container').on 'click', otto.channellist_click_handler
otto.process_channellist @data.channellist, true #process_mychannel will do the final html
otto.process_myusername.call @, @data.myusername
otto.process_mychannel.call @, @data.mychannel
@on 'begun': ->
otto.emptydatabase = false
$(document.body).html otto.templates.body()
$('.channellist-container').on 'click', otto.channellist_click_handler
otto.process_channellist otto.channel_list, true #process_mychannel will do the final html
otto.process_myusername.call @, otto.myusername
otto.process_mychannel.call @, otto.mychannel
$('.output').append navigator.userAgent
$('.output').append otto.app
@emit 'updateme'
otto.create_mainpage = ->
#$(document).attr 'title', otto.current_channel.fullname + ' ▪ ' + otto.myurl + ' ▪ otto' #FIXME
#$(document).attr 'title', otto.current_channel.fullname + ' ▪ otto' #FIXME
if Modernizr.localstorage
size = localStorage.getItem('size') || 'size1'
$('#mainpage').html otto.templates.mainpage channel: otto.current_channel, largedatabase: otto.largedatabase, size: size
$('.playing-container').on 'click', otto.results_click_handler
$('.browseresults-container').on 'click', otto.results_click_handler
$('.console-container').on 'click', otto.console_click_handler
$('.volume').slider().slider value: otto.current_volume, range: 'min', slide: otto.adjust_volume_handler
$('.volumelineout').slider().slider value: otto.current_volume, range: 'min', slide: otto.adjust_volumelineout_handler
$('.scrollkiller').on 'mousewheel', otto.scroll_bubble_stop_handler
$('.console-container').resizable handles: 's', alsoResize: $('.output'), minHeight: 45, autoHide: true
$('.channellist-container').mmenu( { slidingSubmenus: false } )
#$('.cursor-hider').hover (e) -> e.stopPropagation() # don't suppress the mouseleave event FIXME
$('.cursor-hider').on 'mouseenter', (e) -> e.stopPropagation()
otto.chat_init()
# preserve chat window state
if otto.clientstate.inchat
otto.clientstate.inchat = false # to convince enable_chat to act
otto.enable_chat true
otto.create_hellopage = ->
otto.load_module 'cubes' # all the welcome css is in otto.cubes.css
$(document.body).html otto.templates.body_welcome musicroot: otto.musicroot
$('.folder .path').keydown (e) ->
if e.keyCode is 13
e.preventDefault()
$('.folder .path').blur()
@on 'queue': ->
console.log 'queue', @data
if @data.length
n = 0
console.log @data
for song, i in @data
if song.nowplaying
n = i
break
if not otto.current_track_qid == false || otto.current_track_qid != @data[n].mpdqueueid
len = Math.ceil(@data[n].length) # we want to round up so it matches what mpd does
# take 'len' if you don't want an initial progress bar
time = otto.current_song_time || { current: 0, total: len || 0 }
active = $('.currentcover-container .thumb').is '.active'
$('.currenttrack-container').html otto.templates.currenttrack song: @data[n], current: time.current, total: time.total
otto.autosize_adjust()
if active
top = $(window).scrollTop()
$('.currentcover-container .thumb').click()
$(window).scrollTop(top)
# that isn't cutting it, still scrolls if track changes
# and at a minimum we should check if the album changed and only repaint if so
otto.current_track_qid = @data[n].mpdqueueid
$target = if otto.noscroll_click_event then $(otto.noscroll_click_event.target) else false
otto.noscroll_click_event = false
otto.render_without_scrolling $target, =>
if otto.current_channel.layout is 'featured'
$('.ondeck-container').html otto.templates.featured songs: @data[0..@data.length]
else
$('.ondeck-container').html otto.templates.ondeck songs: @data[1..@data.length]
if otto.notifications # add notification for enter/leaving chat room #FIXME <- might be ok
song = @data[n]
if song._id isnt otto.lastnotificationid
otto.lastnotificationid = song._id
album = song.album || ''
#album += ' • ' + song.year if song.year
album += ' ' + song.year if song.year
artist = song.artist || ''
body = "#{album}\n#{artist}"
body += "\n#{song.owners[0].owner}" if song.owners?[0].owner?
otto.lastnotification.close() if otto.lastnotification # we should (also?) close on 'finished' event
otto.lastnotification = new Notification song.song, body: body, icon: "/image/120?id=#{song.cover}"
#n.onshow = ->
# timeoutSet 10000, -> n.close()
else
$('.currenttrack-container').html otto.templates.currenttrack {}
otto.cache.queue = @data
otto.mark_allthethings()
@on 'state': ->
console.log 'state', @data
otto.play_state = @data
if @data is 'play'
$play = $('#play')
$play.html otto.templates.icon 'pause'
$play.addClass 'shy'
else
$play = $('#play')
$play.html otto.templates.icon 'play'
$play.removeClass 'shy'
@on 'time': ->
if @data
parts = @data.split(':')
[current, total] = parts[0..1]
otto.current_song_time = { current: current, total: total }
$('.timeprogress-container').html otto.templates.timeprogress_widgets total: total, current: current
@on 'loader': ->
console.log 'loader says:', @data
otto.load_module 'cubes', =>
otto.client.cubes.loader_event @data
if @data is 'started'
if otto.emptydatabase
$(document.body).html otto.templates.initialload folder: $('.folder .path').text()
$('.loadingstatus').addClass('searching');
$('.loadingcubes').html otto.client.cubes.show()
else if otto.localhost and $('.loadingstatus').length is 0
# switch to cubes view (for when scan is triggered from app menu)
$('.browseresults-container').html otto.templates.cubeswithload()
$('.browseresults-container').append otto.templates.footer()
$('.loadingstatus').addClass('begin')
$('.loadingcubes').html otto.call_module 'cubes', 'show'
if $('.loadmusic2').text() is 'scan'
$('.loadmusic2').html otto.templates.ouroboros size: 'small', direction: 'cw', speed: 'slow'
else if @data.stdout and @data.stdout isnt 'new' and @data.stdout isnt ' ' and @data.stdout isnt ''
$output = $('.output')
if $output.length
amountscrolledup = $output.prop('scrollHeight') - $output.height() - $output.scrollTop()
$output.append otto.templates.loader event: @data.stdout
if amountscrolledup < 500 # was 80, but i think we need a different detection mech. for autoscroll
$output.scrollToBottom()
@on 'myusername': ->
console.log 'myusername'
otto.process_myusername.call @, @data
otto.process_myusername = (username) ->
otto.myusername = username
if username
$('body').addClass 'loggedin'
$('body').removeClass 'loggedout'
else
otto.enable_chat false
$('.browseresults-container').empty()
$('body').addClass 'loggedout'
$('body').removeClass 'loggedin'
@on 'myurl': ->
console.log 'myurl'
otto.myurl = @data.name
$('#channelname').text @data.fullname
@on 'mychannel': ->
console.log 'changing channel to', @data.name
otto.process_mychannel.call @, @data.name
otto.process_mychannel = (name) ->
otto.mychannel = name
otto.current_track_qid = false
otto.current_channel = false
for channel in otto.channel_list
if channel.name is otto.mychannel
otto.current_channel = channel
if not otto.current_channel then otto.current_channel = otto.channel_list[0]
if not otto.emptydatabase
otto.create_mainpage()
$('.channellist-container').html otto.templates.channellist channellist: otto.channel_list
if otto.connect_state isnt 'disconnected'
otto.connect_player()
otto.clientready = true
@emit 'updateme'
@on 'reload': ->
if otto.ignore_reload
console.log 'ignoring reload event'
return
if otto.connect_state isnt 'disconnected'
window.location.hash = '#connect=1'
else
window.location.hash = '' # hash still appears, rumour is you can't get rid of it
if otto.clientstate.inchat
if window.location.hash isnt ''
window.location.hash += '&chat=1'
else
window.location.hash += '#chat=1'
window.location.reload()
@on 'flushstream': ->
console.log 'flushstream'
if otto.connect_state isnt 'disconnected'
otto.reconnect_player() # make it stop playing instantly (flush buffer)
@on 'chat': ->
console.log @data.name
return if @data.name is 'finished'
otto.play_soundfx @data.name
otto.play_notification @data
$output = $('.output')
amountscrolledup = $output.prop('scrollHeight') - $output.height() - $output.scrollTop()
$output.append otto.templates.event event: @data
if amountscrolledup < 80
$output.scrollToBottom()
@on 'lists': ->
console.log 'lists'
if @data
otto.process_lists @data
otto.process_lists = (lists) ->
if lists
for user in lists
if user == otto.myusername
otto.cache.list = user.list
otto.mark_listed_items()
$('.browseresults-container').trigger('scrollstop')
@on 'stars': ->
console.log 'on stars', @data
if @data
otto.process_stars @data
otto.process_stars = (stars) ->
console.log 'process_stars'
for own username of stars
console.log 'username', username
console.log 'myusername', otto.myusername
if username == otto.myusername
console.log 'matched'
otto.cache.stars = stars[username]
otto.mark_starred_items()
$('.browseresults-container').trigger('scrollstop')
@on 'reloadmodule': ->
modulename = @data.match(/otto[.]client[.](.*)[.]coffee/)[1]
console.log 'reloading module ' + modulename
if modulename is 'templates' # templates module exception
delete otto.templates
$('head').append '<script src="/otto.client.templates.js">'
else
if modulename of otto.client
delete otto.client[modulename]
otto.load_module modulename
@emit 'updateme'
@on 'restyle': ->
sheetname = @data.sheetname
console.log "restyle time! (#{sheetname})"
original_sheet = undefined
for sheet in document.styleSheets
continue if sheet.disabled
try
if (sheet.ownerNode?.dataset?.href?.indexOf '/'+sheetname) > -1 # is this Chrome specific?
console.log 'disabling old style sheet'
sheet.disabled = true
else if sheet.data is sheetname
console.log 'disabling old reloaded style sheet'
sheet.disabled = true
catch err
console.error 'failed to disable style sheet'
#$new_sheet = $('<style id="#'+@data.sheetname+'css">').html @data.css
$new_sheet = $('<style>').html @data.css
#$new_sheet[0].data = sheetname
$('head').append $new_sheet
document.styleSheets[document.styleSheets.length-1].data = sheetname
@on 'listeners': ->
#console.log 'listeners', @data
socketid = @socket?.socket?.sessionid
$('.listeners-container').html otto.templates.listeners(listeners: @data, socketid: socketid)
#$('.channellist-container').html otto.templates.channellist channellist: otto.channel_list, listeners: @data, socketid: socketid
# let's try to insert the listeners without rebuilding the entire channel list
# so that open channel setting don't go away every time a listener changes state
for channel in otto.channel_list
html = otto.templates.format_listeners_for_channel_in_channelbar listeners: @data, channelname: channel.name
$('.channellist-container [data-channelname="'+channel.name+'"] .channellisteners').html html
@on 'channellist': ->
console.log 'channellist'
otto.process_channellist @data
@on 'outputs': ->
@on 'lineout': ->
if @data
for channelname,lineout of @data
$el = $('.channellist-container .changechannel[data-channelname="'+channelname+'"]')
if $el
if lineout == '1'
$el.addClass 'lineout'
else
$el.removeClass 'lineout'
@on 'status': ->
return if not otto.clientready or not @data
for channelname,status of @data
$el = $('.channellist-container .changechannel[data-channelname="'+channelname+'"]')
if $el.length
if status.state == 'play'
$el.addClass 'playing'
else
$el.removeClass 'playing'
if channelname == otto.mychannel
# should prob. do this here too: if parseInt($vol.slider('value')) != parseInt(status.volume)
$('#mainpage .volumelineout').slider 'option', 'value', status.volume
$el.find('.volumelineout').each ->
$vol = $(@)
# when i do a log based volume formula, i'll need to reverse it here
if parseInt($vol.slider('value')) != parseInt(status.volume)
#console.log '$vol.value', $vol.slider('value'), 'status.volume', status.volume
$vol.slider 'option', 'value', status.volume
$el.find('.crossfade').each ->
if status.xfade is '0'
$(@).removeClass 'enabled'
else
$(@).addClass 'enabled'
$el.find('.channelerrata-container').html otto.templates.channel_status_errata_widget status: status
@on 'replaygain': ->
if @data
for channelname,replaygain of @data
$el = $('.channellist-container .changechannel[data-channelname="'+channelname+'"]')
$el.find('.replaygain').each ->
if replaygain is 'off'
$(@).removeClass 'enabled'
else
$(@).addClass 'enabled'
otto.process_channellist = (channellist, skiphtml) =>
otto.channel_list = channellist
unless skiphtml
$('.channellist-container').html otto.templates.channellist channellist: otto.channel_list
$('.volumelineout').slider value: otto.current_volume, range: 'min', slide: otto.adjust_volumelineout_handler
####
#### handlers
####
otto.checkbox_click_handler = (e) =>
$checkbox = $(e.target)
if not $checkbox.is 'input[type="checkbox"]'
return
e.stopPropagation()
# no longer used
if $checkbox.is '#fxtoggle'
otto.soundfx = $checkbox.is ':checked'
if otto.soundfx
otto.play_soundfx 'fxenabled'
# not used anymore
if $checkbox.is '#lineouttoggle'
otto.lineout = $checkbox.is ':checked'
if otto.lineout
$('.volumelineout').show()
else
$('.volumelineout').hide()
for channel in otto.channel_list
if channel.name is otto.mychannel
@emit 'lineout', otto.lineout
break
otto.button_click_handler = (e) =>
$button = $(e.target)
if not $button.is 'button'
$button = $(e.target).parents('button').first()
if not $button.is 'button'
#console.log 'this did not look like a button to me'
#console.log $button
return
e.stopPropagation()
find_id = ($el, ancestorlimit=2) ->
id = $el.data('id')
if id then return id
for oneclass in $el[0].className.split /\s/
found = oneclass.match /^id(.{24})$/
if found then return found[1]
$el.find("*").each ->
id = find_id $(this), 0
if id then return false # stops .each
if id then return id
if ancestorlimit > 0
return find_id $el.parent(), ancestorlimit - 1
return 0
find_qid = ($el, ancestorlimit=2) ->
qid = $el.data('mpdqueueid')
if qid then return qid
$el.find("*").each ->
qid = find_qid $(this), 0
if qid then return false # stop .each
if qid then return qid
if ancestorlimit > 0
return find_qid $el.parent(), ancestorlimit - 1
return 0
# check for unqueue class before enqueue since the button will be .enqueue.unqueue
if $button.is '.unqueue'
qid = find_qid $button
console.log 'deleteid', qid
otto.noscroll_click_event = e
@emit 'deleteid', qid
else if $button.is '.enqueue'
id = find_id $button
console.log 'enqueue', id
otto.noscroll_click_event = e
@emit 'enqueue', id
else if $button.is '.stars'
#console.log '.stars', e
console.log $button
#console.log 'e.pageX', e.pageX
console.log '$button.offset().left', $button.offset().left
if $('html').is '.doubler'
clickpoint = e.pageX - ($button.offset().left * 2) - 8
clickpoint = clickpoint / 2
else
clickpoint = e.pageX - $button.offset().left - 4
#console.log 'clickpoint', clickpoint
if clickpoint < 2
halfstars = 0
else if clickpoint < 11
halfstars = 1
else if clickpoint < 19
halfstars = 2
else if clickpoint < 26
halfstars = 3
else if clickpoint < 35
halfstars = 4
else if clickpoint < 42
halfstars = 5
else
halfstars = 6
#console.log halfstars
id = find_id $button
console.log "stars #{halfstars} " + id
$button.removeClass('n0 n1 n2 n3 n4 n5 n6').addClass('n' + halfstars)
@emit 'stars', id: id, rank: halfstars
else if $button.is '#connect'
console.log otto.connect_state
if otto.connect_state is 'disconnected'
otto.connect_player()
else
otto.disconnect_player()
else if $button.is '#play'
toggle_play.call @
else if $button.is '#next'
next_track.call @
else if $button.is '.smaller'
#if $('html').is '.doubler'
# console.log 'undoubler'
# $('html').removeClass('doubler')
#else
# $('.currenttrack-container').addClass('size1').removeClass('size2')
# $('.next-container').addClass('size1').removeClass('size2')
$('.currenttrack-container,.next-container,.size-container').addClass('size1').removeClass('size2')
otto.autosize_adjust()
if Modernizr.localstorage
localStorage.setItem 'size', 'size1'
else if $button.is '.bigger'
#window.resizeTo(1920, 1080) # just for debugging tv mode
#if $('.currenttrack-container').is '.size2'
# $('html').addClass('doubler')
#else
# $('.currenttrack-container').addClass('size2').removeClass('size1')
# $('.next-container').addClass('size2').removeClass('size1')
$('.currenttrack-container,.next-container,.size-container').addClass('size2').removeClass('size1')
otto.autosize_adjust()
if Modernizr.localstorage
localStorage.setItem 'size', 'size2'
else if $button.is '.close'
container_top = $button.parent().parent().parent().offset().top
$button.parent().remove()
if $('.browseresults-container').parent().scrollTop() > container_top
$('.browseresults-container').parent().scrollTop(container_top)
else if $button.is '.runself'
run = $button.data('run')
run()
else if $button.is '.download'
id = $button.data('id')
$iframe = $("<iframe class='download' id='#{id}' style='display:none'>")
$(document.body).append $iframe
$iframe.attr 'src', "/download/#{id}"
$iframe.load ->
console.log "iframe #{id} loaded"
#$iframe.remove() # this seems to cut off the download FIXME
else if $button.is '.chattoggle'
#$('.console-container').toggle(200)
if not otto.clientstate.inchat
otto.enable_chat true
else
otto.enable_chat false
else if $button.is '.channeltoggle'
toggle_channellist $button
else if $button.is '.logout'
@emit 'logout'
else if $button.is '.play'
@emit 'play', $button.data('position')
else if $button.is '.notificationstoggle'
if otto.notifications
otto.notifications = false
$button.removeClass 'enabled'
otto.lastnotification.close() if otto.lastnotification
else if Notification?
Notification.requestPermission (status) ->
console.log 'notifications permission', status # looking for "granted"
if status isnt "granted"
otto.notifications = false
$button.removeClass 'enabled'
else
otto.notifications = true
$button.addClass 'enabled'
n = new Notification "Notifications Enabled", {body: ""} # this also shows the notification
n.onshow = ->
timeoutSet 4000, -> n.close()
otto.lastnotification = n
else if $button.is '.soundfxtoggle'
if otto.soundfx
otto.soundfx = false
$button.removeClass 'enabled'
else
otto.soundfx = true
$button.addClass 'enabled'
otto.play_soundfx 'fxenabled'
else if $button.is '.selectfolder'
if otto.localhost and /Otto$/.test navigator.userAgent
#@emit('selectfolder') # uneven message processing in Otto.py make this unusable
# instead i use a UIDelegate on the webview to override the file selection input
# so the rest of this if should never be run
$('#selectFolder').click()
$('#selectFolder').change ->
alert 'sorry, you can\'t use the folder selection dialog from a web browser'
#$('.folder .path').text $('#selectFolder').val()
return false
else
$path = $('.folder .path')
$path.focus()
#$path.text $path.text() # move cursor to end? nope.
#len = $path.val().length
#$path[0].setSelectionRange(len, len) # nope (setSelectionRange not defined)
else if $button.is '.loadmusic'
@emit 'loadmusic', $('.folder .path').text()
else if $button.is '.loadmusic2'
if $button.text() is 'scan'
$button.html otto.templates.ouroboros size: 'small', direction: 'cw', speed: 'slow'
@emit 'loadmusic' # scan.py defaults to last directory scanned
else
$button.html('scan')
@emit 'loadmusiccancel'
else if $button.is '.begin'
console.log 'begin!'
@emit 'begin'
else if $button.is '.restartload'
console.log 'restartload'
otto.create_hellopage()
else
console.log 'did not know what action to do with button'
console.log $button
return
e.cancelBubble = true
if e.stopPropagation
e.stopPropagation()
otto.results_click_handler = (e) =>
$target = $(e.target)
if $target.is '.expand'
$expand = $target
else if $target.parent().is '.expand'
$expand = $target.parent()
if $target.is '.gotothere'
$gotothere = $target
else if $target.parent().is '.gotothere'
$gotothere = $target.parent()
else if $target.parent().parent().is '.gotothere'
$gotothere = $target.parent().parent()
if $expand
id = $expand.data('id')
containerid = $expand.data('container') || id
#$container = $(".id#{containerid}")
$parent = $expand.parent()
$container = $parent
if $parent.is '.thumbnails'
$container = $parent.parent()
$expanded = $('.expanded')
if $expanded.length and $expanded.data('id') == id
# same item, close it instead of redisplaying it (i.e. do nothing)
otto.render_without_scrolling $(e.target), ->
$expanded.remove()
$('.active').removeClass('active')
else
$.getJSON '/album_details', {'id': id}, (data) ->
if data.albums
data=data.albums
$.getJSON '/load_object', {'id': containerid}, (fileunder) ->
otto.render_without_scrolling $(e.target), ->
$expanded.remove()
$('.active').removeClass('active')
$element = $ "<div class='expanded cf' data-id=#{id}>"
$element.html otto.templates.albums_details data: [].concat(data), fileunder: fileunder, id: id
if $parent.is '.albumall'
# search forward to find where the line breaks and put it there
$eol = false
$last = $parent
while not $eol
$next = $last.next()
if $next.length is 0
$eol = $last
else if $next.offset().top > $last.offset().top
$eol = $last
else
$last = $next
$eol.after $element
else
$container.append $element
$expand.addClass('active')
otto.mark_allthethings()
else if $gotothere
id = $gotothere.data('id')
if $gotothere.is '.active'
$('.browseresults-container').empty()
$gotothere.removeClass('active')
console.log 'removing active'
else
$('.active').removeClass('active')
$gotothere.addClass('active')
console.log 'adding active'
if not $('.browseresults-container').children().first().is '.albumdetailscontainer'
$('.browseresults-container').empty()
$.getJSON '/load_object', {id: id, load_parents: 20}, (object) ->
if object.otype is 10
id = object.albums[0]._id
else if object.otype in [20, 30]
id = object._id
if object.otype in [10, 20]
$.getJSON '/album_details', {'id': id}, (data) ->
displayed_id = $('.browseresults-container').children().first().data('id')
if not displayed_id or displayed_id != id
if data.albums
data=data.albums
$('.browseresults-container').html otto.templates.albums_details data: [].concat(data), id: id
otto.mark_allthethings()
else if object.otype in [30]
# database is broken. artist->fileunder isn't recorded in collections! FIXME
# hack around this
id = $gotothere.data('albumid')
$.getJSON '/load_fileunder', {'artistid': id}, (fileunder) ->
id = 0
for fu in fileunder
if fu.key != 'various'
id = fu._id
if id
$.getJSON '/album_details', {'id': id}, (data) ->
displayed_id = $('.browseresults-container').children().first().data('id')
if not displayed_id or displayed_id != id
if data.albums
data=data.albums
$('.browseresults-container').html otto.templates.albums_details data: [].concat(data), id: id
otto.mark_allthethings()
else if $target.is('.progress') or $target.is('.progress-indicator')
if $target.is('.progress-indicator')
width = $target.parent().innerWidth()
adjust = 0
else
width = $target.innerWidth()
adjust = -2
seconds = Math.round( (e.offsetX+adjust) / (width-1) * otto.current_song_time.total)
console.log 'seconds', seconds
@emit 'seek', seconds
#if otto.connect_state isnt 'disconnected'
# otto.reconnect_player() # make it stop playing instantly (flush buffer)
#else if $target.is '.loadmusic' # i don't understand why this is needed here, why button_click_handler doesn't see it
# #@emit 'loadmusic', $('.folder .path').text()
# console.log 'this one'
# @emit 'loadmusic', '/Users/jon/Music'
else
console.log 'do not know what to do with clicks on this element:'
console.dir $target
console.dir e
toggle_play = ->
if otto.play_state is 'play'
@emit 'pause'
else
@emit 'play'
next_track = ->
qid = otto.current_track_qid
console.log 'deleteid', qid
@emit 'deleteid', qid
otto.current_song_time = false
#if otto.connect_state isnt 'disconnected'
# otto.reconnect_player() # make it stop playing instantly (flush buffer)
toggle_channellist = ($button) ->
$channellist = $('.channellist-container')
if $channellist.is '.mmenu-opened'
$channellist.trigger('close')
else
$channellist.trigger('open')
$button.trigger 'mousemove'
# webkit bug leaves the div hovered when it is moved from under cursor
#$('.channelbar').trigger 'mouseleave' # doesn't work
otto.channellist_click_handler = (e) =>
$target = $(e.target)
$button = $(e.target)
if $target.is 'button'
$button = $target
else
$button = $target.parents('button').first()
if $button.is 'button'
e.stopPropagation()
else
$button = false
find_channelname = ($el, ancestorlimit=4) ->
channelname = $el.data('channelname')
if channelname then return channelname
if ancestorlimit > 0
return find_channelname $el.parent(), ancestorlimit - 1
return false
if $button
if $button.is '.channeltoggle'
toggle_channellist $button
else if $button.is '.channeloutput'
channelname = find_channelname($target)
alt = e.altKey
@emit 'togglelineout', channelname: channelname, alt: e.altKey
else if $button.is '.crossfade'
channelname = find_channelname($target)
@emit 'togglecrossfade', channelname: channelname
else if $button.is '.replaygain'
channelname = find_channelname($target)
@emit 'togglereplaygain', channelname: channelname
else if $button.is '.channelplay'
channelname = find_channelname($target)
@emit 'toggleplay', channelname
else if $button.is '.channelsettings'
if $button.parent().is '.open'
$button.parent().parent().parent().find('.open').removeClass 'open'
else
$button.parent().parent().parent().find('.open').removeClass 'open'
$button.parent().addClass 'open'
else if $target.is '.channelselect, .channelname, .channellisteners, .listener'
newchannelname = find_channelname($target)
console.log 'change channel to', newchannelname
@emit 'changechannel', newchannelname
$('.channellist-container').trigger('close')
else
console.log 'do not know what to do about a click on this here element:'
console.dir $target
otto.console_click_handler = (e) ->
$target = $(e.target)
if $target.is '.console-container'
$target.focus()
return true
else
console.log 'do not know what to do with a click on this element:'
console.dir $target
otto.command_change_handler = (command) =>
if command and command != '' and command.indexOf('.') != 0
if not otto.clientstate.typing
@emit 'typing', 1
otto.clientstate.typing = 1
else
if otto.clientstate.typing
@emit 'typing', 0
otto.clientstate.typing = 0
otto.window_focus_handler = (e) =>
if e.type is 'focus'
otto.clientstate.focus = 1
else if e.type is 'blur'
otto.clientstate.focus = 0
@emit 'focus', otto.clientstate.focus
otto.window_idle_handler_init = =>
$.idleTimer(5 * 60 * 1000)
$(document).on 'idle.idleTimer', =>
if not otto.clientstate.idle
@emit 'idle', 1
otto.clientstate.idle = 1
$(document).on 'active.idleTimer', =>
if otto.clientstate.idle
@emit 'idle', 0
otto.clientstate.idle = 0
otto.logo_click_handler = (e) =>
$logo = $(e.target)
if not $logo.is '.logo'
return
if $logo.is '.footer-logo'
$(window).scrollTop(0)
# had to use a real link so Otto.py could intercept it
#else
# window.open('http://ottoaudiojukebox.com/')
otto.letterbar_click_handler = (e) =>
$letter = $(e.target)
if not $letter.is '.letter'
$letter = $(e.target).parents('.letter').first()
if not $letter.is '.letter'
#console.log 'this did not look like a letter to me'
#console.log $letter
return
e.stopPropagation()
if $letter.is '.active'
$letter.removeClass 'active'
$('.browseresults-container').empty()
return
$('.active').removeClass 'active'
$letter.addClass 'active'
if $letter.is '.warn'
if $letter.is '.big'
$alert = $ otto.templates.alert
#message: '''
# Warning: This shows everything, which is quite a lot.
# It takes a while, give it at least a minute or two.
# It can be very hard on your browser (Chrome probably handles it best).
#'''
message: '''
Warning: This is supposed to show everything, but it
is not currently working.
'''
if $letter.is '.beta'
$alert = $ otto.templates.alert
message: 'Warning: this feature is not really working yet'
$alert.find('#ok').data 'run', ->
$('.browseresults-container').empty()
$letter.clone().removeClass('warn active').on('click', otto.letter_click_handler).click()
$alert.find("#cancel").data 'run', ->
$letter.parent().find("li").removeClass 'active'
$('.browseresults-container').html '<div>canceled</div>'
$('.browseresults-container').children().fadeOut 1500, ->
$('.browseresults-container').empty()
$('.browseresults-container').html $alert
return
if $letter.is '.shownewest'
return otto.render_json_call_to_results '/load_newest_albums', {}, 'newest_albums'
if $letter.is '.showusers'
return otto.render_json_call_to_results '/load_users', {}, 'show_users'
if $letter.is '.showall'
return otto.render_json_call_to_results '/all_albums_by_year', {}, 'allalbums'
if $letter.is '.showcubes'
otto.load_module 'cubes', ->
$('.browseresults-container').html otto.templates.cubeswithload()
$('.browseresults-container').append otto.templates.footer()
$('.loadingstatus').addClass('begin')
$('.loadingcubes').html otto.call_module 'cubes', 'show'
return
if $letter.is '.showlists'
return otto.render_json_call_to_results '/load_lists', { objects: 1 }, 'show_lists'
if $letter.is '.showstars'
return otto.render_json_call_to_results '/load_stars', { objects: 1 }, 'show_stars'
val = $letter.text()
if val is '/'
return otto.dirbrowser()
if val is '#'
val = 'num'
if val is '⋯'
val = 'other'
otto.render_json_call_to_results '/starts_with', \
{ value: val, otype: 40, attribute: 'key' }, 'startswith'
otto.results_lazyload_handler = (e) ->
$(".browseresults-container").children().each ->
$this = $ this
# skip this container if it's marked nolazy
if $this.is '.nolazy'
return
s = {threshold: 2000, container: window}
# check if this container is visible, skip it if it's not
if $.belowthefold(this, s) || $.rightoffold(this, s) || $.abovethetop(this, s) || $.leftofbegin(this, s)
return
# now dive in to the top level items on a page
$(".browseresults-container").children().each ->
$this = $(this)
# skip this container if it's marked nolazy
if $this.is '.nolazy'
return
s = {threshold: 2000, container: window}
# check if this container is visible, skip it if it's not
if $.belowthefold(this, s) || $.rightoffold(this, s) || $.abovethetop(this, s) || $.leftofbegin(this, s)
return
$this.find("img.lazy").each ->
$this = $(this)
if not ($.belowthefold(this, s) || $.rightoffold(this, s) || $.abovethetop(this, s) || $.leftofbegin(this, s))
$lazy = $this
$real = $("<img />").on 'load', ->
$lazy.attr('src', $lazy.data('original'))
$real.attr("src", $lazy.data('original'))
$lazy.removeClass('lazy')
otto.form_submit_handler = (e) =>
$form = $(e.target)
return if not $form.is 'form'
e.preventDefault()
if $form.is '.searchform'
$searchtext = $('#searchtext')
search_word = $searchtext.val()
$searchtext.select()
if search_word isnt ''
$('.active').removeClass 'active'
otto.render_json_call_to_results '/search', {value: search_word}, 'search', \
" Searching <span class='highlight'>#{search_word}</span>"
else if $form.is '.loginform'
name = $('#logintext').val()
if name isnt ''
@emit 'login', name
$('#logintext').val('')
otto.adjust_volume_handler = (e, ui) ->
console.log 'adjust_volume'
otto.current_volume = ui.value
otto.call_module_ifloaded 'player', 'setvolume', otto.current_volume
otto.adjust_volumelineout_handler = (e, ui) =>
console.log 'adjust_volumelineout', ui.value
find_channelname = ($el, ancestorlimit=4) ->
channelname = $el.data('channelname')
if channelname then return channelname
if ancestorlimit > 0
return find_channelname $el.parent(), ancestorlimit - 1
return false
channelname = find_channelname( $(e.target) ) || otto.mychannel
@emit 'setvol', channelname: channelname, volume: ui.value
# prevent mouse wheel events from bubbling up to the parent
otto.scroll_bubble_stop_handler = (e, delta) ->
# requires the jquery.mousewheel plugin which adds the delta param
$this = $(this)
height = $this.height()
scroll_height = $this.prop 'scrollHeight'
if scroll_height > height # only kill scrolling on elements with scroll bars
if delta > 0 # scroll up
if $this.scrollTop() == 0
e.preventDefault()
else if delta < 0 # scroll down
scroll_bottom = $this.scrollTop() + height
if scroll_bottom == scroll_height
e.preventDefault()
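# minimal usage sketch (the actual binding is not shown in this file, so treat it as an assumption):
# the handler expects the extra `delta` argument supplied by the jquery.mousewheel plugin, e.g.
#   $('.output').on 'mousewheel', otto.scroll_bubble_stop_handler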
####
#### other stuff
####
otto.render_json_call_to_results = (url, params, template, message, callback) ->
$results = $('.browseresults-container')
if message
$results.html message
else
$results.empty()
$.getJSON url, params, (data) ->
# we could put the rendering under nextTick so the waiting spinner goes away
# and is not at risk of staying there if the rendering throws an exception
# *but* the rendering is often what takes the most time so we still
# want a spinner.
#nextTick ->
# we could also consider changing the spinner to be the slower one once
# the rendering starts
# let's try catching any exceptions during rendering so we can exit
# cleanly and jquery can call our code to dismiss the spinner
try
$results.append otto.templates[template] data: data, params: params
#document.body.scrollTop=0
otto.mark_allthethings()
$results.trigger 'scrollstop'
console.log 'top', $results.offset().top
console.log 'height', $results.height()
console.log 'window', $(window).height()
if $results.offset().top + $results.height() > $(window).height() - 50
$results.append otto.templates.footer()
if callback
callback(data)
catch error
console.error "render_json_call_to_results caught error #{error}"
otto.render_without_scrolling = ($target, render) ->
if not $target
render()
else
console.log 'before', $target.offset()
top_before = $target.offset().top
render()
top_after = $target.offset().top
if top_after isnt 0 # sometimes the element is removed (e.g. removing songs from ondeck panel)
console.log 'after', $target.offset()
amount_moved = top_after - top_before
console.log 'moved', amount_moved, $(window).scrollTop(), $(window).scrollTop() + amount_moved
$(window).scrollTop( $(window).scrollTop() + amount_moved )
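# (used by the album expand/collapse handler above so toggling details doesn't jump the scroll position)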
otto.dirbrowser = ->
dirbrowser_html = $ otto.templates.dirbrowser()
dirbrowser_click_handler = (e) ->
item = $(e.target)
id = item.attr('id')
if item.is '.path'
$.getJSON '/load_dir', {'id': id}, (data) ->
$('#subdirs').html otto.templates.dirbrowser_subdir data: data
else if item.is '.subdir'
$('#path').append(
$('<li class="path">').html(
item.attr('data-filename')+'/'
)
)
$.getJSON '/load_dir', {'id': id}, (data) ->
$('#subdirs').html otto.templates.dirbrowser_subdir data: data
dirbrowser_html.click dirbrowser_click_handler
$('.browseresults-container').html dirbrowser_html
$.getJSON '/music_root_dirs', (data) ->
$('#path').html otto.templates.dirbrowser_item data: data
otto.mark_allthethings = ->
otto.mark_queued_songs()
otto.mark_listed_items()
otto.mark_starred_items()
otto.mark_queued_songs = () ->
$('.inqueue').removeClass('inqueue')
$('.first').removeClass('first')
$('.enqueue.unqueue').removeClass('unqueue')
first = true
for song in otto.cache.queue
#if not song.requestor then continue # if we only want to mark non auto picked songs
$items = $('.id'+song._id)
classstr = 'inqueue'
if first
classstr += ' first'
$items.addClass(classstr)
$items.parent().find('.enqueue').addClass('unqueue')
if first
$items.parent().find('.enqueue').addClass('first')
$items.data('mpdqueueid', song.mpdqueueid)
first = false
otto.mark_listed_items = () ->
otto.mark_starred_items = () ->
$('.stars.n1:not(.noupdate), .stars.n2:not(.noupdate), .stars.n3:not(.noupdate), .stars.n4:not(.noupdate), .stars.n5:not(.noupdate), .stars.n6:not(.noupdate)').removeClass('n1 n2 n3 n4 n5 n6').addClass('n0')
if otto.cache.stars
for item in otto.cache.stars
$el = $('[data-id='+item.child.toString()+'].stars')
if not $el.is('.noupdate')
$el.addClass('n'+item.rank)
otto.compute_artistinfo = (album) ->
# FIXME doubled artists? (filter out 5,6?)
all = []
various = soundtrack = false
single = primary = secondary = ''
if album.artists
for artist in album.artists
if artist.artist is 'Soundtrack'
soundtrack = 'Soundtrack'
# FIXME we shouldn't have to remove these
# (but we do until i track down why everything is soundtrack and various bug)
continue if artist.artist is 'Soundtrack'
continue if artist.artist is 'Various'
all.push(artist.artist)
else if album.artist
all.push(album.artist)
if all.length > 2
various = 'Various'
single = 'Various'
if not various
if all.length
primary = all[0]
if all.length is 2
secondary = all[1]
single = primary
if secondary
single += ', ' + secondary
if soundtrack
if not single
single = 'Soundtrack'
#console.log all
return { various: various, soundtrack: soundtrack, single: single, primary: primary, secondary: secondary, all: all }
otto.connect_player = ->
if not otto.ismoduleloaded 'player'
# give some immediate feedback while the module loads
$('#connect').html otto.templates.ouroboros size: 'small', direction: 'cw', speed: 'slow'
otto.connect_state = 'connected'
otto.call_module 'player', 'connect', otto.mychannel
otto.disconnect_player = ->
otto.connect_state = 'disconnected'
otto.call_module 'player', 'disconnect'
otto.reconnect_player = ->
otto.disconnect_player()
otto.connect_player()
otto.play_soundfx = (name) ->
if otto.soundfx
otto.call_module 'soundfx', 'play', name
otto.play_notification = (event) ->
if otto.notifications
return if event.name is 'finished'
text = otto.templates.event_text event
n = new Notification event.name, body: text
n.onshow = ->
timeoutSet 10000, -> n.close()
otto.touch_init = ->
#disable shy controls on touch devices
#touch_device = 'ontouchstart' in document.documentElement
#touch_device = ('ontouchstart' in window) or window.DocumentTouch and document instanceof DocumentTouch
#touch_device = 'ontouchstart' in window or 'onmsgesturechange' in window # 2nd test for ie10
#touch_device = true #detection not working for some reason wtf? FIXME
#$('head').append '<script src="static/js/modernizr.custom.66957.js">'
touch_device = Modernizr.touch # prob doesn't work for ie10
#http://stackoverflow.com/questions/4817029/whats-the-best-way-to-detect-a-touch-screen-device-using-javascript
if touch_device
otto.touchdevice = true
console.log 'touch device detected, disabling shy controls'
$('body').addClass 'noshy'
$('head').append '<script src="static/js/fastclick.js">'
FastClick.attach(document.body)
addToHomescreen skipFirstVisit: true, maxDisplayCount: 1
# http://blog.flinto.com/how-to-get-black-status-bars.html
if window.navigator.standalone
$("meta[name='apple-mobile-web-app-status-bar-style']'").remove()
otto.ouroboros_ajax_hooks = ->
# css animations seem to stop when the client is working (e.g. when rendering the template
# after receiving the data from the ajax call). i wonder if animated gifs do.
# i hear making it its own layer on the GPU will allow animations to continue (ref AEA '13 notes)
$(document).ajaxStart ->
$('.ouroboros-container').html otto.templates.ouroboros()
$(document).ajaxStop ->
$('.ouroboros-container').empty()
$(document).ajaxError (e, xhr, settings, exception) ->
# or maybe use ajaxComplete (no)? look at the jQuery docs
# (see http://stackoverflow.com/questions/4419241/jqueryajaxstop-versus-jqueryajaxcomplete)
# it seems if you throw and error while processing in your success callback this (or ajaxStop)
# doesn't get called? if true then this is fragile. perhaps use the ajaxSend/ajaxComplete hooks?
# those should run before any of our error-prone code, yes? we'd need to keep an outstanding call count
$('.ouroboros-container').empty()
console.log 'ajax error in: ' + settings.url + ' \n'+'error:\n' + exception
throw exception
otto.chat_init = =>
chatinput = (str) =>
$('.output').scrollToBottom()
str = $.trim(str) # should strip tabs too FIXME
if str != ''
parts = str.match(/^([/.])([^ ]*)[ ]*(.*)$/) # spaces *and* tabs FIXME
if parts
prefix = parts[1]
command = parts[2]
therest = parts[3]
args = therest.split(/[ \t]/) # split on spaces and tabs (was a string literal, not a regex)
switch command
when 'cls' then $('.output').empty()
when 'reload' then @emit 'reloadme'
when 'reloadall' then @emit 'reloadall'
when 'nick' then @emit 'login', therest
when 'exit' then otto.enable_chat false
when 'leave' then otto.enable_chat false
when 'part' then otto.enable_chat false
when 'pause' then if otto.play_state is 'play' then toggle_play.call @
when 'play' then if otto.play_state is 'pause' then toggle_play.call @
when 'next' then next_track.call @
when 'help' then $('.output').append otto.templates.chathelp()
else $('.output').append otto.templates.chatunknowncommand prefix: prefix, command: command
$('.output').scrollToBottom()
else
@emit 'chat', str
$('#inputr').first().cmd
prompt: -> # empty function suppresses the addition of a prompt
width: '100%'
elementtobind: $('.console-container')
commands: chatinput
onCommandChange: otto.command_change_handler
otto.enable_chat = (state) =>
if state and not otto.clientstate.inchat and otto.myusername
$('.console-container').slideDown(200)
$('.console-container').focus()
otto.clientstate.inchat = 1
@emit 'inchat', 1
$('body').addClass 'inchat'
else if not state and otto.clientstate.inchat
$('.console-container').slideUp(200)
$('.channelbar-container').focus()
otto.clientstate.inchat = 0
@emit 'inchat', 0
$('body').removeClass 'inchat'
## should only do this in dev mode, but we need to tell the client we are in dev mode somehow FIXME
#window.console.log = ->
# @emit 'console.log', Array().slice.call(arguments)
#window.console.dir = ->
# @emit 'console.dir', Array().slice.call(arguments)
#alternatively:
#$('body').append $ '<script src="http://jsconsole.com/remote.js?656D8845-91E3-4879-AD29-E7C807640B61">'
<|start_filename|>static/css/otto.cubes.css<|end_filename|>
/* the welcome screen css is tucked here so it only loads when needed
along with the cubes module which it requires anyways */
.welcome-container {
height: 100%;
background-color: #333;
/* overflow: hidden; */
white-space: nowrap;
}
.welcome {
width: 100%;
padding-top: 15px;
vertical-align: top;
font-family: 'Lato', helvetica, arial;
}
.greeting {
display: block;
visibility: visible; /* undo the FOUC fix in otto.css */
width: 100%;
margin-top: 60px;
text-align: center;
color: #BF6F04;
}
.hi {
font-size: 48px;
margin-bottom: 60px;
}
.explain {
font-size: 22px;
line-height: 36px;
color: #BF6F04;
}
.greeting .folder {
margin: 20px 0 0px 0;
font-size: 32px;
color: #6C6C6C;
}
.greeting .folder button {
position: relative;
top: 3px;
left: -8px;
}
.greeting .folder .path {
position: relative;
left: -4px;
padding: 5px 20px 5px 6px;
border: solid 2px rgba(0,0,0,0);
}
.greeting .folder .path:active,
.greeting .folder .path:focus {
border: solid 2px #BF6F04;
border-radius: 3px;
outline: none;
}
.explain.note {
position: relative;
top: 6px;
font-size: 16px;
/* color: #6C6C6C; */
}
.explain.note button {
position: relative;
top: 6px;
left: -2px;
}
.control.loadmusic,
.control.restartload,
.control.continue {
position: relative;
line-height: 19px;
z-index: 3;
}
.control.continue {
margin-top: 10px;
}
.control.medium2.begin {
margin-top: 6px;
}
.control.loadmusic {
margin-top: 55px;
}
.beginmessage {
position: absolute;
top: 70px;
width: 100%;
text-align: center;
z-index: 2;
}
.beginmessage div {
display: inline-block;
}
.beginmessage div button {
position: relative;
top: 8px;
left: -1px;
}
.beginmessage > button {
margin-top: 22px;
}
.initialload-container {
position: absolute;
top: 0;
height: 100%;
width: 100%;
}
.cubeswithload-container {
}
.loadingstatus {
position: relative;
padding-top: 74px;
height: 120px;
width: 100%;
}
.browseresults-container .loadingstatus {
padding-top: 0;
}
.status {
display: none;
width: 100%;
text-align: center;
font-size: 22px;
line-height: 22px;
color: #BF6F04;
}
.status {
}
.loadingstatus.begin .status.begin,
.loadingstatus.searching .status.searching,
.loadingstatus.loading .status.loading,
.loadingstatus.finished .status.finished,
.loadingstatus.nonefound .status.nonefound,
.loadingstatus.error .status.error {
display: inline-block;
}
.loadingresults.error .loadingcurrent {
visibility: hidden;
}
.loadingprogress {
position: relative;
top: -50px;
width: 60%;
height: 20px;
left: 45px;
margin: 0 auto;
white-space: nowrap;
}
.loadingprogress .progress {
cursor: inherit;
}
.loadingcurrent {
position: relative;
top: -40px;
margin-top: 25px;
margin-left: 100px;
height: 48px;
white-space: nowrap;
overflow: hidden;
}
.thumb.px40:hover {
border: solid 2px #333;
cursor: inherit;
}
.loadingspinner {
position: relative;
display: inline-block;
}
.loadingspinner div {
display: inline-block;
}
.loadingspinner .note {
position: relative;
top: -6px;
left: 12px;
width: 110px;
text-align: left;
}
.control.loadmusic2 {
position: absolute;
padding-top: 0;
right: 30px;
top: -7px;
line-height: 11px;
}
.loadmusic2 .ouroboros {
position: relative;
top: 1px;
}
.browseresults-container .cubeswithload-container {
margin-right: 75px;
}
.browseresults-container .loadingstatus {
height: 30px;
}
.browseresults-container .loadingstatus .status,
.browseresults-container .loadingspinner {
display: none !important;
}
.browseresults-container .loadingstatus.begin .status.begin {
display: block !important;
}
.browseresults-container .loadingprogress {
top: -30px;
}
.browseresults-container .loadingcurrent {
top: -50px;
}
.browseresults-container .loadingcubes-container {
position: relative;
top: -40px;
}
.loadingcubes {
position: relative;
height: 512px;
}
.cubes-container {
position: absolute;
bottom: 0;
height: 100%;
width: 100%;
overflow: hidden;
}
.resort {
position: absolute;
bottom: 60px;
left: 80px;
}
.landscape-right {
position: absolute;
bottom: 0;
width: 50%;
right: 50%;
height: 500px;
transform: scaleX(-1);
-webkit-transform: scaleX(-1);
/* background: url('/static/images/grid_tile.png'); */
background: url('/static/images/grid_tile.svg');
background-color: #333;
border-top: solid 1px #2D2D2D;
/* -webkit-mask-image: -webkit-gradient(top, rgba(255,255,255,0), rgba(255,255,255, 1)) */
/* http://mir.aculo.us/2012/09/16/masking-html-elements-with-gradient-based-fadeouts/ */
-webkit-mask-image: -webkit-gradient(linear, right bottom, right top, from(rgba(0,0,0,1)), color-stop(0.40, rgba(0,0,0,1)), to(rgba(0,0,0,0.3)));
/* mask: url(#fade_right_svg_mask); */
background-position: top;
}
.landscape-left {
position: absolute;
bottom: 0;
width: 50%;
left: 50%;
height: 500px;
/* background: url('/static/images/grid_tile.png'); */
background: url('/static/images/grid_tile.svg');
background-color: #333;
border-top: solid 1px #2D2D2D;
/* -webkit-mask-image: -webkit-gradient(top, rgba(255,255,255,0), rgba(255,255,255, 1)) */
/* http://mir.aculo.us/2012/09/16/masking-html-elements-with-gradient-based-fadeouts/ */
-webkit-mask-image: -webkit-gradient(linear, right bottom, right top, from(rgba(0,0,0,1)), color-stop(0.40, rgba(0,0,0,1)), to(rgba(0,0,0,0.3)));
/* mask: url(#fade_right_svg_mask); */
background-position: top;
}
.cubes {
position: absolute;
bottom: 94px;
left: 50%;
width: 1px;
margin: 0 auto;
text-align: left;
}
.scene {
position: relative;
}
.cube {
width: 28px;
height: 28px;
display: block;
/* visibility: hidden; */
position: absolute;
/*background-image: url('/static/images/blocks.png');*/
background-image: url('/static/images/cds.png');
background-repeat: no-repeat;
}
.stacklabel {
position: absolute;
padding-right: 4px;
padding-left: 4px;
padding-bottom: 10px;
font-weight: 800;
font-size: 9px;
color: #505050;
cursor: pointer;
transform: skew(63deg,-26.5deg) !important;
transform-origin: -8px 1px;
-webkit-transform: skew(63deg,-26.5deg) !important;
-webkit-transform-origin: -8px 1px;
}
.stacklabel:hover {
/* text-decoration: underline; */
/* text-shadow: 1px 1px 5px white */
color: #999;
}
.stacklabel.active {
color: #AAA;
}
<|start_filename|>otto.client.misc.coffee<|end_filename|>
###############
### client side (body of otto.client.misc.coffee served as /otto.misc.js)
###############
global.otto.client.misc = ->
window.otto = window.otto || {}
# on demand client side modules
otto.client = otto.client || {}
otto.load_module = (modulename, callback) ->
if not otto.client[modulename]
console.log "loading module #{modulename}"
$.getScript "/otto.client.#{modulename}.js", ->
console.log "module #{modulename} loaded"
if callback
callback()
else
if callback
callback()
otto.call_module = (modulename, methodname, args...) ->
otto.load_module modulename, ->
console.log "calling otto.client.#{modulename}.#{methodname}(args...)"
otto.client[modulename][methodname](args...)
otto.call_module_ifloaded = (modulename, methodname, args...) ->
# only call the module if it is already loaded, otherwise do nothing
if otto.client[modulename] # don't trigger a automatic module load
otto.call_module modulename, methodname, args...
else
console.log "ignoring call to unloaded module otto.client.#{modulename}.#{methodname}(args...)"
otto.ismoduleloaded = (modulename) ->
return otto.client[modulename]?
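# usage sketch for the on-demand module helpers (patterns taken from elsewhere in this client):
#   otto.load_module 'cubes', -> otto.client.cubes.loader_event data      # load, then call directly
#   otto.call_module 'player', 'connect', otto.mychannel                  # load (if needed) and invoke
#   otto.call_module_ifloaded 'player', 'setvolume', otto.current_volume  # no-op unless already loaded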
# client side version of node's nextTick
window.nextTick = (func) -> setTimeout(func, 0)
# coffeescript friendly version of setTimeout and setInterval
window.timeoutSet = (ms, func) -> setTimeout(func, ms)
window.intervalSet = (ms, func) -> setInterval(func, ms)
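# e.g. (as used elsewhere in this client): timeoutSet 10000, -> n.close()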
$.fn.scrollToBottom = ->
this.animate scrollTop: this.prop('scrollHeight') - this.height(), 100
otto.autosize_clear_cache = -> otto.$autosize_elements_cache = false
otto.autosize_clear_cache()
otto.autosize_adjust = ->
console.log 'autosize_adjust'
if !otto.$autosize_elements_cache
otto.$autosize_elements_cache = $('.autosize')
otto.$autosize_elements_cache.each (index, element) ->
$element = $ element
maxFontSize = $element.data('autosize-max') || $element.height()-4
minFontSize = $element.data('autosize-min') || Math.round($element.height()/2)-4
rightMargin = $element.data('autosize-right-margin') || 0
fontSize = maxFontSize
#while size > minFontSize and element.scrollWidth > element.offsetWidth
# $element.css 'font-size': "#{fontSize}px"
desiredWidth = $element.parent().width()
$resizer = $element.clone()
$resizer.css
'display': 'inline'
'white-space': 'nowrap'
'width': 'auto'
'font-size': "#{fontSize}px"
$resizer.insertAfter($element)
while fontSize > minFontSize and $resizer.width() > desiredWidth
fontSize = fontSize - 1
$resizer.css 'font-size': "#{fontSize}px"
# adjust the top so the text stays centered in the div
heightAdjust = 0
if fontSize > minFontSize
heightAdjust = (maxFontSize - fontSize) / 2
$resizer.remove()
$element.css
'font-size': "#{fontSize}px"
'top': "#{heightAdjust}px"
# from http://stackoverflow.com/questions/6658517/window-resize-in-jquery-firing-multiple-times
# debouncing function from <NAME>
# http://unscriptable.com/index.php/2009/03/20/debouncing-javascript-methods/
# usage:
# $(window).smartresize ->
# code that takes it easy...
do ($ = jQuery, sr = 'smartresize') ->
debounce = (func, threshold, execAsap) ->
timeout = null
debounced = ->
obj = this
args = arguments
delayed = ->
if not execAsap
func.apply(obj, args)
timeout = null
if timeout
clearTimeout timeout
else if execAsap
func.apply(obj, args)
timeout = setTimeout(delayed, threshold || 50)
return debounced
# smartresize
$.fn[sr] = (fn) ->
return if fn then this.bind('resize', debounce(fn)) else this.trigger(sr)
<|start_filename|>otto.client.templates.coffee<|end_filename|>
####
#### client side (body of otto.client.templates.coffee served as /otto.templates.js)
####
# binds to otto.templates (on the client side), not otto.client.templates
# for historical reasons (and brevity)
global.otto.client.templates = ->
$('head').append '<script src="static/js/coffeecup.js">' if not window['coffeecup']?
window.otto = window.otto || {}
window.otto.client = window.otto.client || {}
window.otto.client.templates = true # for otto.load_module's benefit
window.otto.templates = do ->
templates = {}
t = otto.t = templates
ccc = coffeecup.compile
# you can't reference 'templates' or 't' in the compiled functions scope
# (i'm guessing because they are 'eval'ed), use otto.templates instead
add = templates: templates, t: t
t.body_welcome = ccc ->
div '#welcome', otto.t.welcome @
t.body = ccc ->
nav '.channellist-container', ''
div '#mainpage', ''
div '.ouroboros-container', ''
#div '.footer-container', otto.t.footer()
div '.cursor-hider', ''
t.body_reset = ->
$('.channellist-container').empty()
$('#mainpage').empty()
$('.ouroboros-container').empty()
#$('.footer-container').html otto.t.footer()
$('.cursor-hider').empty()
t.welcome = ccc ->
div '.welcome-container', ->
div '.welcome', ->
text otto.t.logo()
div '.greeting', ->
div '.hi', 'hello!'
div '.explain', 'I love to play your music for you, but first I need to scan it'
div '.explain', 'don\'t worry, I won\'t move it or anything like that'
br()
br()
div '.explain', 'I\'ll scan for music in this folder'
div ->
div '.folder', ->
button '.control.medium2.selectfolder', otto.t.icon 'folder'
input '#selectFolder', type: 'file', style: 'display: none' #must match UIDelegate in Otto.py
span '.path', contenteditable: '', @musicroot
div '.explain.note', ->
text '(press '
button '.control.small.selectfolder', otto.t.icon 'folder'
text ' to change this)'
button '.control.large.wide.loadmusic', 'scan'
div '.footer-container', ''
t.initialload = ccc ->
div '.welcome-container', ->
div '.welcome', ->
text otto.t.logo()
div '.initialload-container', otto.t.cubesloader @
t.cubeswithload = ccc ->
div '.cubeswithload-container', otto.t.cubesloader @
t.cubesloader = ccc ->
div '.loadingstatus', otto.t.loadingstatuses @
div '.loadingprogress', ''
div '.loadingcurrent', ''
div '.loadingcubes-container', ->
div '.loadingcubes', ''
t.loadingstatuses = ccc ->
div '.status.begin', ->
button '.control.large.wide.loadmusic2', 'scan'
div '.status.searching', ->
div '.loadingspinner', ->
div otto.t.ouroboros size: 'medium', direction: 'cw', speed: 'fast'
div '.note', 'searching'
div '.status.loading', ->
div '.loadingspinner', ->
div otto.t.ouroboros size: 'medium', direction: 'cw', speed: 'slow'
div '.note', 'scanning'
div '.status.finished', ->
div ->
text 'all finished! press '
button '.control.small.begin', otto.t.icon 'play'
text ' to begin.'
button '.control.medium2.begin', otto.t.icon 'play'
div '.status.nonefound', ->
div ->
div 'sorry, I was unable to find any music I can play'
br()
if @folder
div 'in folder ' + @folder
else
div 'in that folder'
br()
br()
button '.control.large.wide.restartload', 'restart'
div '.status.error', ->
div ->
text 'sorry, I encountered an error while scanning'
button '.control.large.wide.begin.continue', 'continue'
t.cubes = ccc ->
div '.cubes-container', ->
div '.landscape-right', ''
div '.landscape-left', ''
div '.cubes', ->
div '.scene', ''
#div '.resort.control.medium2', otto.t.icon 'cubes'
t.cubelink = ccc ->
div '.cubelink.'+@rowclass, 'data-id': @id, title: @title, ->
div '.cube', style: @style
t.stacklabel = ccc ->
div '.stacklabel', style: @style, @letter
t.countprogress = ccc ->
if @total or @count
div '.countprogress-binder', ->
s = @total
sizePercent = 100
progressPercent = Math.min((@count / @total * 100), 100)
div '.progress-maximum', ->
div '.progress-container', style: "width: #{sizePercent}%;", ->
div '.progress', ->
div '.progress-indicator', style: "width: #{progressPercent}%;", ''
div '.count-container', otto.t.count_widget(@)
t.logo = ccc ->
div '.logo-container', ->
#span '.logo', ''
a '.logo', href: 'http://ottoaudiojukebox.com/', target: '_blank', ->
t.mainpage = ccc ->
if @channel.layout is 'webcast'
text otto.t.channelbar channel: @channel
text otto.t.console()
text otto.t.webcast()
else if @channel.layout is 'featured'
text otto.t.channelbar channel: @channel
text otto.t.console()
text otto.t.featured()
#else if @channel.layout is 'holidays'
# happy holidays
else
text otto.t.channelbar channel: @channel
text otto.t.login()
text otto.t.playing @
text otto.t.thealbum()
div '.ondeckchattoggle-container', ->
div '.ondeck-container', ''
div '.chattoggle-container', ->
button '.control.medium.chattoggle.shy', {title: 'chat'}, otto.t.icon 'chat'
text otto.t.console()
text otto.t.browse @
#div '.footer-backer', ''
templates.console = coffeecup.compile ->
div '.console-container', tabindex: -1, ->
button '.control.medium.chattoggle.shy', otto.t.icon 'close'
div '.output-container', ->
div '.output.scrollkiller', ''
div '.input-container', ->
div '.inputl', ->
#pre '#prompt', ''
div '#prompt', ''
div '.inputr-container', ->
div '#inputr', -> # must be an id, not class
div '#terminal', ->
#textarea '.input', spellcheck: 'false'
#div '.inputcopy', ''
t.chathelp = ccc ->
div '.chathelp', ->
div '/cls - clear screen'
div '/next - next track'
div '/pause - pause playing'
div '/play - resume playing'
div '/nick <name> - change username'
div '/part - leave chat'
div '/help - show commands'
t.chatunknowncommand = ccc ->
div '.chatunknowncommand', ->
'unknown command ' + @prefix + @command
t.channelbar = ccc ->
console.log 'channelbar', @
div '.channelbar-container.reveal', ->
div '.channelbar', ->
div '.channelbar-left', ->
button '.control.medium.channeltoggle.shy', {title: 'channels'}, otto.t.icon 'menu'
div '.channelbar-center', ->
div '.channelname-container', ->
div '.channelname', @channel.fullname
div '.hostname', ->
#host = @host
#if host and host.indexOf(':') > 0
# host = host.substr(0, host.indexOf ':') || @host
#'http://' + host
r = /^(http:\/\/)?([^\/]*)/.exec(document.URL)
host = if r and r.length is 3 then r[2] else ''
host
text otto.t.logo()
div '.topcontrols-container', ->
#input '#fxtoggle', type: 'checkbox', checked: false
#label '#fx.shy', for: 'fxtoggle', ->
# span 'sound cues'
button '.control.medium2.soundfxtoggle.shy', {title: 'sound cues'}, otto.t.icon 'soundfx'
if Notification?
#input '#notificationstoggle', type: 'checkbox', checked: false
#label '#notifications.shy', for: 'notificationstoggle', ->
# span 'notifications'
button '.control.medium2.notificationstoggle.shy', {title: 'notifications'}, otto.t.icon 'notifications'
div '.channelbar-right', ->
#div '.chattoggle-container', ->
# button '.control.medium.chattoggle.shy', otto.t.icon 'chat'
div '.channelbar-lower', ->
div '.listeners-container', ''
templates.webcast = coffeecup.compile ->
div '#webcast-container', ->
div '#webcast-background', ->
img src: '/static/images/8013980828_82a933115b_k.jpg', title: '', alt: ''
div '#webcast-background-attribution', ->
a '#webcast-background-link', href: 'http://www.flickr.com/photos/joi/8013980828', target: '_blank',
"DJ Aaron by <NAME>"
div '#webcast-overlay', ->
div '.autosizeX', 'data-autosize-max': 34, 'data-autosize-min': 19, 'data-autosize-right-margin': 56, ->
otto.autosize_clear_cache()
div ->
span '.webcast-title', "Live Webcast"
#div '#webcast-compatability', ->
# "live broadcast currently works in Chrome and Firefox only"
div '#webcast-chatpointer', ->
"there is a chat button in the upper right"
templates.featuredX = coffeecup.compile ->
div '#archive-container', ->
div '#archive-background', ->
img src: '/static/images/webcast.png', title: '', alt: ''
div '#archive-background-attribution', ->
a '#archive-background-link', href: 'https://www.facebook.com/photo.php?fbid=10150666518958461&set=o.406990045995364&type=1&theater', ->
"photo by [AttributionHere]"
div '#archive-overlay', ->
div '.autosize', 'data-autosize-max': 34, 'data-autosize-min': 19, 'data-autosize-right-margin': 56, ->
otto.autosize_clear_cache()
div ->
span '.archive-title', "Archives"
templates.featured = coffeecup.compile ->
div '#playlist.featured.reveal', ->
t.play_widget = ccc ->
button '#play.control.medium2', {title: 'play/pause'}, otto.t.icon 'play'
t.next_widget = ccc ->
button '#next.control.medium2.shy', {title: 'next'}, otto.t.icon 'kill'
# no longer used
t.lineout_widget = ccc ->
input '#lineouttoggle', type: 'checkbox', checked: false
label '#lineout.shy', for: 'lineouttoggle', ->
span 'server output'
text otto.t.volumelineout_widget
t.volume_widget = ccc ->
div '.volume-container', {title: 'local volume'}, ->
div '.volume', ''
t.volumelineout_widget = ccc ->
div '.volumelineout-container', {title: 'lineout volume'}, ->
div '.volumelineout', ''
t.size_widget = ccc ->
div '.size-widget.shy', ->
button '#size.smaller.control.small', {title: 'smaller'}, otto.t.icon 'smaller'
button '#size.bigger.control.small', {title: 'bigger'}, otto.t.icon 'bigger'
t.currentsong_widget = ccc ->
div '.currenttrack.autosize', {
'data-autosize-max': 34,
'data-autosize-min': 19,
'data-autosize-right-margin': 56 }, ->
otto.autosize_clear_cache()
if @song
span '.gotothere', 'data-id': @song._id, ->
@song.song || 'unknown'
if otto.myusername
button '.stars.control.teeny.shy.n0', 'data-id': @song._id
t.currentalbum_widget = ccc ->
if @song?.album
div '.album.gotothere', 'data-id': @song.albums[0]._id, ->
span @song.album
t.currentyear_widget = ccc ->
span '.year', @song?.year or ''
t.currentartist_widget = ccc ->
if @song?.artist
artist_id = @song.artists[0]?._id or 0
div '.artist.gotothere', 'data-id': artist_id, 'data-albumid': @song.albums[0]._id, ->
# data-albumid is a hack. see artist->fileunder note in gotothere code
@song.artist
t.count_widget = ccc ->
span '#count', ->
if @total or @count
totalstr = "#{@total}"
countstr = "#{@count}"
if countstr.length < totalstr.length
less = totalstr.length - countstr.length
for i in [1..less]
countstr = ' ' + countstr
span '#current-count', countstr
span '#total-count.sep', totalstr
t.time_widget = ccc ->
span '#time', ->
if @total or @current
totalstr = otto.t.format_time @total
currentstr = otto.t.format_time @current, totalstr.length
span '#current-time', currentstr
span '#total-time.sep', totalstr
t.timeprogress_widgets = ccc ->
if @total or @current
div '.timeprogress-binder', ->
s = @total
if s < 10 then s = 10
if s > 3600 then s = 3600
# fun fact: 2397 = 39:57, longest single to reach the UK charts!
#x = s / 2397 * 0.58
#x = s / 3600 * 0.58
x = s / 3600 * 1.718
scale = Math.sqrt( Math.log( x+1 ) )
sizePercent = scale * 100
progressPercent = Math.min((@current / @total * 100), 100)
div '.progress-maximum', {title: 'seek'}, ->
div '.progress-container', style: "width: #{sizePercent}%;", ->
div '.progress', ->
div '.progress-indicator', style: "width: #{progressPercent}%;", ''
div '.time-container', otto.t.time_widget(@)
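# sizing note: with x = s/3600 * 1.718, a full-hour track gives x+1 ≈ e, so
# scale = sqrt(ln(e)) = 1 and the bar spans ~100% of the maximum width;
# shorter tracks get progressively narrower bars on a roughly logarithmic curve.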
t.channel_status_errata_widget = ccc ->
div '.time-container', ->
div '.time', ->
if @status.time
times = @status.time.split ':'
text otto.t.time_widget current: times[0], total: times[1]
#div '.audio', @status.audio || ''
div '.bitrate', if @status.bitrate then @status.bitrate + 'kbps' else ''
t.owner_widget = ccc ->
owner = ''
if @song? and @song.owners? and @song.owners[0]? and @song.owners[0].owner?
owner = @song.owners[0].owner
span '.owner', owner
t.requestor_widget = ccc ->
classstr = ''
if @nodecorations
if @song?.requestor
span '.requestor', @song.requestor.split('@')[0]
else
if @song? and @song.owners? and @song.owners[0]? and @song.owners[0].owner?
classstr = '.sep'
if @song?.requestor
span classstr, 'requested by '
span '.requestor', @song.requestor
t.filename_widget = ccc ->
if @song?.filename
span '.filename.shy', @song.filename
t.currentcover_widget = ccc ->
if @song
div '.thumb.px300.gotothere', { 'data-id': @song._id }, ->
if @song.cover
img
height: 300
width: 300
#src: "/image/300?id=#{@song.cover}"
src: "/image/orig?id=#{@song.cover}"
title: @song.album
else
div '.noimg.px300', ->
div @song.album
div '.noimgspacer', ''
div @song.artist
else
div '.thumb.px300', {}, ->
t.enqueue_widget = ccc ->
button '.enqueue.control.teeny.shy', ''
t.unqueue_widget = ccc ->
addtoclassstr = @addtoclassstr || ''
button '.btn.teeny.control.unqueue'+addtoclassstr, ''
t.currenttrack = ccc ->
div '.currenttrack-binder', ->
div '.currentsong-container', otto.t.currentsong_widget(@)
div '.timeprogress-container', otto.t.timeprogress_widgets(@)
div '.currentalbum-container', otto.t.currentalbum_widget(@)
div '.currentyear-container', otto.t.currentyear_widget(@)
div '.currentartist-container', otto.t.currentartist_widget(@)
div '.currenterrata-container', ->
div '.owner-container', otto.t.owner_widget(@)
div '.requestor-container', otto.t.requestor_widget(@)
div '.currentcover-container', otto.t.currentcover_widget(@)
div '.filename-container', ->
div '.filename-clipper', otto.t.filename_widget(@)
t.playing = ccc ->
div '.playing-container.reveal', ->
if otto.haslineout and otto.localhost
div '.play-container', otto.t.play_widget
div '.shy', otto.t.volumelineout_widget
else
#button '#connect.control.large.'+@channel.type, otto.t.icon 'disconnected'
#button '#connect.control.large.'+@channel.type, ->
div '.connect-container', ->
button '#connect.control.large', { title: 'connect/disconnect' }, ->
#img src: 'static/images/disconnected.svg', height: 20, width: 20
text otto.t.icon 'connect'
div '.shy', otto.t.volume_widget
size = @size || 'size1'
div ".size-container.#{size}", otto.t.size_widget
div ".next-container.#{size}", otto.t.next_widget
div ".currenttrack-container.#{size}", otto.t.currenttrack(@)
t.thealbum = ccc ->
div '.thealbum-container.reveal', ->
''
templates.browse = coffeecup.compile ->
div '.browse-container', ->
div '.browsecontrols-container', ->
div '.search-container', ->
form '#searchform.searchform', method:'get', action:'', ->
input '#searchtext.searchtext', type:'text', name:'search', placeholder: 'search', autocorrect: 'off', autocapitalize: 'off'
input '.search_button.buttonless', type:'submit', value:'Search'
div '.letterbar-container', ->
ul '.letterbar', ->
#bigwarning = if @largedatabase then '.warn.big' else '' # bzzz! not passed in FIXME
bigwarning = ''
li '.letter.control.shownewest.gap', {title: 'newest'}, otto.t.icon 'newest'
li '.letter.control.showall.gap'+bigwarning, {title: 'all'}, otto.t.icon 'all'
if not @largedatabase # need to make it faster, times out on very large databases FIXME
li '.letter.control.showusers.gap', {title: 'users'}, otto.t.icon 'users'
li '.letter.control.showstars.gap', {title: 'starred'}, otto.t.icon 'star'
li '.letter.control.showcubes.gap'+bigwarning, {title: 'cubes'}, otto.t.icon 'cubes'
# other fun character considerations: ⁂ ? № ⁕ ⁖ ⁝ ⁞ ⃛ ⋯ +⚂ ⚐ ⚑
# someday add back: st va
li '.letter.gap', 'A'
for letter in 'B C D E F G H I J K L M N O P Q R S T U V W X Y Z # ⋯'.split(' ')
if letter is '#'
li '.letter', {title: 'numbers'}, letter
else if letter is '⋯'
li '.letter', {title: 'other'}, letter
else
li '.letter', letter
#li '.letter.gap.warn.beta', '/'
#li '.letter.showlists.gap', '✓'
div '.browseresults-container', ''
t.footer = ccc ->
div '.logo-container.footer-logo-container', ->
span '.logo.footer-logo', ''
templates.login = coffeecup.compile ->
div '.login-container', ->
div '.login', ->
form '#loginform.loginform', method:'get', action:'', ->
span '.loginlabel', 'To browse and select songs '
# note the homograph unicode cyrillic 'a' in 'email' in the placeholder string
# this is to keep safari from prompting for an auto fill. sigh.
input '#logintext.logintext', type:'text', placeholder: 'enter your emаil / username here', autocorrect: 'off', autocapitalize: 'off', autocomplete: 'off', autofill: 'off'
input '.login_button.buttonless', type:'submit', value:'Search'
templates.listeners = coffeecup.compile ->
span '.listeners', ->
count=0
othercount=0
for id in @listeners
if @listeners[id].socketids or @listeners[id].streams
#console.log @listeners[id].channelname
if @listeners[id].channelname and @listeners[id].channelname == otto.mychannel
count++
else
othercount++
if not count
label = 'no listeners'
else
label = count + ' ' + 'listener' + otto.t.plural(count)
span '.count', label
if count
span '.sep', ''
first = true
us = null
for id in @listeners
if @listeners[id].socketids or @listeners[id].streams
for sid of @listeners[id].socketids
if sid is @socketid
us = id
if us and @listeners[us]
text otto.t.format_listener listener: @listeners[us], first: first, me: true
first = false
for id in @listeners
if id is us
continue
if @listeners[id].socketids or @listeners[id].streams
if @listeners[id].channelname and @listeners[id].channelname == otto.mychannel
text otto.t.format_listener listener: @listeners[id], first: first, me: false
first = false
if othercount
label = othercount + ' ' + 'other listener' + otto.t.plural(othercount)
span '', ' | '
span '.count', label
for id in @listeners
if id is us
continue
if @listeners[id].socketids or @listeners[id].streams
if @listeners[id].channelname and @listeners[id].channelname != otto.mychannel
text otto.t.format_listener listener: @listeners[id], first: first, me: false, showchannel: true
first = false
templates.format_listener = coffeecup.compile ->
name = @listener.user || @listener.host || @listener.address
if @shortname
name = name.split('@')[0]
inchat = no
typing = no
focus = no
idle = yes
for id of @listener.socketids
socket = @listener.socketids[id]
if socket
inchat = yes if socket.inchat? and socket.inchat
typing = yes if socket.typing? and socket.typing
focus = yes if socket.focus? and socket.focus
if socket.idle?
idle = no if not socket.idle
if idle
idle = 1
for id of @listener.socketids
socket = @listener.socketids[id]
if socket
idle = socket.idle if socket.idle > idle
classes = ''
classes += '.streaming' if @listener.streams
classes += '.inchat' if inchat
classes += '.typing' if typing
classes += '.idle' if idle or not focus
classes += '.thisisme' if @me
classes += '.sep' if not @first
title = ''
title += 'Streaming' if @listener.streams
if @listener.streams > 1
title += "X#{@listener.streams}"
if inchat
title += ', ' if title != ''
title += 'In chat'
if typing
title += ', ' if title != ''
title += 'Typing'
if idle or not focus
title += ', ' if title != ''
if idle
minutes = Math.floor( (Date.now() - parseInt(idle)) / (60 * 1000) )
if minutes < 120
title += "Idle for #{minutes} minutes"
else
title += 'Idle since ' + new Date(idle).toString()
else
title += 'Window not focused'
if @listener.host || @listener.address # i guess these are not being set? FIXME
title += ', ' if title != ''
title += 'Connected from ' + (@listener.host || @listener.address)
div '.listener'+classes, title: title, ->
text name
if @showchannel
span '.channel', ' (' + @listener.channelname + ')'
if @me
if @listener.user
span '.thisisme', ->
span '.you', '(you)'
button '.control.small.logout', {title: 'logout'}, otto.t.icon 'logout'
t.format_listeners_for_channel_in_channelbar = ccc ->
span '.listeners', ->
first = true
for id in @listeners
if @listeners[id].socketids or @listeners[id].streams
if @listeners[id].channelname and @listeners[id].channelname == @channelname
text otto.t.format_listener listener: @listeners[id], first: first, me: false, shortname: true
first = false
templates.channellist = coffeecup.compile ->
div '.channellistheader', ->
button '.control.medium.channeltoggle', otto.t.icon 'close'
ul ->
for i in [1..1]
for channel in @channellist
classes = '.changechannel'
classes = classes + '.currentchannel.open' if channel.name is otto.mychannel
li classes, 'data-channelname': channel.name, ->
button '.channelsettings.control.small.shy', {title: 'more info'}, otto.t.icon 'info'
div '.channelselect', ->
div '.channelname.autosize', {
'data-autosize-max': 20,
'data-autosize-min': 12,
'data-autosize-right-margin': 0 }, ->
otto.autosize_clear_cache()
channel.fullname
div '.channellisteners', ->
if @listeners
# if we reactivate the count we should consider omitting it when it's 1
#span '.listeners.count', count || ''
text otto.t.format_listeners_for_channel_in_channelbar listeners: @listeners, channelname: channel.name
button '.channeloutput.control.small.shy', {title: 'toggle lineout'}, otto.t.icon 'output'
div '.settings', ->
#button '.channelsettings.control.small', otto.t.icon 'close'
button '.channelplay.control.medium2', {title: 'play/pause'}, otto.t.icon 'play'
text otto.t.volumelineout_widget()
div '.channelerrata-container', ''
#button '.channelfork.control.small', {title: 'fork'}, otto.t.icon 'fork'
button '.crossfade.control.small', {title: 'crossfade'}, 'CF'
button '.replaygain.control.small', {title: 'replay gain'}, 'RG'
templates.page_it_out = (items, pagesize, lazychunksize, element, render) ->
pages = 0
pagestart = 0
# someday we should get clever here about not making pages with too few items
# we could also consider waiting to construct pages until they are scrolled to
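# rough shape of what the loop below builds (a sketch, not authoritative):
# .page
#   .lazychunk  (lazychunksize items each)
#   .lazychunk
# .page
#   ...
# e.g. 500 items with pagesize 200 and lazychunksize 10 -> 3 pages (the last
# one short), each page split into chunks of 10 for the lazyload scanning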
while pagestart < items.length
pageitems = items.slice(pagestart, pagestart+pagesize)
chunkstart = 0
# further break the page into chunks to make it easier for lazyload searching
element '.page', ->
while chunkstart < pageitems.length
element '.lazychunk', ->
chunk = pageitems.slice(chunkstart, chunkstart+lazychunksize)
for item in chunk
# call the supplied render routine on each item
render item
chunkstart += lazychunksize
pagestart += pagesize
pages += 1
templates.startswith = coffeecup.compile ->
empty = true
otto.t.page_it_out @data, 200, 10, div, (item) ->
empty = false
text otto.t.artist item: item
if empty
div '.none', 'Nothing filed under ' + @params.value
templates.allalbums = coffeecup.compile ->
div '.thumbnails', ->
empty = true
otto.t.page_it_out @data, 300, 100, span, (album) ->
empty = false
div '.albumall', ->
div '.thumb.px120.expand', 'data-id': album._id, ->
if album.cover
img '.albumimg.lazy', height: 120, width: 120, \
#src: 'static/images/gray.gif', \
#src: 'data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==', \
#src: 'static/images/clear.gif', \
src: 'static/images/clear.png', \
'data-original': "/image/120?id=#{album.cover}", \
title: album.album
else
div '.noimg.px120', -> album.album + '<br>' + album.fileunder[0].name + '<br>' + album.owners[0].owner + '<br>' + album.year #+ album.genre
if otto.myusername
button '.stars.control.teeny.shy.n0', 'data-id': album._id
if empty
div '.none', 'No albums loaded'
templates.artist = coffeecup.compile ->
div '.artistlist', ->
if not @nostars
if otto.myusername
button '.stars.control.teeny.shy.n0', 'data-id': @item._id
div '.artistname-container', ->
span '.artistname.expand', {'data-id': @item._id }, @item.name # was @item.artist before fileunder
ul '.thumbnails', ->
if @item.albums?
albumorder = otto.t.orderalbums @item.albums
for album in albumorder
li '.h.thumb.px40.expand', 'data-id': album._id, 'data-container': @item._id, ->
if album.cover
img '.albumimg.lazy', src: 'static/images/clear.png', height: 40, width: 40, 'data-original': "/image/40?id=#{album.cover}", title: album.album
else
div '.noimg.px40', -> album.album
templates.album = coffeecup.compile ->
expand = if @noexpand then '' else '.expand'
div '.albumlist', ->
div '.thumbnails', ->
div '.thumb.px40'+expand, 'data-id': @item._id, 'data-container': @item._id, ->
if @item.cover
if @nolazy
img '.albumimg', src: "/image/40?id=#{@item.cover}", height: 40, width: 40, title: @item.album
else
img '.albumimg.lazy', src: 'static/images/clear.png', height: 40, width: 40, 'data-original': "/image/40?id=#{@item.cover}", title: @item.album
else
div '.noimg.px40', -> @item.album
span '.albumname'+expand, 'data-id': @item._id, ->
artistinfo = otto.compute_artistinfo @item
span @item.album
if artistinfo.single
span '.artist.sep', artistinfo.single
if @item.year?
span '.sep', @item.year
if otto.myusername and not @nostars
button '.stars.control.teeny.shy.n0', 'data-id': @item._id
templates.orderalbums = (albums) ->
albumorder = []
variousorder = []
for album in albums
if not album.artistinfo?
album.artistinfo = otto.compute_artistinfo album
if album.artistinfo.various
variousorder.push album
else
albumorder.push album
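# sorter below: albums with years come first (ordered numerically by year);
# ties and yearless albums fall back to a locale-aware title compare, and the
# various-artist albums collected above are sorted the same way but appended
# after the rest by the concat at the end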
sorter = (a, b) ->
#if a.songs?[0]?.year? and b.songs?[0].year?
if a.year? and b.year?
ayear = Number(a.year)
byear = Number(b.year)
if ayear < byear
return -1
else if ayear > byear
return 1
else
if a.album? and b.album?
return a.album.localeCompare b.album, {sensitivity: "base", numeric: true}
else if a.album?
return -1
else if b.album?
return 1
else return 0
else if a.year?
return -1
else if b.year?
return 1
else
if a.album? and b.album?
return a.album.localeCompare b.album, {sensitivity: "base", numeric: true}
else if a.album?
return -1
else if b.album?
return 1
else return 0
albumorder.sort(sorter)
variousorder.sort(sorter)
return albumorder.concat(variousorder)
templates.albums_details = coffeecup.compile ->
div '.albumlist-container', { 'data-id': @_id }, ->
button '.close.control.tiny.shy', otto.t.icon 'close'
#if @data.length > 1
button '.close.lower.control.tiny.shy', otto.t.icon 'close'
div '.albumlist', ->
albumorder = otto.t.orderalbums @data
had_various = false
had_nonvarious = false
for album in albumorder
if album.artistinfo.various
if had_nonvarious and not had_various
div '.varioussep', ''
had_various = true
else
had_nonvarious = true
text otto.t.album_details album: album, fileunder: @fileunder
templates.album_details = coffeecup.compile ->
if not @album.artistinfo?
@album.artistinfo = otto.compute_artistinfo @album
div '.albumdetails', ->
div '.albumcover-container', ->
if @album.cover
div '.thumb.px200', ->
#img src: "/image/300?id=#{@album.cover}", alt: @album.album, title: @album.album
img src: "/image/orig?id=#{@album.cover}", alt: @album.album, title: @album.album
else
div '.noimg.px200', ->
if @album.artistinfo.various
span @album.artistinfo.various
else
for artist in @album.artistinfo.all
span -> artist
br()
span @album.album
div '.stars-container', ->
if otto.myusername
button '.stars.control.teeny.shy.n0', 'data-id': @album._id
div '.year-container', ->
if @album.years?
format_years = @album.years[0]
for year in @album.years[1..]
format_years += ', '+year
div '.year', format_years
else if @album.year?
div '.year', @album.year
div '.albuminfo', ->
div '.album', ->
span ".id#{@album._id}", 'data-id': @album._id, ->
span @album.album
if otto.myusername
button '.stars.control.teeny.shy.n0', 'data-id': @album._id
if @album.artistinfo.various
div '.artist', @album.artistinfo.various
else
for artist in @album.artistinfo.all
div '.artist', -> artist
if @album.owners?[0]?.owner
div '.owner', -> @album.owners[0].owner
div '.albumsongs.cf', ->
table ->
for song in @album.songs
tr -> td ->
text otto.t.enqueue_widget()
span ".id#{song._id}", {'data-id': song._id}, song.song
if @album.artistinfo.various or song.artist isnt @album.artistinfo.primary
# this doesn't work when the fileunder name has been transformed in any way FIXME
if @album.artistinfo.various and @fileunder and song.artist is @fileunder.name
span '.subartist.highlight.sepgray', song.artist
else
span '.subartist.sep', song.artist
if otto.myusername
button '.stars.control.teeny.shy.n0', 'data-id': song._id
#button '.stars.control.teeny.shy.n0', {'data-id': song._id}, otto.t.icon 'star'
span '.time.sep.shy', otto.t.format_time(song.length)
div '.albumdir.dirpath.shy', ->
@album.dirpath
templates.ondeck = coffeecup.compile ->
table '.ondeck', ->
# the rest of the queue, on deck
for song in @songs
tr ->
td '.requestor-container', otto.t.requestor_widget( song: song, nodecorations: true )
td ->
text otto.t.unqueue_widget( addtoclassstr: '.shy' )
addtoclassstr = ''
if song.requestor
addtoclassstr='.requested'
span ".song.id#{song._id}#{addtoclassstr}", { 'data-id': song._id, 'data-mpdqueueid': song.mpdqueueid }, song.song
span '.album.sep', song.album
span '.artist.sep', song.artist
span '.sep', otto.t.format_time(song.length)
span '.shy', ->
if song.owners
owner = song.owners[0].owner
else
owner = ''
span '.owner.sep', -> owner
span '.filename.sep', -> song.filename
templates.featured = coffeecup.compile ->
ul '.ondeck', ->
# the rest of the queue, on deck
for song, n in @songs
li ->
if song.requestor
span '.requestor', ->
div -> song.requestor
else
span '.requestor', -> ''
if song.nowplaying
span '.playing.control.teeny', otto.t.icon 'play'
span '.song.currenttrack', -> song.song
else
button '.play.control.teeny.shy', id: song.mpdqueueid, 'data-position': n, ->
text otto.t.icon 'play'
span '.song', song.song
span '.album.sep', song.album
span '.artist.sep', song.artist
span '.sep', otto.t.format_time(song.length)
span '.shy', ->
if song.owners
owner = song.owners[0].owner
else
owner = ''
span '.owner.sep', -> owner
span '.filename.sep', -> song.filename
templates.alert = coffeecup.compile ->
div class: 'alert alert-info', ->
span @message
br()
br()
button '#ok.runself.control.large', 'ok'
text ' '
button '#cancel.runself.control.large', 'cancel'
templates.search = coffeecup.compile ->
div '.search', ->
if not @data.fileunders.length and not @data.albums.length and not @data.songs.length
div class: 'noresults'
else
if @data.fileunders.length
div class: 'section', 'Artists'
div ->
for fileunder in @data.fileunders
#li -> fileunder.name
div -> otto.t.artist item: fileunder
if @data.albums.length
div class: 'section', 'Albums'
div class: 'albums', ->
for album in @data.albums
div -> otto.t.album item: album
if @data.songcomposers? and @data.songcomposers.length
div '.section', 'Composers'
ul class: 'songs', ->
for song in @data.songcomposers
filename = song.filename
li ->
button '.enqueue.control.teeny.shy', 'data-oid': song.oid
composers = ''
if song.tags['©wrt']
composers = song.tags['©wrt']
composers = composers.replace /^\[u\'/, ''
composers = composers.replace /\'\]$/, ''
if song.tags['TCOM']
if composers
composers = composers + ', '
composers = composers + song.tags['TCOM']
span "[#{composers}] "
span id: song.oid, class: 'song', -> song.song
span class: 'sep'
span class: 'album', -> song.album
span class: 'sep'
span class: 'artist', -> song.artist
if otto.myusername
button '.stars.control.teeny.shy.n0', 'data-oid': song.oid
span class: 'shy', ->
span class: 'sep'
span -> otto.t.format_time(song.length)
if song.owners
owner = song.owners[0].owner
else
owner = ''
span class: 'sep'
span class: 'queue owner', -> owner
span class: 'sep'
span class: 'queue filename', -> filename
songs_list = {}
if @data.songs.length
div class: 'section', 'Songs'
ul class: 'songs', ->
for song in @data.songs
songs_list[song._id] = true
li ->
text otto.t.enqueue_widget()
span ".song.id#{song._id}", { 'data-id': song._id }, song.song
span '.album.sep', song.album
span '.artist.sep', song.artist
if otto.myusername
button '.stars.control.teeny.shy.n0', 'data-id': song._id
span class: 'shy', ->
span class: 'sep'
span -> otto.t.format_time(song.length)
owner = ''
if song.owners
owner = song.owners[0].owner
span '.owner.sep', owner
span '.filename.sep', song.filename
other_cleaned = []
if @data.other
for song in @data.other
if songs_list[song._id]
continue
other_cleaned.push(song)
if other_cleaned.length
div 'Other'
ul class: 'my-new-list', ->
for song in other_cleaned
li ->
text otto.t.enqueue_widget()
span ".id#{song._id}", { 'data-id': song._id }, song.song
span '.sep', song.album
span '.sep', song.artist
if otto.myusername
button '.stars.control.teeny.shy.n0', 'data-id': song._id
span '.filename.sep.shy', song.filename
templates.newest_albums = coffeecup.compile ->
div '.search', ->
div '.section', 'Newest Albums'
empty = true
lasttimestamp = false
div '.albums', ->
owner = ''
for album in @data
empty = false
if lasttimestamp
interval = lasttimestamp - album.timestamp
else
interval = 0
lasttimestamp = album.timestamp
if album.owners?
if owner isnt album.owners[-1..][0].owner
owner = album.owners[-1..][0].owner
div '.newestowner', owner + ' ' + otto.t.format_timestamp(album.timestamp)
else if interval > 3600000
div '.newestowner', owner + ' ' + otto.t.format_timestamp(album.timestamp)
else if owner
owner = ''
div '.newestowner', '' + otto.t.format_timestamp(album.timestamp)
else if interval > 3600000
div '.newestowner', owner + ' ' + otto.t.format_timestamp(album.timestamp)
div -> otto.t.album item: album
if empty
div '.none', 'None'
otto.event_last_display_time = false
templates.event = coffeecup.compile ->
div '.event', ->
timestamp = new Date(@event.timestamp)
display_time = otto.t.format_time(timestamp.getHours() * 60 + timestamp.getMinutes(), 5)
if display_time isnt otto.event_last_display_time
span '.timestamp', display_time
otto.event_last_display_time = display_time
else
span '.timestamp', ''
#span class: 'id', -> @event.id
if @event.user
short_username = @event.user.split('@')[0]
span '.user', -> short_username
#span '.channel', -> @event.channel
if @event.message?
if otto.showdown_converter?
message_markeddown = otto.showdown_converter.makeHtml(@event.message)
else
message_markeddown = @event.message
span '.message', message_markeddown
#text message_markeddown
else
span '.name', -> @event.name
templates.event_text = (event) ->
text = ""
if event.user
short_username = event.user.split('@')[0]
else
short_username = 'unknown'
text += short_username + ' '
#text += event.channel
if event.message?
#if otto.showdown_converter?
# message_markeddown = otto.showdown_converter.makeHtml(@event.message)
#else
# message_markeddown = @event.message
#text += message_markeddown
text += event.message
else
text += event.name
templates.loader = coffeecup.compile ->
div class: 'event loader', ->
span class: 'message', "scan: #{@event}"
templates.show_users = coffeecup.compile ->
div '.userlist', ->
empty = true
if @data
table ->
for user in @data
empty = false
tr '.section', ->
td '.owner', user.owner
td if user.songs then "#{user.songs} song" + otto.t.plural(user.songs)
td if user.albums then "#{user.albums} album" + otto.t.plural(user.albums)
td if user.artists then "#{user.artists} artist" + otto.t.plural(user.artists)
td "#{user.stars} starred item" + otto.t.plural(user.stars)
if empty
div '.none', 'None'
templates.show_stars = coffeecup.compile ->
div '.starslist', ->
nostar = true
if @data
for user of @data
starlist = []
for staritem in @data[user]
if staritem.rank > 0
starlist.push staritem
if starlist and starlist.length
nostar = false
div ' '
div '.section', ->
span user
span '.sep', ->
span starlist.length.toString() + ' item' + otto.t.plural(starlist.length)
if otto.myusername
span class: 'shy', -> button class: 'download btn teeny control', 'data-id': user._id, -> i class: 'download-alt'
div '.songs', -> # .songs? that isn't good FIXME
if starlist and starlist.length
for item in starlist
switch item.otype
when 40 then addclass = '.starredartist'
when 20 then addclass = '.starredalbum'
else addclass = '.starredsong'
div '.starreditem'+addclass, ->
if otto.myusername and user is otto.myusername
button '.stars.control.teeny.shy.n0', 'data-id': item._id
else
button ".stars.control.teeny.n#{item.rank}.immutable.noupdate", 'data-id': item._id
if item.otype == 40
text otto.t.artist item: item, nostars: true
else if item.otype == 20
text otto.t.album item: item, nostars: true
else
song = item
text otto.t.enqueue_widget()
span ".song.id#{song._id}", { 'data-id': song._id }, song.song
span '.album.sep', song.album
span '.artist.sep', song.artist
span '.sep.shy', otto.t.format_time(song.length)
if song.owners
owner = song.owners[0].owner
span '.owner.sep.shy', owner
span '.filename.sep.shy', song.filename
if nostar
div '.nostars', 'Nothing starred yet'
templates.dirbrowser = coffeecup.compile ->
div '.dirbrowser', ->
ul '.path'
div '.subdirs'
div '.contents'
templates.dirbrowser_subdir = coffeecup.compile ->
ul ->
for dir in @data.dirs
li class: 'subdir', id: dir._id, 'data-filename': dir.filename, ->
dir.filename+'/'
templates.dirbrowser_item = coffeecup.compile ->
for dir in @data
li class: 'path', id: dir._id, -> dir.filename+'/'
templates.ouroboros = coffeecup.compile ->
div '.ouroboros', ->
modifiers = ''
modifiers += '.' + (@size || 'medium') # small, medium, large
modifiers += '.' + (@speed || 'normal') # slow, normal, fast
modifiers += '.' + (@direction || 'cw') # cw, ccw
modifiers += '.' + (@color || 'gray') # gray, blue
modifiers += '.' + (@background || 'dark') # dark, black
div ".ui-spinner#{modifiers}", ->
span '.side.left', ->
span '.fill', ''
span '.side.right', ->
span '.fill', ''
t.format_time = (seconds, minlen=4) ->
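# examples worked from the code below: format_time(75) -> '1:15',
# format_time(75, 5) -> '01:15' (minlen > 4 pads the minutes),
# format_time(3700) -> '1:01:40' (the hour field shows when hours > 0 or minlen > 6)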
hours = parseInt(seconds / 3600)
seconds = seconds % 3600
minutes = parseInt(seconds / 60)
seconds = parseInt(seconds % 60)
if seconds < 10
seconds = '0' + seconds
else
seconds = '' + seconds
if minutes < 10 and (hours > 0 or minlen > 4)
minutes = '0' + minutes
else
minutes = '' + minutes
formatted = ''
if hours or minlen > 6
formatted = "#{hours}:#{minutes}:#{seconds}"
else
formatted = "#{minutes}:#{seconds}"
t.format_timestamp = (timestamp) ->
if timestamp
#d = new Date(timestamp * 1000)
#hours = d.getHours();
#minutes = d.getMinutes();
#seconds = d.getSeconds();
#day = d.getDate()
#month = d.getMonth()
#return moment(timestamp).fromNow() # i like this one
return moment(timestamp).format('ddd MMM Do YYYY ha')
else
return ''
templates.icon = coffeecup.compile ->
switch String @
when 'play' then span '.icon-play2', ''
when 'connect' then span '.icon-play', ''
when 'pause' then span '.icon-pause', ''
#when 'kill' then span '.icon-remove', ''
when 'kill' then span '.icon-minus', ''
when 'menu' then span '.icon-menu', ''
when 'chat' then span '.icon-bubble2', ''
when 'bigger' then span '.icon-zoomin', ''
when 'smaller' then span '.icon-zoomout', ''
when 'newest' then span '.icon-download', ''
when 'all' then span '.icon-grid', ''
when 'star' then span '.icon-star', ''
when 'users' then span '.icon-users', ''
when 'cubes' then span '.icon-stack2', ''
when 'close' then span '.icon-close', ''
when 'enqueue' then span '.icon-plus', ''
when 'unqueue' then span '.icon-minus', ''
when 'logout' then span '.icon-cancel-circle', ''
when 'fork' then span '.icon-fork', ''
when 'tag' then span '.icon-tag', ''
when 'tags' then span '.icon-tags', ''
#when 'output' then span '.icon-volume-medium', ''
when 'output' then span '.icon-volume-mute', ''
when 'outputmute' then span '.icon-volume-mute2', ''
when 'outputsel' then span '.icon-volume-mute', ''
when 'notifications' then span '.icon-bubble3', ''
when 'soundfx' then span '.icon-lightning', ''
when 'folder' then span '.icon-folder-open', ''
when 'info' then span '.icon-info', ''
else span '.icon-blocked', ''
t.plural = (count, single, plural) ->
return if count is 1 then single || '' else plural || 's'
console.log 'templates defined'
return templates
<|start_filename|>otto.coffee<|end_filename|>
fs = require 'fs'
path = require 'path'
#posix = require 'posix'
posix = require 'fs'
#require('epipebomb')()
global.otto = otto = {} # our namespace
require './otto.misc' # attaches to global.otto.misc
otto.MUSICROOT_SEARCHLIST =
[
{ dir: '~/Music', strip: 'Music' },
{ dir: '/otto/u', strip: false }
]
otto.SECRET = 'FiiY3Xeiwie3deeGahBiu9ja' # need to randomly generate this for each install FIXME
otto.OTTO_ROOT = path.dirname(fs.realpathSync(__filename))
otto.OTTO_BIN = otto.OTTO_ROOT + '/bin'
otto.OTTO_LIB = otto.OTTO_ROOT + '/lib'
if process.platform is 'darwin'
library = otto.misc.expand_tilde '~/Library/Otto'
if otto.misc.is_dirSync(library + '/var') # for backwards compatibility
otto.OTTO_VAR = library + '/var'
else
otto.OTTO_VAR = library
else
otto.OTTO_VAR = otto.OTTO_ROOT + '/var'
otto.misc.assert_is_dir_or_create_itSync otto.OTTO_VAR
otto.OTTO_VAR_MPD = otto.OTTO_VAR + '/mpd'
otto.OTTO_VAR_MPD_MUSIC = otto.OTTO_VAR_MPD + '/music'
otto.MPD_EXECUTABLE = otto.OTTO_BIN + '/mpd'
otto.misc.assert_is_dir_or_create_itSync otto.OTTO_VAR_MPD
otto.misc.assert_is_dir_or_create_itSync otto.OTTO_VAR_MPD_MUSIC
otto.OTTO_VAR_MONGODB = otto.OTTO_VAR + '/mongodb'
otto.MONGOD_EXECUTABLE = otto.OTTO_BIN + '/mongod'
otto.misc.assert_is_dir_or_create_itSync otto.OTTO_VAR_MONGODB
# we should probably also test for the BINs
if process.env['USER'] is 'root'
# safer to not run as root
# and as root, mpd can't use file:///
# also: mpd can not use file:/// under Windows at all (not related to root)
# we need an option for which plain user switch to
# or we could just exit and recommend people use supervisord
# (the python process manager, not to be confused with node-supervisor)
otto.OTTO_SPAWN_AS_UID = posix.getpwnam('jon').uid # oh boy. FIXME
#if process.env['USER'] is 'root'
# try
# safeuser = 'jon'
# safeuserpw = posix.getpwnam(safeuser)
# console.log "switching to user '#{safeuser}'"
# process.setgid safeuserpw.gid
# process.setuid safeuserpw.uid
# console.log "new uid: #{process.getuid()}"
# catch err
# console.log 'failed to drop root privileges: ' + err
if process.platform is 'darwin'
try
posix.setrlimit('nofile', { soft: 10000, hard: 10000 })
catch error
#console.log '###'
#console.log '### setting file limit failed: ' + error
#console.log '###'
channels_json = otto.OTTO_VAR + '/channels.json'
if fs.existsSync channels_json
try
console.log "loading channels.json file (#{channels_json})"
otto.channelinfolist = JSON.parse(fs.readFileSync channels_json, 'utf8')
#console.log 'channelinfolist', otto.channelinfolist
catch error
console.log "### error reading channels.json file (#{channeld_json}): #{error}"
console.log '### using default channels'
otto.channelinfolist = false
if not otto.channelinfolist
otto.channelinfolist = [
{name: 'main', fullname: 'Main Channel', type: 'standard', layout: 'standard'}
{name: 'second', fullname: 'Second Channel', type: 'standard', layout: 'standard'}
{name: 'third', fullname: 'Third Channel', type: 'standard', layout: 'standard'}
]
require './otto.misc' # attaches to global.otto.misc
require './otto.events' # attaches to global.otto.events
require './otto.db' # etc...
require './otto.mpd'
require './otto.listeners'
require './otto.channels'
require './otto.loader'
require './otto.index'
require './otto.zeroconf'
require './otto.main'
# client side
require './otto.client' # must be first
require './otto.client.templates'
require './otto.client.misc'
require './otto.client.player'
require './otto.client.soundfx'
require './otto.client.cubes'
require './otto.server'
otto.exiting = false
otto.main()
<|start_filename|>otto.index.coffee<|end_filename|>
_ = require 'underscore'
coffeecup = require 'coffeecup'
otto = global.otto
global.otto.index = do -> # note 'do' calls the function
index = {}
index.links = [
{ rel: 'icon', type: 'image/png', href: 'static/images/favicon.png' }
#{ rel: 'shortcut icon', href: 'static/images/favicon.ico/favicon.ico' }
#{ rel: 'apple-touch-icon', href: 'static/images/ottoicon1024.png' }
{ rel: 'apple-touch-icon-precomposed', href: 'static/images/ottoicon1024.png' } # no reflective shine
{ rel: 'apple-touch-startup-image', href: 'static/images/ottoiphonesplash.png' }
]
index.metas = [
{ name: 'apple-mobile-web-app-capable', content: 'yes' }
{ name: 'mobile-web-app-capable', content: 'yes' } # android?
#{ name: 'viewport', content: 'width=device-width' }
#{ name: 'viewport', content: 'width=1470, user-scalable=no' }
#{ name: 'viewport', content: 'width=1270' }
{ name: 'viewport', content: 'initial-scale=1.0, user-scalable=no, minimal-ui' }
#{ name: 'apple-mobile-web-app-status-bar-style', content: 'black' }
#{ name: 'apple-mobile-web-app-status-bar-style', content: 'translucent' }
{ name: 'apple-mobile-web-app-status-bar-style', content: 'black-translucent' }
{ name: 'apple-mobile-web-app-title', content: 'Otto Client' }
{ name: 'format-detection', content: 'telephone=no' }
]
index.stylesheets = [
#'static/css/jquery-ui-1.8.17.custom.css'
'static/css/jquery-ui-1.10.3.custom.css'
#'static/css/reset.css'
#'static/css/jquery.terminal.css'
#'static/css/miniAlert.css'
'static/css/addtohomescreen.css'
'static/css/normalize.css'
'static/css/ouroboros.css'
'static/css/mmenu.css'
'static/fonts/icomoon.css' # mmenu.css messes up the icons!
'static/css/otto.css'
]
index.scripts = [
'socket.io/socket.io.js'
'zappa/jquery.js'
'zappa/zappa.js'
#'zappa/sammy.js'
#'static/js/jquery-ui-1.8.17.custom.min.js'
'static/js/jquery-ui-1.10.3.custom.min.js'
'static/js/jquery.scrollstop.js'
'static/js/jquery.mousewheel.js'
'static/js/jquery.idle-timer.js'
'static/js/jquery.lazyload.js'
'static/js/jquery-migrate-1.2.1.js'
'static/js/jquery.terminal.js'
'static/js/jquery.mmenu.min.js'
'static/js/restive.min.js'
'static/js/moment.min.js'
'static/js/addtohomescreen.min.js'
'static/js/toe.js'
'static/js/prefixfree.js'
'static/js/modernizr.custom.04062.js'
#'static/js/miniAlert.js'
'static/js/showdown.js'
'otto.client.templates.js'
'otto.client.misc.js' # non-dynamic module
#'http://jsconsole.com/remote.js?554C497C-216D-4803-8CC5-DD8656C25C8C' # for mobile debugging
'otto.client.js'
]
# we don't use live.js anymore, so i added prefixfree above
#if process.env.NODE_ENV is 'development'
# #console.log 'adding live.js for debugging'
# #scripts.push 'static/js/live' #for debugging
#else
# console.log 'not adding live.js for debugging, adding prefixfree.js'
# scripts.push 'static/js/prefixfree' # off while debugging, it prevents live.js from working
index.template = coffeecup.compile ->
doctype 5
html '.nofouc', -> # .nofouc not really used currently
head ->
title @title if @title
if @links
for l in @links
if l.type?
link rel: l.rel, type: l.type, href: l.href
else
link rel: l.rel, href: l.href
link(rel: @link.rel, href: @link.href) if @link # non-plural version
if @metas
for m in @metas
meta name: m.name, content: m.content
meta(name: @meta.name, content: @meta.content) if @meta # non-plural version
if @stylesheets
for s in @stylesheets
link rel: 'stylesheet', href: s
link(rel: 'stylesheet', href: @stylesheet) if @stylesheet
style @style if @style
script 'document.documentElement.className=""' # http://www.paulirish.com/2009/avoiding-the-fouc-v3/
body @bodyclasses, ->
if @body
text @body
if @scripts
for s in @scripts
script src: s
script(src: @script) if @script # non-plural version
index.body_startup = coffeecup.compile ->
div '.startup-container', ->
div '.startup', ->
#text otto.templates.ouroboros size: 'large', direction: 'cw', speed: 'slow'
div '.ouroboros', ->
div '.ui-spinner.large.slow.cw.gray.dark', ->
span '.side.left', ->
span '.fill', ''
span '.side.right', ->
span '.fill', ''
index.render = (moreparams={}) ->
params = {
#host: if @req.headers.host? and (@req.headers.host is 'localhost' or @req.headers.host.indexOf('localhost:') is 0) then os.hostname() else @req.headers.host
#port: otto.port
title: "otto" + if process.env.NODE_ENV is 'development' then ' (development)' else ''
body: index.body_startup()
metas: index.metas
links: index.links
scripts: index.scripts
stylesheets: index.stylesheets
}
_.extend params, moreparams
return index.template params
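# usage sketch (an assumption for illustration; the actual route lives in
# otto.server, not in this file): a zappa/express-style handler could simply
# respond with otto.index.render(), optionally overriding params, e.g.
#   @get '/': -> otto.index.render title: 'otto'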
return index
<|start_filename|>otto.channels.coffee<|end_filename|>
require './otto.events'
otto = global.otto
global.otto.channels = do -> # note 'do' calls the function
channels = {}
channels.channel_list = {}
channels.Channel = class Channel extends otto.events.EventEmitter
constructor: (@name, @info) ->
# valid events:
super ['*', 'time', 'queue', 'state', 'status', 'lineout', 'replaygain', 'outputs', 'started', 'finished', 'addtoqueue', 'killed', 'removed']
if channels.channel_list[@name]
throw new Error "channel name #{@name} already exists!"
channels.channel_list[@name] = @
@type = @info.type || 'standard'
@queue = []
@outputs = []
@lineout = 0
switch @type
when 'webcast', 'archive'
@autofill = false
else
@autofill = true
@autofill_min = 4
@autofill_pending = false
@mpd = null
attach_mpd: (callback) ->
@mpd = new otto.mpd.MPD(@name)
if @name is 'main' and process.platform isnt 'darwin'
@mpd.setautopause no
# otto.mpd has async events, establish event handlers before calling connect()
@mpd.on 'start', (eventname, mpd) =>
# webcast and archive clear their queues and then load them. to prevent interrupting
# webcasts and losing place on the archive during restarts we should check the queues
# to see if they really need to be reloaded before we stomp all over them FIXME
switch @type
when 'webcast'
if @info.urls?
urls = @info.urls
else
urls = @info.url
@mpd.play_url urls, =>
when 'archive'
@mpd.clear =>
otto.db.get_album @info.archivename, (album) =>
console.log 'filling archive queue'
mpdfilenames = []
if album and album.songs?
for song in album.songs
mpdfilenames.push channels.otto_filename_to_mpd(song.filename)
console.log mpdfilenames
if mpdfilenames
@mpd.play_archive mpdfilenames, =>
@mpd.on '*', (eventname, mpd, args...) =>
@mpd_event_handler eventname, mpd, args...
@mpd.connect callback
#@mpd.refresh() # this shouldn't be needed, just debugging something
mpd_event_handler: (eventname, mpd, args...) ->
switch eventname
when 'time'
@time = args[0]
@trigger 'time'
when 'playlist'
@playlist_changed args[0], =>
@trigger 'queue'
when 'state'
@state = args[0]
@trigger 'state'
if @type is 'webcast' and @state isnt 'play'
otto.misc.timeoutSet 1000, =>
mpd.playifnot ->
when 'status'
@status = args[0]
@trigger 'status'
when 'outputs'
@outputs = args[0]
for output in @outputs
if output.outputname is 'Otto Line Out'
if @lineout != output.outputenabled
@lineout = output.outputenabled
alllineout = {}
for name,channel of channels.channel_list
alllineout[name] = channel.lineout
@trigger 'lineout', alllineout
break
alloutputs = {}
for name,channel of channels.channel_list
alloutputs[name] = channel.outputs
@trigger 'outputs', alloutputs
when 'replaygain'
@replaygain = args[0]
@trigger 'replaygain'
when 'died'
@autofill_pending = false
@mpdids_invalid = true
refresh: ->
@mpd.refresh()
playlist_changed: (newplaylist, callback=no) ->
console.log "Channel#playlist_changed for #{@name}"
filename_list = []
for mpdsong in newplaylist
ottofilename = channels.mpd_filename_to_otto(mpdsong.file)
filename_list.push(ottofilename)
# correlate the mpd queue ids to the otto song list
otto.db.load_songs_by_filenames filename_list, (ottosongs) =>
for ottosong in ottosongs
for mpdsong in newplaylist
if ottosong.filename is channels.mpd_filename_to_otto(mpdsong.file) # not sure this works with "s
ottosong.mpdqueueid = mpdsong.Id
if @queue.length
# transfer the (currently) ephemeral requestor values to the new song list
if not @mpdids_invalid
for ottosong in ottosongs
if ottosong.mpdqueueid
for oldsong in @queue
if oldsong.mpdqueueid and ottosong.mpdqueueid is oldsong.mpdqueueid
if oldsong.requestor?
ottosong.requestor = oldsong.requestor
break
else
# old ids invalid, match by filename instead of mpdids
for ottosong in ottosongs
for oldsong in @queue
if ottosong.filename is oldsong.filename
if oldsong.requestor?
ottosong.requestor = oldsong.requestor
break
@mpdids_invalid = false
# see if the playing song has changed
previously_playing = @queue[0]
if previously_playing
if ottosongs.length is 0 or previously_playing.mpdqueueid != ottosongs[0].mpdqueueid
# check the killed flag to determine if it finished naturally
if not previously_playing.killed
@trigger 'finished', previously_playing
if newplaylist.songpos and ottosongs and newplaylist.songpos < ottosongs.length
ottosongs[newplaylist.songpos].nowplaying = true
previously_playing = false
for oldsong in @queue
if oldsong.nowplaying
previously_playing = oldsong
if not previously_playing or not previously_playing._id.equals( ottosongs[newplaylist.songpos]._id )
@trigger 'started', ottosongs[newplaylist.songpos]
# that might not work with featured playlists
# but probably neither does the @queue[0] bit about a dozen lines above
# this is broken now
#if requested_filename
# for song in ottosongs[1..ottosongs.length]
# if not song.requestor? and song.filename = requested_filename
# song.requestor = requestor
# break
@queue = ottosongs
callback()
if @autofill
@autofill_queue ->
# adds random songs to the queue if it's below the autofill_min
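# rough flow for 'standard' channels (see the code below): grab a pool of
# random songs, drop spoken-word style genres, pick a lucky listener and
# prefer songs they own, backfill from the rest of the pool if that comes up
# short, then hand the resulting filenames to mpd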
autofill_queue: (callback) ->
console.log 'autofill_queue'
if otto.db.emptydatabase
console.log 'empty database, skipping autofill_queue'
callback()
return
if @type is 'webcast'
console.log 'autofill ignored for webcast'
callback()
return
if @autofill_min > @queue.length and not @autofill_pending
howmany = @autofill_min - @queue.length
console.log 'howmany', howmany
@autofill_pending = true
console.log 'autofill_pending', @autofill_pending
switch @type
when 'standard'
otto.db.get_random_songs 300, (randomsongs) => # was 100
console.log 'auto filling queue with random songs'
vettedsongs = []
for song in randomsongs
genre = false
if song.genre?
genre = song.genre.toLowerCase()
if genre
if /book/.test(genre) then continue
if /audio/.test(genre) then continue
if /speech/.test(genre) then continue
if /spoken/.test(genre) then continue
if /podcast/.test(genre) then continue
if /academic/.test(genre) then continue
#if /comedy/.test(genre) then continue # also '57'
if genre in ['183', '184', '186', '101'] then continue
vettedsongs.push song
channels.pick_a_lucky_listener (luckylistener) =>
mpdfilenames = []
if luckylistener
for randomsong in vettedsongs
if randomsong.owners[0].owner is luckylistener
mpdfilenames.push channels.otto_filename_to_mpd(randomsong.filename)
if mpdfilenames.length >= howmany
break
if mpdfilenames.length < howmany
console.log "not enough songs for #{luckylistener}, backfilling"
for randomsong in vettedsongs
if channels.otto_filename_to_mpd(randomsong.filename) not in mpdfilenames
mpdfilenames.push channels.otto_filename_to_mpd(randomsong.filename)
if mpdfilenames.length >= howmany
break
console.log mpdfilenames
console.log 'before addsongs'
@mpd.addsongs mpdfilenames, =>
console.log 'after addsongs'
@autofill_pending = false
console.log 'autofill_pending', @autofill_pending
callback()
when 'limited'
otto.db.get_random_starred_songs howmany, @info.limiteduser, (newsongs) =>
console.log 'auto filling queue with limited songs'
mpdfilenames = []
if newsongs
for newsong in newsongs
mpdfilenames.push channels.otto_filename_to_mpd(newsong.filename)
#console.log mpdfilenames
@mpd.addsongs mpdfilenames, =>
@autofill_pending = false
callback()
else
callback()
else
console.log 'queue has enough songs, autofillpending =', @autofill_pending
callback()
add_to_queue: (id, user, callback) ->
console.log 'Channel#add_to_queue', id
if !id
if callback then callback() else return
otto.db.load_object id, no, (song) =>
mpdfilename = channels.otto_filename_to_mpd(song.filename)
if @queue and @queue.length
for queuesong, pos in @queue[1..] # skip the 'now playing' song
if not queuesong.requestor? # skip past any requests
break
pos+=1 # because we skipped the first one, ya see
else
pos=0 # queue is empty, insert song at the beginning
@mpd.addid mpdfilename, pos, (mpdresponse) =>
#console.log 'mpdresponse', mpdresponse
#console.log 'queue', @queue
@mpd.playlist (playlist) =>
@playlist_changed playlist, =>
if @queue.length
# set the requestor of the new song
found = false
for queuesong in @queue
if queuesong.mpdqueueid is mpdresponse[0].Id
queuesong.requestor = user
found = true
if not found
console.log 'error: unable to mark the requestor in the queue'
@trigger 'queue'
@trigger 'addtoqueue', song, user
if callback
callback()
remove_from_queue: (id, user) ->
# this appears to be messed up re: return values and async callbacks
if @queue
first = true
if id is '' and @queue[0]
id = @queue[0].mpdqueueid
for song in @queue
if Number(song.mpdqueueid) == Number(id)
song.killed = true
@mpd.deleteid id, =>
if first
@trigger 'killed', song, user
else
@trigger 'removed', song, user
return true
break
first = false
clear_queue: (id, user, callback) ->
if @queue
@mpd.clear callback
proxy_stream: (args...) ->
@mpd.proxy_stream args...
pause: (callback) ->
@mpd.pause callback
pauseifnot: (callback) ->
if @state is 'play'
@mpd.pause callback
else
callback()
# next is not currently used
next: (callback) ->
@mpd.next callback
# new calls added to support 'featured' channels
seek: (seconds, callback) ->
@mpd.seekcur seconds, callback
play: (position, callback) ->
@mpd.play position, callback
toggleplay: (callback) ->
if @state is 'play'
@mpd.pause callback
else
@mpd.play undefined, callback
#output state and manipulation
get_outputs: (callback) ->
@mpd.outputs callback
#lineout is just a specific input
set_lineout: (enable) ->
@mpd.outputs (r) =>
for output in r
if output.outputname is 'Otto Line Out'
if enable
@mpd.enableoutput output.outputid, ->
else
@mpd.disableoutput output.outputid, ->
break
toggle_lineout: ->
@mpd.outputs (r) =>
for output in r
if output.outputname is 'Otto Line Out'
if @lineout == '1'
@mpd.disableoutput output.outputid, ->
else
@mpd.enableoutput output.outputid, ->
break
toggle_crossfade: ->
@mpd.togglecrossfade()
toggle_replaygain: ->
@mpd.togglereplaygain()
#server side vol for line out (doesn't affect the streams thankfully)
setvol: (vol, callback) ->
@mpd.setvol Math.max( Math.min(vol, 100), 0), callback
channels.pick_a_lucky_listener = (callback) ->
# get a list of owners (people who have loaded music, or starred something)
otto.db.load_owner_list (owners) =>
ownerusernames = owners.map (owner) -> return owner.owner
# we still need to lookup the stars to include them in the lucky listeners picks FIXME
listeners = []
if otto.ourlisteners
list = otto.ourlisteners.get_list()
for id in list
listener = list[id]
# filter out old stale listeners junk from the listener list
if listener.socketids or listener.streams
# each listener only get one slot, even if they have multiple connections
if listener.user not in listeners
# this prevents non owner users from making things more random,
# but maybe we want a little bit of that?
if listener.user in ownerusernames
listeners.push listener.user
console.log 'eligible listeners', listeners
luckylistener = undefined
if listeners.length
luckylistener = listeners[Math.floor Math.random() * listeners.length]
# 15% of the time the lucky listener is actually unlucky
if luckylistener and Math.random() > 0.15
console.log 'lucky listener', luckylistener
else
console.log 'unlucky listener', luckylistener
notlucky = ownerusernames.filter (username) -> username isnt luckylistener
luckylistener = notlucky[Math.floor Math.random() * notlucky.length]
console.log 'lucky owner', luckylistener
if not luckylistener
# no listeners match owners/starred; randomly pick a lucky owner
# from the owners list (this method should help balance out
# lopsided collections)
luckylistener = ownerusernames[Math.floor Math.random() * ownerusernames.length]
console.log 'lucky owner', luckylistener
callback luckylistener
channels.set_global_event_handler = (handler) ->
console.log 'set_global_event_handler'
channels.global_events_handler = handler
for own channelname, channel of otto.channels.channel_list
channel.on '*', handler
channels.init = (callback) ->
callcount = otto.channelinfolist.length
for channelinfo in otto.channelinfolist
console.log "creating channel #{channelinfo.name}"
channel = new otto.channels.Channel(channelinfo.name, channelinfo)
if channels.global_events_handler
channel.on '*', channels.global_events_handler
channel.attach_mpd ->
if callcount-- == 1 and callback
callback()
#channel.refresh() # this didn't do what i expected
channels.mpd_filename_to_otto = (filename) ->
return filename
channels.otto_filename_to_mpd = (filename) ->
# files with " in them don't work, mpd can't handle 'em
return 'file://'+filename.replace(/"/g, '\\"')
return channels
# saving this post_with_body snippet
#filename_params = querystring.stringify(filename: filename_list)
#jsonreq.post_with_body 'http://localhost:8778/load_songs', filename_params, (err, ottosongs) =>
<|start_filename|>static/fonts/icomoon.css<|end_filename|>
@font-face {
font-family: 'icomoon';
src:url('fonts/icomoon.eot?e4zx9r');
src:url('fonts/icomoon.eot?#iefixe4zx9r') format('embedded-opentype'),
url('fonts/icomoon.woff?e4zx9r') format('woff'),
url('fonts/icomoon.ttf?e4zx9r') format('truetype'),
url('fonts/icomoon.svg?e4zx9r#icomoon') format('svg');
font-weight: normal;
font-style: normal;
}
[class^="icon-"], [class*=" icon-"] {
font-family: 'icomoon';
speak: none;
font-style: normal;
font-weight: normal;
font-variant: normal;
text-transform: none;
line-height: 1;
/* Better Font Rendering =========== */
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
.icon-play:before {
content: "\e600";
}
.icon-play2:before {
content: "\e601";
}
.icon-play3:before {
content: "\e62c";
}
.icon-pause:before {
content: "\e602";
}
.icon-plus:before {
content: "\e603";
}
.icon-add:before {
content: "\e62d";
}
.icon-minus:before {
content: "\e604";
}
.icon-remove:before {
content: "\e62e";
}
.icon-close:before {
content: "\e605";
}
.icon-close2:before {
content: "\e62f";
}
.icon-cancel-circle:before {
content: "\e606";
}
.icon-blocked:before {
content: "\e607";
}
.icon-stack:before {
content: "\e608";
}
.icon-console:before {
content: "\e609";
}
.icon-menu:before {
content: "\e60a";
}
.icon-download:before {
content: "\e60b";
}
.icon-users:before {
content: "\e60c";
}
.icon-user:before {
content: "\e60d";
}
.icon-star:before {
content: "\e60e";
}
.icon-star2:before {
content: "\e60f";
}
.icon-star3:before {
content: "\e610";
}
.icon-radio-unchecked:before {
content: "\e611";
}
.icon-radio-checked:before {
content: "\e612";
}
.icon-fork:before {
content: "\e613";
}
.icon-tags:before {
content: "\e614";
}
.icon-tag:before {
content: "\e615";
}
.icon-screen:before {
content: "\e616";
}
.icon-laptop:before {
content: "\e617";
}
.icon-tv:before {
content: "\e618";
}
.icon-bubble:before {
content: "\e619";
}
.icon-bubble2:before {
content: "\e61a";
}
.icon-zoomin:before {
content: "\e61b";
}
.icon-zoomout:before {
content: "\e61c";
}
.icon-flag:before {
content: "\e61d";
}
.icon-dice:before {
content: "\e61e";
}
.icon-warning:before {
content: "\e61f";
}
.icon-notification:before {
content: "\e620";
}
.icon-info:before {
content: "\e621";
}
.icon-info2:before {
content: "\e622";
}
.icon-loop:before {
content: "\e623";
}
.icon-file:before {
content: "\e624";
}
.icon-bubble3:before {
content: "\e625";
}
.icon-qrcode:before {
content: "\e626";
}
.icon-mug:before {
content: "\e627";
}
.icon-lightning:before {
content: "\e637";
}
.icon-volume-medium:before {
content: "\e638";
}
.icon-volume-mute:before {
content: "\e639";
}
.icon-volume-mute2:before {
content: "\e63a";
}
.icon-folder-open:before {
content: "\e63b";
}
.icon-minu:before {
content: "\e630";
}
.icon-stack2:before {
content: "\e628";
}
.icon-grid:before {
content: "\e635";
}
.icon-grid2:before {
content: "\e636";
}
.icon-menu2:before {
content: "\e629";
}
.icon-file2:before {
content: "\e62a";
}
.icon-triangle:before {
content: "\e631";
}
.icon-triangle2:before {
content: "\e632";
}
.icon-triangle3:before {
content: "\e633";
}
.icon-triangle4:before {
content: "\e634";
}
.icon-bubble4:before {
content: "\e62b";
}
<|start_filename|>otto.mpd.coffee<|end_filename|>
_ = require 'underscore'
fs = require 'fs'
net = require 'net'
child_process = require 'child_process'
mpdsocket = require 'mpdsocket'
require './otto.misc' # attaches to global.otto.misc
require './otto.events'
otto = global.otto
global.otto.mpd = do -> # note 'do' calls the function
mpd = {}
mpd_list = {}
mpd_slots = []
mpd.kill_all_mpdsSync = ->
otto.misc.kill_from_pid_fileSync "#{otto.OTTO_VAR_MPD}/[0-9]*pid"
mpd.MPD = class MPD extends otto.events.EventEmitter
constructor: (@name, @slot=0) ->
super ['*', 'start', 'time', 'state', 'status', 'playlist', 'outputs', 'replaygain', 'died'] # valid events
if mpd_list[@name]
throw new Error "already an mpd with name #{@name}"
mpd_list[@name] = @
# slots start at 1
if not @slot
for mpd_slot, i in mpd_slots
if !mpd_slot
@slot = i+1
break
if !@slot
@slot = mpd_slots.length+1
#if @slot < 1 or @slot > 99 # perhaps we don't need to limit this to 99 (ha!)
# throw new Error "error: bad mpd slot number #{@slot}, can only be from 1 to 99"
mpd_slots[@slot-1] = @
@slotstr = "#{@slot}"
if @slotstr.length < 2 then @slotstr = '0'+@slotstr # convert 1..9 to 01..09
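# e.g. slot 1 -> files named 01-mpd.conf, 01database, 01log, 01pid, 01socket
# under OTTO_VAR_MPD (see the paths assigned below)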
@cache = {}
@streamcount = 0
@autopause = yes
@conf_file = "#{otto.OTTO_VAR_MPD}/#{@slotstr}-mpd.conf"
# the '-' in the filename above is to make it appear first in a
# directory listing for a given slot's related files
@music_dir = "#{otto.OTTO_VAR_MPD_MUSIC}"
@db_file = "#{otto.OTTO_VAR_MPD}/#{@slotstr}database"
@log_file = "#{otto.OTTO_VAR_MPD}/#{@slotstr}log"
@pid_file = "#{otto.OTTO_VAR_MPD}/#{@slotstr}pid"
@state_file = "#{otto.OTTO_VAR_MPD}/#{@slotstr}state"
@control_port = (@slot-1)+6600 # not currently used
@control_socket = "#{otto.OTTO_VAR_MPD}/#{@slotstr}socket"
# for reliability we should probably check if ports are free first
# and assign them more dynamically, but this would lose the easy
# mapping between slot numbers and their streaming port numbers
# which would make reusing existing mpd processes more difficult
# (we'd need to dig in to the on-disk conf file to find the correct
# streaming port numbers), so for now:
@stream_ports =
'mp3': (@slot-1)*3+8101, # 8101 for slot 1, 8104 for slot 2
'ogg': (@slot-1)*3+8102, # 8102 for slot 1, 8105 for slot 2
'wav': (@slot-1)*3+8103 # 8103 for slot 1, 8106 for slot 2
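# i.e. port = (slot-1)*3 + base, so slot 3 -> 8107/8108/8109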
@stream_sockets = # not currently used
'mp3': "#{otto.OTTO_VAR_MPD}/#{@slotstr}socket_mp3"
'ogg': "#{otto.OTTO_VAR_MPD}/#{@slotstr}socket_ogg"
'wav': "#{otto.OTTO_VAR_MPD}/#{@slotstr}socket_wav"
@mpd_executable = "#{otto.MPD_EXECUTABLE}"
connect: (callback) ->
if @mpdsocket
throw new Error "mpd already connected for #{@name}"
@spawn =>
#@mpdsocket = new mpdsocket 'localhost', @control_port, false
@mpdsocket = new mpdsocket @control_socket, '', false
@mpdsocket.on 'connect', =>
#console.log "mpd connected on port #{@control_port}"
console.log "mpd connected for #{@name} on socket #{@control_socket}"
@setup callback
disconnect: ->
delete @mpdsocket
setup: (callback) ->
console.log 'mpd setup'
@mpdsocket.send 'repeat 0', =>
@mpdsocket.send 'random 0', =>
@mpdsocket.send 'single 0', =>
@mpdsocket.send 'consume 1', =>
#@mpdsocket.send 'crossfade 10', =>
@mpdsocket.send 'crossfade 5', =>
@mpdsocket.send 'replay_gain_mode track', =>
@start callback
if @name is 'main'
#@mpdsocket.send 'play 0', =>
#@mpdsocket.send 'pause 0', =>
#@playifnot ->
console.log ''
start: (callback) ->
#console.log "mpd starting for #{@name}"
# prime the pump
@refresh callback
# then setup the intervals
@status_interval = otto.misc.intervalSet 100, => @status_watchdog()
@playlist_interval = otto.misc.intervalSet 200, => @playlist_watchdog()
@outputs_interval = otto.misc.intervalSet 1000, => @outputs_watchdog()
@replaygain_interval = otto.misc.intervalSet 1000, => @replaygain_watchdog()
@trigger 'start'
if callback
callback()
stop: (callback) ->
clearInterval @status_interval
clearInterval @playlist_interval
clearInterval @outputs_interval
clearInterval @replaygain_interval
if callback
callback()
send: (req, callback) ->
try
@mpdsocket.send req, callback
catch mpdsocketNotOpenExeption
if not @revive_pending
@revive_pending = true
# mpd seems to have died
console.log "********\n******** mpd #{@slot} died! trying to revive it\n********"
@reset =>
@trigger 'died'
@revive =>
@revive_pending = false
## try sending the failed command again (not sure this is a good idea)
#@send req, callback
# we're just going to abandon all callbacks and let the upper layer reset itself
# with the 'died' event above
else
#callback() # not gonna work (won't have the right arguments, like r for example)
reset: (callback) ->
# stop everything, clear out the state file
# and delete the mpdsocket
@stop()
try
fs.unlinkSync @state_file
catch ENOENT
delete @mpdsocket
if callback
callback()
restoreoutputs: (cache, callback) ->
if cache.metavolume?
@setvol cache.metavolume, ->
console.log 'restoring outputs'
if cache.outputs
for output in cache.outputs
if output.outputenabled is '1'
@send "enableoutput #{output.outputid}", ->
else
@send "disableoutput #{output.outputid}", ->
callback()
restorereplaygain: (cache, callback) ->
if cache.metavolume?
@setvol cache.metavolume, ->
console.log 'restoring replaygain'
if cache.replaygain
@send "replay_gain_mode #{cache.replaygain}", ->
callback()
revive: (callback) ->
# try to revive a dead mpd by clearing out its state file
# to remove the potentially bad track and then restore its state
# we don't currently restore the rest of the playlist FIXME
wasplaying = @cache.state is 'play'
oldcache = @cache
@connect =>
@restoreoutputs oldcache, =>
@restorereplaygain oldcache, =>
# restore the playlist (minus the suspect song)
if oldcache.playlist
console.log 'restoring playlist'
newplaylist = []
for song in oldcache.playlist
newplaylist.push 'file://'+song.file
if oldcache.status?.song?
newplaylist.splice(oldcache.status.song, 1)
else
newplaylist.splice(0, 1) # gotta remove something :)
for file in newplaylist
@addid file, null, ->
console.log 'done restoring playlist'
if wasplaying
# hack: give it some time to get the queue filled
# (currently fails because picking songs is so damn slow)
# (bumped it up from 500ms to 3000ms)
# (wouldn't be an issue if we restored the playlist) FIXME
#otto.misc.timeoutSet 3000, =>
# @play 0, ->
@play 0, ->
callback()
#####
##### simple direct mpd protocol commands
#####
status: (callback) ->
@send 'status', callback
playlistinfo: (callback) ->
@send 'playlistinfo', callback
outputs: (callback) ->
@send 'outputs', callback
replaygainstatus: (callback) ->
@send 'replay_gain_status', (r) ->
callback r[0].replay_gain_mode
replaygainmode: (mode, callback) ->
@send "replay_gain_mode #{mode}", callback
@replaygain_watchdog()
pause: (callback) ->
@send 'pause', callback
@status_watchdog()
seekcur: (seconds, callback) ->
# newer mpds have a seekcur command, let's fake it
@status (r) =>
songpos = r[0].song
console.log 'songpos', songpos
console.log 'seek', songpos, seconds
@send "seek #{Number(songpos)} #{Number(seconds)}", callback
@status_watchdog()
next: (callback) ->
@send 'next', callback
@playlist_watchdog()
enableoutput: (id, callback) ->
@status (r) =>
# help hide mpd's '-1' volume when no output is enabled
if r[0].volume == '-1'
@send "enableoutput #{id}", =>
@setvol @cache.metavolume, callback
@outputs_watchdog()
else
@send "enableoutput #{id}", callback
@outputs_watchdog()
disableoutput: (id, callback) ->
@send "disableoutput #{id}", callback
@outputs_watchdog()
setvol: (vol, callback) ->
@send "setvol #{vol}", callback
# help hide mpd's '-1' volume when no output enabled
@cache.metavolume = vol
@status_watchdog()
togglecrossfade: ->
if @cache.status.xfade is '0'
@send 'crossfade 5', ->
else
@send 'crossfade 0', ->
@status_watchdog()
togglereplaygain: ->
if @cache.replaygain is 'off'
@send 'replay_gain_mode track', ->
else
@send 'replay_gain_mode off', ->
@replaygain_watchdog()
# attempt to get mpd to load new files it might not have in its database yet
update: (filename, callback) ->
filename = '"'+filename+'"'
@send "update #{filename}", callback
addid: (filename, pos=undefined, callback) ->
filename = '"'+filename+'"'
pos = if pos? then ' '+pos else ''
@send "addid #{filename}#{pos}", callback
#@playlist_watchdog()
deleteid: (id, callback) ->
@send "deleteid #{id}", callback
@playlist_watchdog()
repeat: (flag, callback) ->
@send "repeat #{flag}", callback
@status_watchdog()
single: (flag, callback) ->
@send "single #{flag}", callback
@status_watchdog()
consume: (flag, callback) ->
@send "consume #{flag}", callback
@status_watchdog()
clear: (callback) ->
@send 'clear', callback
@status_watchdog()
@playlist_watchdog()
play: (pos=undefined, callback) ->
#console.log "play! for #{@name} pos #{pos}"
pos = if pos? then ' '+pos else ''
@send 'play'+pos, callback
@status_watchdog()
######
###### more complex commands
######
play_url: (urls, callback) ->
@autopause = no
# set repeat 1? single 1? consume 0?
@repeat 1, =>
@single 1, =>
@consume 0, =>
@clear =>
urllist = [].concat(urls)
addurls = =>
if urllist.length
oneurl = urllist.shift()
#console.log 'adding url', oneurl
#console.log 'urllist', urllist
#console.log 'urls', urls
@addid oneurl, 0, (mpdresponse) => addurls()
else
@play 0, callback
addurls()
play_archive: (filenames, callback) ->
@autopause = no
# set repeat 1 single 0? consume 0
@repeat 1, =>
@single 0, =>
@consume 0, =>
@clear =>
filenamelist = [].concat(filenames)
@addsongs filenamelist, =>
@play undefined, callback
playifnot: (callback) ->
@status (r) =>
#console.log r[0].state, '<<<<'
if r[0].state is 'play'
callback()
else if r[0].state is 'stop'
@play undefined, callback
else if r[0].state is 'pause'
@pause callback # pause doesn't need a delay, stream sockets exist while paused
addsongs: (mpdfilenames, callback) ->
addonesong = (filename, singlecallback) =>
# first we execute an mpd db update command on the single file
# to make sure it's in its perhaps incomplete database
#@update filename, =>
# EXCEPT this isn't needed when using unix domain sockets and
# when using the full filename in addid (not file:/// apparently?)
@addid filename, null, singlecallback
i = 0
console.log 'addsongs', mpdfilenames.length
recurse = ->
addonesong mpdfilenames[i], ->
if ++i < mpdfilenames.length
console.log 'recurse', i
recurse()
else
console.log 'callback', i
callback()
recurse()
#####
##### state change watchdogs
#####
refresh: (callback) ->
@cache = {}
callcount = 4
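# each watchdog below decrements callcount when it completes; the one that
# sees callcount hit 1 (i.e. the last of the four) fires the caller's callback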
@status_watchdog ->
if callcount-- == 1 and callback
callback()
@playlist_watchdog =>
if callcount-- == 1 and callback
callback()
@trigger 'playlist', @cache.playlist # triggers autofill
@outputs_watchdog ->
if callcount-- == 1 and callback
callback()
@replaygain_watchdog ->
if callcount-- == 1 and callback
callback()
status_watchdog: (callback) ->
@status (r) =>
newtime = r[0].time
if not _.isEqual newtime, @cache.time
@cache.time = newtime
@trigger 'time', @cache.time
newstate = r[0].state
if not _.isEqual newstate, @cache.state
@cache.state = newstate
@trigger 'state', @cache.state
newstatus = _.omit r[0], ['elapsed']
# also consider omitting bitrate, time, playlist{,length}, nextsong{,id}
# work around a feature of mpd that messes up our ui
# if output isn't enabled the volume status is reported as -1
# let's hide that
# one side effect of this is that when the server is restarted
# channels that don't have an output enabled have their volume
# bar set wrong until the output is enabled. to FIXME we'll need
# to store the metavolume in the database
# or maybe it's the "cannot call methods on slider prior to initialization"
# error i'm getting in the console
if newstatus.volume == '-1'
newstatus.volume = @cache.metavolume
else
@cache.metavolume = newstatus.volume
if not _.isEqual newstatus, @cache.status
@cache.status = newstatus
@trigger 'status', @cache.status
if callback
callback()
playlist: (callback) ->
@status (r1) =>
songpos = r1[0].song
@playlistinfo (r2) =>
r2.songpos = songpos
callback r2
playlist_watchdog: (callback) ->
@playlist (r) =>
if not _.isEqual(r, @cache.playlist)
@cache.playlist = r
@trigger 'playlist', @cache.playlist
if callback
callback()
outputs_watchdog: (callback) ->
@outputs (r) =>
newtime = r[0].time
if not _.isEqual r, @cache.outputs
@cache.outputs = r
@trigger 'outputs', @cache.outputs
if callback
callback()
replaygain_watchdog: (callback) ->
@replaygainstatus (r) =>
newtime = r[0].time
if not _.isEqual r, @cache.replaygain
@cache.replaygain = r
@trigger 'replaygain', @cache.replaygain
if callback
callback()
#####
##### other stuff
#####
setautopause: (flag=yes) ->
@autopause = flag # why didn't = flag? work?
spawn: (callback) ->
# see if there is an existing mpd by testing a connection to the socket
testsocket = net.connect @control_socket, =>
testsocket.destroy()
# mpd process already exists, don't spawn and just use the existing socket
console.log "using existing mpd for slot #{@slot} on #{@control_socket}"
callback()
# error means we need to spawn an mpd process
testsocket.on 'error', (err) =>
testsocket.destroy()
console.log "no existing mpd found for slot #{@slot}, spawning a new one on #{@control_socket}"
console.log "...using executable #{@mpd_executable}"
# generate and write the conf file
@generate_conf_file_text()
fs.writeFile @conf_file, @conf_file_text, (err) =>
if err then throw err
opts =
detached: true
env :
DYLD_FALLBACK_LIBRARY_PATH: otto.OTTO_LIB
LD_LIBRARY_PATH: otto.OTTO_LIB
if otto.OTTO_SPAWN_AS_UID
opts.uid = otto.OTTO_SPAWN_AS_UID
child = child_process.spawn @mpd_executable, ['--no-daemon', @conf_file], opts
child.unref()
mpd_says = (data) =>
console.log "mpd#{@slotstr}: " + data # i could make this a different color. fun!
child.stdout.on 'data', mpd_says
child.stderr.on 'data', mpd_says
child.on 'exit', (code, signal) =>
return if otto.exiting
console.log "mpd #{@slot} exited with code #{code}"
if signal then console.log "...and signal #{signal}"
# when mpd crashes, we should consider blowing away the state file
# and removing the currently playing song from the queue and then
# firing things up again (and perhaps recording a problem with that file)
# (we have since added code on 'send' to detect a dead mpd and try
# to revive it there. perhaps it would be better done here)
otto.misc.wait_for_socket @control_socket, 500, (err) ->
if err then throw new Error err
callback()
generate_conf_file_text: ->
@conf_file_text = """
# auto generated (and regenerated) by otto, don't edit
# for channel #{@name}
music_directory "#{@music_dir}"
db_file "#{@db_file}"
log_file "#{@log_file}"
pid_file "#{@pid_file}"
state_file "#{@state_file}"
bind_to_address "#{@control_socket}"
#port "#{@control_port}"
#bind_to_address "localhost"
#zeroconf_enabled "yes"
#zeroconf_name "Otto Music Player #{@name}"
volume_normalization "yes"
input {
plugin "curl"
}
audio_output {
type "httpd"
name "Otto HTTP MP3 Stream #{@name}"
encoder "lame"
#bind_to_address "#{@stream_sockets['mp3']}"
port "#{@stream_ports['mp3']}"
bind_to_address "localhost"
#quality "5.0" # do not define if bitrate is defined
bitrate "128" # do not define if quality is defined
format "44100:16:1"
#max_clients "0"
}
audio_output {
type "httpd"
name "Otto HTTP OGG Stream #{@name}"
encoder "vorbis"
#bind_to_address "#{@stream_sockets['ogg']}"
port "#{@stream_ports['ogg']}"
bind_to_address "localhost"
#quality "5.0" # do not define if bitrate is defined
bitrate "128" # do not define if quality is defined
format "44100:16:1"
#max_clients "0"
}
audio_output {
type "httpd"
name "Otto HTTP WAV Stream #{@name}"
encoder "wave"
#bind_to_address "#{@stream_sockets['wav']}"
port "#{@stream_ports['wav']}"
bind_to_address "localhost"
format "44100:16:1"
#max_clients "0"
}
# having the null output seems to avoid a bug in mpd when no listeners are connected
audio_output {
type "null"
name "Otto Null Output"
mixer_type "none"
}
""" #" #" fixes emacs coffee mode brain damage (caused by "'s or #'s in above block)
#if @name is 'main'
if process.platform is 'darwin'
enabled = if @name is 'main' then 'yes' else 'no'
@conf_file_text += """
audio_output {
type "osx"
name "Otto Line Out"
mixer_type "software"
enabled "#{enabled}"
}
""" #"
proxy_stream: (req, res, add_stream_callback=no, remove_stream_callback=no, format='mp3') ->
#if typeof onempd is 'string'
# onempd = mpd_list[onempd]
console.log 'MPD#proxy_stream format', format
headers = assemble_headers(req.headers)
outsocket = res.socket
@playifnot => # this has to go first so the streams are created
console.log 'playing...'
open_callback = =>
@streamcount++
if add_stream_callback
add_stream_callback(req, @, format)
close_callback = =>
#if --@streamcount is 0 and @autopause
# @pause =>
# console.log 'no listeners left, mpd paused'
if remove_stream_callback
remove_stream_callback(req, @, format)
port = @stream_ports[format]
host = 'localhost'
otto.misc.wait_for_socket {port: port, host: host}, 200, (err) ->
if err
console.log "warning: we never saw the socket on #{host}:#{port} open up!"
res.send('stream not found!', 503)
else
console.log 'about to proxy_raw_icy_stream'
proxy_raw_icy_stream outsocket, headers, open_callback, close_callback, port, host
##### end of class MPD
assemble_headers = (headers) ->
CRLF = '\r\n';
messageHeader = 'GET / HTTP/1.1' + CRLF
store = (field, value) ->
messageHeader += "#{field}: #{value}" + CRLF
if headers
keys = Object.keys(headers)
isArray = (Array.isArray(headers))
for key in keys
if isArray
field = headers[key][0]
value = headers[key][1]
else
field = key
value = headers[key]
if Array.isArray(value)
store(field, val) for val in value
else
store(field, value)
return messageHeader + CRLF;
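# a minimal usage sketch (header values here are hypothetical, for illustration only):
#   raw = assemble_headers { 'user-agent': 'otto', 'icy-metadata': '1' }
#   insocket.write raw
# which yields "GET / HTTP/1.1\r\nuser-agent: otto\r\n..." terminated by a blank line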
# note: the ICY response codes break the node+connect http parsing code
# so we just jam the sockets together and keep our nose out of it
proxy_raw_icy_stream = (outsocket, headers, open_callback, close_callback, port=8101, host='localhost') ->
console.log 'proxy_raw_icy_stream'
insocket = net.connect port, host, ->
console.log 'net connected'
insocket.write headers
console.log 'headers written'
#insocket.addListener 'data', (data) ->
# console.log(data.length)
open_callback()
# close when the client disconnects or else we are just
# going to buffer up the stream until we run out of memory!
outsocket.on 'close', ->
insocket.end()
close_callback()
# this is dirty. and ugly. we write directly to the socket and keep
# http.js from writing out its standard implicit headers
# we rely on the headers being sent from mpd instead.
# we just wanna give the client exactly what mpd sends
# we also attempt to inject this one extra header to make jplayer think it can play faster
# https://groups.google.com/forum/#!msg/jplayer/nSM2UmnSKKA/bC-l3k0pCPMJ
#outsocket.write 'Accept-Ranges:bytes\r\n' # doesn't work, appears before the HTTP result code line, of course
insocket.pipe(outsocket)
##### end of class otto.mpd.MPD
return mpd
# this was my first attempt to proxy the mp3 stream
# hoo boy! this worked nice until mpd tried to respond
# with an ICY response code which broke the node http parser.
# it's a very nice way to proxy non-ICY web requests
# proxy = http.createClient 8101, 'localhost'
# proxy_request = proxy.request request.request.method, request.request.url, request.request.headers
# proxy_request.addListener 'response', (proxy_response) ->
# proxy_response.addListener 'data', (chunk) ->
# request.response.write chunk, 'binary'
# proxy_response.addListener 'end', ->
# request.response.end()
# request.response.writeHead proxy_response.statusCode, proxy_response.headers
# request.request.addListener 'data', (chunk) ->
# proxy_request.write chunk, 'binary'
# request.request.addListener 'end', ->
# proxy_request.end()
#stashing this more typical proxy code here just to keep it around
#nothing to do with mpd or streaming, was used to call our python api
#proxy_api_request = (request) ->
# console.log 'proxy_api_request', request.request.url
# proxy = http.createClient 8778, 'localhost'
# proxy_request = proxy.request request.request.method, request.request.url, request.request.headers
#
# #request.request.pipe(proxy_request)
#
# proxy_request.addListener 'response', (proxy_response) ->
# proxy_response.addListener 'data', (chunk) ->
# request.response.write chunk, 'binary'
# proxy_response.addListener 'end', ->
# request.response.end()
# request.response.writeHead proxy_response.statusCode, proxy_response.headers
# #proxy_request.pipe(request.response)
#
# request.request.addListener 'data', (chunk) ->
# proxy_request.write chunk, 'binary'
# request.request.addListener 'end', ->
# proxy_request.end()
#
# proxy_request.on 'error', ->
# console.log 'proxy_request error!'
# i don't remember what this was, perhaps useful to reference when i want to fork mpd processes
#child = new (forever.Monitor)(forever.fork)
#fork = require 'fork'
#forever = require 'forever'
#if child
# forever.startServer child
# child.on 'exit', ->
# console.log 'child exited'
# child.start()
# forever.list false, (err, data) ->
# if err
# console.log 'Error running `forever.list()`'
# console.dir err
# console.log 'Data returned from `forever.list()`'
# console.dir data
# return ''
<|start_filename|>static/js/miniAlert.coffee<|end_filename|>
#
# miniAlert, an alert plugin for jQuery
# Instructions: http://minijs.com/plugins/10/alert
# By: <NAME>, http://www.mynameismatthieu.com, @mattaussaguel
# Version: v1.0 stable
# More info: http://minijs.com/
#
jQuery ->
$.miniAlert = (element, options) ->
# default plugin settings
@defaults =
text: 'x' # close button text content
cssClass: 'close' # close button css class
position: 'before' # close button position: 'before' or 'after'
effect: 'basic' # closing effect: 'basic' or 'fade' or 'slide'
duration: 100 # hide animation duration in milliseconds
onLoad: -> # callback called when the close button has been added
onHide: -> # callback called when close button is clicked
onHidden: -> # callback called when alert message is hidden
@state = ''
@settings = {}
@$element = $ element
setState = (@state) ->
@getState = -> state
@getSetting = (settingKey) -> @settings[settingKey]
@callSettingFunction = (functionName, args = [@$element, @$button]) ->
@settings[functionName].apply(this, args)
removeElement = =>
@$element.remove()
setState 'hidden'
@callSettingFunction 'onHidden', []
addButton = =>
options = { class: @settings.cssClass, text: @settings.text }
@$button = $('<button />', options)
if @settings.position is 'after'
@$button.appendTo @$element
else
@$button.prependTo @$element
bindButtonEvent = =>
@$button.bind 'click', (e) =>
e.preventDefault()
setState 'hiding'
@callSettingFunction 'onHide'
if @settings.effect is 'fade'
@$element.fadeOut @settings.duration, removeElement
else if @settings.effect is 'slide'
@$element.slideUp @settings.duration, removeElement
else
removeElement()
init = =>
setState 'loading'
@settings = $.extend({}, @defaults, options)
addButton()
bindButtonEvent()
setState 'loaded'
@callSettingFunction 'onLoad'
init()
this
$.fn.miniAlert = (options) ->
this.each ->
if undefined == ($ this).data('miniAlert')
plugin = new $.miniAlert this, options
($ this).data 'miniAlert', plugin
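# a minimal usage sketch (the selector and option values are hypothetical):
#   jQuery ->
#     $('.alert-message').miniAlert
#       effect: 'fade'
#       duration: 200
#       onHidden: -> console.log 'alert dismissed'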
<|start_filename|>otto.main.coffee<|end_filename|>
otto = global.otto
require './otto.livedev' if process.env.NODE_ENV is 'development'
otto.main = ->
console.log 'node version ' + process.version
if process.versions['node-webkit']
console.log 'node-webkit version ' + process.versions['node-webkit']
cleanup_processes = ->
console.log 'killing mpds'
otto.mpd.kill_all_mpdsSync()
console.log 'killing mongod'
otto.db.kill_mongodSync()
crash_handler = (err) ->
console.log 'exception: ' + err
console.log err.stack
process.removeListener 'on', crash_handler
# we should capture the exception to a file for debugging
otto.exiting = true
cleanup_processes()
#throw new Error err
#console.trace()
process.exit(1)
# ctrl-c
process.on 'SIGINT', ->
cleanup_processes()
process.exit()
# kill (default)
process.on 'SIGTERM', ->
# don't clean up processes when in development mode
# this lets mpd (and mongo) keep running when the
# supervisor node module reloads us due to a file change
# (we prefer supervisor to nodemon these days)
if process.env.NODE_ENV isnt 'development'
cleanup_processes()
process.exit()
# nodemon detected a file change
process.once 'SIGUSR2', ->
cleanup_processes()
process.kill(process.pid, 'SIGUSR2')
# crashes
#!#process.on 'uncaughtException', crash_handler
# we still need to catch and deal with ENOACCESS and other problems opening the http port
# (though ENOACCESS is less important now that we decided to not run on port 80)
otto.db.init ->
otto.channels.init ->
otto.server()
<|start_filename|>otto.db.coffee<|end_filename|>
_ = require 'underscore'
fs = require 'fs'
net = require 'net'
mongodb = require 'mongodb'
child_process = require 'child_process'
otto = global.otto
module.exports = global.otto.db = do -> # note the 'do' causes the function to be called
db = {}
mongo = null
c = {}
#collections_inuse = ['objects', 'connections', 'images', 'accesslog', 'listeners', 'queues', 'events']
collections_inuse = ['objects', 'connections', 'images', 'events']
filenamecache = null
db.assemble_dbconf = ->
db.dbconf =
db: 'otto'
host: otto.OTTO_VAR + '/mongod.sock'
domainSocket: true
#host: 'localhost'
#port: 8777
#username: 'admin' # optional
#password: '<PASSWORD>' # optional
collection: 'sessions' # only for connect-mongo, optional, default: sessions
file: "#{otto.OTTO_VAR}/mongodb.conf"
db_directory: "#{otto.OTTO_VAR_MONGODB}"
#log_file: "#{otto.OTTO_VAR}/mongod.log"
pid_file: "#{otto.OTTO_VAR}/mongod.pid"
socket_file: "#{otto.OTTO_VAR}/mongod.sock" # must end in .sock for pymongo to work
#bind_ip: "localhost"
port: 8777 # not really used when using a unix domain socket (but still required?)
mongod_executable: "#{otto.MONGOD_EXECUTABLE}"
db.dbconf.text = """
# auto generated (and regenerated) by otto, don't edit
dbpath = #{db.dbconf.db_directory}
pidfilepath = #{db.dbconf.pid_file}
bind_ip = #{db.dbconf.socket_file}
#bind_ip = #{db.dbconf.bind_ip}
port = #{db.dbconf.port} # not really used, socket file on previous line is used instead
nounixsocket = true # suppresses creation of a second socket in /tmp
nohttpinterface = true
journal = on
noprealloc = true
noauth = true
#verbose = true
quiet = true
profile = 0 # don't report slow queries
slowms = 2000 # it still prints them to stdout though, this'll cut that down
""" # blank line at the end is so conf file has a closing CR (but not a blank line)
return db.dbconf
db.spawn = (callback) ->
# see if there is an existing mongod by testing a connection to the socket
testsocket = net.connect db.dbconf.socket_file, ->
# mongod process already exists, don't spawn another one
console.log "using existing mongod on #{db.dbconf.socket_file}"
testsocket.destroy()
callback()
testsocket.on 'error', (err) ->
#console.log 'error', err
testsocket.destroy()
console.log "no existing mongod found, spawning a new one on #{db.dbconf.socket_file}"
console.log "...using executable #{db.dbconf.mongod_executable}"
# we wait until now to write the conf file so we don't step on existing conf files for an existing mongod
fs.writeFile db.dbconf.file, db.dbconf.text, (err) ->
if err then throw err
opts =
#stdio: [ 'ignore', 'ignore', 'ignore' ]
detached: true
#env :
# DYLD_FALLBACK_LIBRARY_PATH: otto.OTTO_LIB
# LD_LIBRARY_PATH: otto.OTTO_LIB
if otto.OTTO_SPAWN_AS_UID
opts.uid = otto.OTTO_SPAWN_AS_UID
child = child_process.spawn db.dbconf.mongod_executable, ['-f', db.dbconf.file], opts
child.unref()
mongod_says = (data) ->
process.stdout.write 'mongod: ' + data # i could also color this differently, fun!
child.stdout.on 'data', mongod_says
child.stderr.on 'data', mongod_says
child.on 'exit', (code, signal) ->
return if otto.exiting
console.log "mongod exited with code #{code}"
if signal then console.log "...and signal #{signal}"
throw new Error 'mongod went away!' # i guess we could wait and try reconnecting? FIXME
otto.misc.wait_for_socket db.dbconf.socket_file, 1500, (err) -> # needed to be > 500 for linux
if err then throw new Error err
callback()
db.kill_mongodSync = ->
# needs to be Sync so we finish before event loop exits
otto.misc.kill_from_pid_fileSync otto.OTTO_VAR + '/mongod.pid'
db.init = (callback) ->
db.assemble_dbconf()
db.spawn ->
db.connect db.dbconf.db, db.dbconf.host, db.dbconf.port, (err) ->
if err
console.log "mongodb does not appear to be running"
throw err
#process.nextTick -> # not sure this is really necessary
callback()
db.connect = (database='otto', hostname='localhost', port=27017, callback=no) ->
mongo = new mongodb.Db(database, new mongodb.Server(hostname, port, {}), {safe:true, strict:false})
mongo.open (err, p_client) ->
if err
if callback then callback "error trying to open database #{database} on #{hostname}:#{port}: #{err}"
return
attach_collections collections_inuse, ->
c.objects.count (err, count) ->
if err then throw new Error "database error trying to count 'objects' collection: #{err}"
console.log "connected to database #{database} on #{hostname}:#{port}"
s = if count != 1 then 's' else ''
console.log "#{count} object#{s}"
if count < 5
console.log 'we have an empty database!'
db.emptydatabase = true
else
db.emptydatabase = false
if count > 200000
console.log 'we have a large database!'
db.largedatabase = true
else
db.largedatabase = false
#if not c.events.isCapped()
# console.log 'events collection is not capped'
#else
# console.log 'events collection is capped'
# couldn't get this to work. perhaps runCommand is missing from my mongodb driver?
#if not c.events.isCapped
# console.log 'capping events collection'
# p_client.runCommand {"convertToCapped": "events", size: 100000}
#console.dir p_client
#p_client.createCollection 'events', {'capped':true, 'size':100000}, ->
#p_client.createCollection 'events', ->
# if not c.events.isCapped
# console.log 'events collection is not capped'
# else
# console.log 'events collection is capped'
if callback
callback()
# lookup a list of connections by name and assign them to c.<collection_name>
attach_collections = (collection_names, callback) ->
lookupcount = collection_names.length
if lookupcount
for name in collection_names
do (name) ->
mongo.collection name, (err, collection) ->
if err then throw new Error "database error trying to attach to collection '#{name}': #{err}"
c[name] = collection
if --lookupcount is 0
callback()
else
callback()
db.save_event = (e, callback) ->
_id = c.events.save e, (err, eSaved) ->
callback eSaved._id
db.save_object = (o, callback) ->
if not o.otype
throw new Error 'an object needs an otype to be saved'
oid = c.objects.save o, (err, oSaved) ->
callback oSaved._id
db.load_object = (ids=null, load_parents=no, callback) ->
# otypes:
# 1 owner
# 5 dir
# 10 song
# 20 album
# 30 artist
# 40 fileunder
# 50 list
if not ids
console.log "load_object: no id(s) given"
ids = []
if ids instanceof Array
returnarray = true
else
returnarray = false
ids = [ids]
bids = ids.map (id) -> new mongodb.ObjectID(String(id)) # get_random_songs needed this for some odd reason!
q = { '_id': { '$in': bids } }
c.objects.find(q).toArray (err, objects) ->
if err then throw new Error "database error trying to load objects #{ids}: #{err}"
if not objects
callback null
return
for object in objects
object.oid = object['_id'] # for backwards compatibility
if load_parents
lookupcount = objects.length
for object in objects
db.load_subobjects object, load_parents, yes, [5,6], ->
lookupcount--
if lookupcount is 0
if returnarray
callback objects
else
callback objects[0]
else
if returnarray
callback objects
else
callback objects[0]
# alias because i keep mistyping it
db.load_objects = db.load_object
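# a minimal usage sketch (the id variables are hypothetical):
#   db.load_object someSongId, no, (song) ->
#     console.log song.otype, song.filename
#   db.load_objects [idA, idB], no, (objects) ->
#     console.log objects.length   # array in, array out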
db.load_subobjects = (objectlist, subotype, parents=no, filteroutctypes=[5,6], callback) ->
if not objectlist then throw new Error "load_object: you must supply the object(s)"
objects = [].concat objectlist # make array of array or single object
lookupcount = objects.length
# we should optimize this to be a single query instead of this loop FIXME
if not objects.length
callback objectlist
else
for o in objects
do (o) -> # makes a closure so we can preserve each version of 'o' across the async calls below
if parents
q = { child: o._id }
else
q = { parent: o._id }
# sort on _id, rank here? or do we need to sort after since they are not joined? TODO
c.connections.find(q).toArray (err, results) ->
if err then throw new Error "database error fetching list of subobjects for #{o._id}: #{err}"
subids = results.map (i) -> if parents then i.parent else i.child
q = { '_id': { '$in': subids } }
if subotype
q.otype = Number(subotype)
c.objects.find(q).toArray (err, subobjects) ->
if err then throw new Error "database error loading subobjects for #{o._id}: #{err}"
for subobject in subobjects
subobject.oid = subobject['_id'] # for backward compatibility
switch Number(subotype)
when 40 then o.fileunder = subobjects
when 30 then o.artists = subobjects
when 20 then o.albums = subobjects
when 10 then o.songs = subobjects
when 5 then o.dirs = subobjects
when 1 then o.owners = subobjects
lookupcount--
if lookupcount is 0
callback objectlist
db.load_image = (id, size, callback) ->
bid = new mongodb.ObjectID(id)
if not size then size = 'orig'
fields = {}
fields["sizes.#{size}"] = 1
c.images.findOne { _id: bid }, fields, (err, image) ->
if err
callback null
return
if image and image.sizes and image.sizes[size]
callback image.sizes[size].buffer
return
if size == 'orig'
callback null
return
console.log "image size #{size} not found, trying orig"
c.images.findOne { _id: bid }, { 'sizes.orig': 1 }, (err, image) ->
if err
callback null
return
if image and image.sizes and image.sizes.orig
callback image.sizes.orig.buffer
return
callback null
db.add_to_connections = (parent, child, ctype, rank, callback) ->
# ctypes:
# 1 dirs, songs, albums, artists to owner
# 2 songs to albums
# 3 songs to artists
# 4 artists to albums
# 5 primary albums to artists and/or 'various'
# 6 secondary albums to artists and/or 'various'
# 7 primary albums to fileunder
# 8 secondary albums to fileunder
# 10 dirs to dirs
# 11 files (songs) to directory
# 12 lists
# FIXME need to extend this to handle rank and duplicates
# upsert pattern yanked from https://github.com/mongodb/node-mongodb-native/issues/29
#id = mongodb.bson_serializer.ObjectID(null)
id = mongodb.ObjectID(null)
doc =
'_id': id
'ctype': ctype
'parent': parent
'child': child
'rank': rank
c.connections.update {'_id': id}, doc, upsert: true, (err, connection) ->
if err
callback null
else
console.log 'connection._id', connection._id # this isn't the way to get the new _id
callback connection
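# a minimal usage sketch (ids are hypothetical; ctype 2 links a song to an album per the list above):
#   db.add_to_connections albumId, songId, 2, 1, (connection) ->
#     console.log 'linked song to album' if connection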
db.remove_from_connections = (id, callback) ->
c.connections.remove {'_id': id}, (err, num) ->
if err then throw new Error err
callback num
# load album (or artist, or fileunder) details
db.album_details = (oid, callback) ->
result = null
lookupcount = 1
# lookup the id, see if it's an album or an artist or a fileunder
db.load_object oid, false, (object) ->
result = object
if not result
callback result
return
if result.otype in [30, 40] # artist or fileunder
db.load_subobjects result, 20, no, [5,6], phasetwo
else if result.otype == 20
phasetwo [result]
else
phasetwo result
phasetwo = (subobjects) ->
if result.otype in [30, 40]
further = result.albums
else
further = [result]
lookupcount = 3*further.length
if not further.length
callback result
else
for o in further
db.load_subobjects o, 10, no, [5,6], phasethree
db.load_subobjects o, 30, yes, [5,6], phasethree
db.load_subobjects o, 1, yes, [5,6], phasethree
phasethree = (subobjects) ->
lookupcount--
if lookupcount is 0
callback result
db.starts_with = (value, attribute, otype, nochildren, callback) ->
elapsed = new otto.misc.Elapsed()
filter = {}
params = []
filteroutctypes = [4,6]
order = {}
order[attribute] = 1
if value.length == 1 and value.toLowerCase() in 'abcdefghijklmnopqrstuvwxyz0123456789'
filter[attribute] = ///^#{value}///i
else switch value
when 'num'
filter[attribute] = ///^[0-9]///
when 'other'
filter[attribute] = {'$in': [ ///^[^0-9a-z]///i, 'unknown' ]}
when 'st'
filter[attribute] = ///\bsoundtrack\b///i
filteroutctypes = false
when 'va'
filter[attribute] = {
'$in': [
///\bvarious\b///i,
///\bartists\b///i,
///\bvarious artists\b///i
]
} # why yes, i *do* know the third regexp is redundant
filteroutctypes = false
when 'all'
filter = {}
order = {year: 1, album: 1}
filter['otype'] = otype
# i don't think this does anything when used on the objects collection (only connections have ctypes)
if filteroutctypes
filter['ctype'] = { '$nin': filteroutctypes }
objects = []
object_lookup = {}
# maybe someday we can figure out how to 'stream' these results (or maybe page 'em)
# ...actually, the bottleneck seems to be in rendering large result sets on the client
# side, not in getting the results from mongo or transmitting them to the client
c.objects.find(filter).sort(order).each (err, object) ->
if err then throw new Error "error searching for objects with #{attribute} #{value}: #{err}"
if object
object.oid = object['_id']
objects.push(object)
object_lookup[object._id] = object
else
console.log "#{elapsed.seconds()} objects loaded"
subotype = no
if otype == 40
subotype = 20
else if otype > 10
subotype = otype - 10
else
subotype = null
if !subotype or nochildren
callback objects
return
oids = objects.map (o) -> o._id
parents = no # huh? perhaps overriding the value from above?
if parents
q = { 'child': {'$in': oids} }
else
q = { 'parent': {'$in': oids} }
if filteroutctypes then q.ctype = {'$nin': filteroutctypes}
c.connections.find(q).toArray (err, connections) ->
if err then throw new Error "database error fetching list of subobjects for starts_with #{attribute} #{value} #{err}"
console.log "#{elapsed.seconds()} connections loaded"
subattribute = db.attach_point subotype
suboids = connections.map (i) -> if parents then i.parent else i.child
subobjects = []
subobject_lookup = {}
#missing sorting on rank here? FIXME
c.objects.find({ '_id': { '$in': suboids }, 'otype': subotype }).each (err, subobject) ->
if err then throw new Error "database error loading subobjects for starts_with #{attribute} #{value}: #{err}"
if subobject
subobject.oid = subobject['_id']
subobjects.push(subobject)
subobject_lookup[subobject._id] = subobject
else
for connection in connections
if parents
obj = object_lookup[connection.child]
sub = subobject_lookup[connection.parent]
else
obj = object_lookup[connection.parent]
sub = subobject_lookup[connection.child]
if not sub
continue
if obj[subattribute]
obj[subattribute].push(sub)
else
obj[subattribute] = [sub]
console.log "#{elapsed.seconds()} subobjects loaded"
console.log subobjects && subobjects.length
callback objects
db.attach_point = (otype) ->
switch Number(otype)
when 40 then return 'fileunder'
when 30 then return 'artists'
when 20 then return 'albums'
when 10 then return 'songs'
when 5 then return 'dirs'
when 1 then return 'owners'
return ''
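# e.g. db.attach_point(20) returns 'albums' and db.attach_point(40) returns 'fileunder'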
db.attach_parents = (objects, options, callback) ->
ids = objects.map (o) -> o._id
# match connections against child to find parents
find = { 'child': {'$in': ids}, 'ctype': {'$nin':[4,6]} }
c.connections.find(find).toArray (err, connections) ->
if err then throw new Error "error fetching parent connections: #{err}"
console.log connections.length, 'connections'
parentids = connections.map (i) -> i.parent
otypes = [].concat (options.otype || 40)
find = { '_id': { '$in': parentids }, 'otype': {'$in':otypes} }
sort = { 'child': 1, 'rank':1 }
c.objects.find(find).sort(sort).toArray (err, parents) ->
if err then throw new Error "error loading parent objects: #{err}"
#console.log parents.length, 'parents'
object_lookup = {}
for object in objects
object_lookup[object._id] = object
parent_lookup = {}
for parent in parents
parent_lookup[parent._id] = parent
# attach 'em
for connection in connections
object = object_lookup[connection.child]
parent = parent_lookup[connection.parent]
if not parent
continue
attribute = db.attach_point parent.otype
if attribute
if not object[attribute]
object[attribute] = []
object[attribute].push parent
callback objects
db.attach_children = (objects, options, callback) ->
ids = objects.map (o) -> o._id
# match connections against parent to find children
find = { 'parent': {'$in': ids}, 'ctype': {'$nin':[4,6]} }
c.connections.find(find).toArray (err, connections) ->
if err then throw new Error "error fetching child connections: #{err}"
#console.log connections.length, 'connections'
childids = connections.map (i) -> i.child
otypes = [].concat (options.otype || 1)
find = { '_id': { '$in': childids }, 'otype': {'$in':otypes} }
sort = { 'child': 1, 'rank':1 }
c.objects.find(find).sort(sort).toArray (err, children) ->
if err then throw new Error "error loading child objects: #{err}"
#console.log children.length, 'children'
object_lookup = {}
for object in objects
object_lookup[object._id] = object
child_lookup = {}
for child in children
child_lookup[child._id] = child
# attach 'em
for connection in connections
object = object_lookup[connection.parent]
child = child_lookup[connection.child]
if not child
continue
attribute = db.attach_point child.otype
if attribute
if not object[attribute]
object[attribute] = []
object[attribute].push child
callback objects
db.all_albums = (callback) ->
c.objects.find( { otype: 20 } ).toArray (err, results) ->
throw err if err
callback results
db.all_albums_by_year = (callback) ->
filter = {}
filter['otype'] = 20
order = {year: 1, album: 1}
#order = {album: 1}
#order = {genre: 1, album: 1}
c.objects.find(filter).sort(order).toArray (err, objects) ->
if err then throw new Error "error loading all_albums: #{err}"
db.attach_parents objects, { otype: [40,1] }, ->
console.log objects.length, 'objects'
console.log 'swapping yearless albums to the end'
for o, i in objects
break if o.year?
if i < objects.length
objects = [].concat objects[i..], objects[...i]
callback objects
db.all_albums_by_fileunder = (callback) ->
filter = {}
filter['otype'] = 20
c.objects.find(filter).toArray (err, objects) ->
if err then throw new Error "error loading all_albums: #{err}"
db.attach_parents objects, { otype: [40,1] }, ->
console.log objects.length, 'objects'
console.log 'sorting by fileunder'
objects.sort (a, b) ->
if not a.fileunder or not a.fileunder[0] or not a.fileunder[0].key
return 1
if not b.fileunder or not b.fileunder[0] or not b.fileunder[0].key
return -1
if a.fileunder[0].key is b.fileunder[0].key
return 0
else if a.fileunder[0].key > b.fileunder[0].key
return 1
else
return -1
console.log 'done'
callback objects
db.get_filename = (id, callback) ->
bid = new mongodb.ObjectID(id)
if filenamecache
callback filenamecache[bid] # [parseInt(id)] <- not anymore!
else
console.log 'loading filename cache...'
# if anyone else calls this while it is loading the cache the first time, it
# will have a race condition and return undefined FIXME
filenamecache = {}
tempcache = {}
c.objects.find({ filename: {'$exists':1} }, {_id:1, filename:1}).each (err, item) ->
if item
tempcache[item._id] = item.filename
#if bid == _id
# console.log "<<<<<<<<< #{item.filename}"
else
filenamecache = tempcache
console.log 'finished loading filename cache'
callback filenamecache[bid]
db.load_songs_by_filenames = (filenames, callback) ->
# this doesn't return the results in order which fuxxors the queue display FIXME
filter = { 'otype': 10, 'filename': {'$in': filenames} }
c.objects.find(filter).toArray (err, objects) ->
if err then throw new Error "database error trying to load_songs_by_filenames #{filenames}: #{err}"
for object in objects
object.oid = object['_id']
if objects and objects.length # was crashing when queue was empty
otype = object.otype
lookupcount = 0
if otype is 10 then lookupcount += objects.length
if otype < 20 then lookupcount += objects.length
if otype < 30 then lookupcount += objects.length
debug_limit = 20;
finisher = ->
lookupcount--
if lookupcount is 0
# now we need to put the results back in the order they were asked for
ordered = []
for filename in filenames
found = false
for object,i in objects
if object.filename is filename
found = true
ordered.push object
objects.splice i, 1
break
if not found
#console.log 'warning: mpd queue item not found in database:', filename
console.log 'warning: mpd queue item not found in database'
# should we make a fake object to return so the queue prints something?
object =
filename: filename
song: filename
_id: 0
ordered.push object
# objects really should be empty now
for object in objects
console.log 'could not match result object with requested filename!'
#console.log object.filename
#console.log filenames
callback ordered
for object in objects
if otype is 10 then db.load_subobjects object, 1, yes, [5,6], finisher # owner
if otype < 20 then db.load_subobjects object, 20, yes, [5,6], finisher # album
if otype < 30 then db.load_subobjects object, 30, yes, [5,6], finisher # artist
else
callback objects # empty
class Sequence
constructor: (@items, @each_callback) ->
@n = 0
next: ->
if @n<@items.length
@each_callback @items[@n++]
else
@done_callback @items
go: (@done_callback) -> @next()
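# a minimal usage sketch of Sequence (items and the async helper are hypothetical):
#   seq = new Sequence ['a', 'b', 'c'], (item) ->
#     do_something_async item, -> seq.next()
#   seq.go (items) -> console.log 'all done', items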
db.count_otypes = (ids, otype, callback) ->
c.objects.find( {_id: {'$in': ids}, otype: otype} ).count (err, howmany) ->
throw err if err
callback howmany
db.load_ownerX = (username, callback) ->
if username
c.objects.findOne { otype: 1, owner: username }, {}, (err, owner) ->
if err then throw err
callback owner
else # none specified, load 'em all
c.objects.find( { otype: 1 } ).sort( { owner: 1 } ).toArray (err, owners) ->
if err then throw err
seq = new Sequence owners, (owner) ->
c.connections.find({ parent: owner._id, ctype: 1 }).toArray (err, connections) =>
ids = for connection in connections then connection.child # don't use 'c'!
###
# use map/reduce here FIXME
tally = {}
db.count_otypes ids, 5, (count) =>
tally.dirs = count
db.count_otypes ids, 10, (count) =>
tally.songs = count
db.count_otypes ids, 20, (count) =>
tally.albums = count
db.count_otypes ids, 30, (count) =>
tally.artists = count
db.count_otypes ids, 40, (count) =>
tally.fileunders = count
db.count_otypes ids, 50, (count) =>
tally.lists = count
_.extend owner, tally
###
mapFunc = -> emit this.otype, 1
reduceFunc = (key, values) ->
count = 0
count += values[i] for i of values
return count
c.objects.mapReduce mapFunc, reduceFunc, { query: {_id: {'$in': ids}}, out: { inline: 1 }}, (err, results) =>
for result in results
name = false
switch String(result._id)
when '5' then name = 'dirs'
when '10' then name = 'songs'
when '20' then name = 'albums'
when '30' then name = 'artists'
if name
owner[name] = result.value
c.connections.find( { parent: owner._id, ctype: 12 } ).count (err, howmany) =>
throw err if err
owner.stars = howmany
@next()
seq.go ->
callback owners
db.load_owner_list = (callback) ->
c.objects.find( { otype: 1 } ).sort( { owner: 1 } ).toArray (err, owners) ->
throw err if err
callback owners
# still slow, let's try again
db.load_owner = (username, callback) ->
if username
q = { otype: 1, owner: username }
else
q = { otype: 1}
#c.objects.findOne q, {}, (err, owner) -> #oops
c.objects.find(q, {}).toArray (err, owner) ->
if err then throw err
callback owner
db.load_users = (callback) ->
# load list of owners
elapsed = new otto.misc.Elapsed()
db.load_owner_list (owners) ->
console.log "#{elapsed.seconds()} owners loaded"
console.log 'owners.length', owners.length
c.connections.find { ctype: 1 }, (err, cursor) =>
throw err if err
console.log "#{elapsed.seconds()} connections loaded"
#console.log 'connections.length', connections.length
owner_lookup = {}
owner_stats = {}
cursor.each (err, connection) ->
throw err if err
if connection
owner_lookup[connection.child] = connection.parent
else
console.log "#{elapsed.seconds()} owner_lookup built"
c.objects.find {}, {_id: 1, otype: 1, length: 1}, (err, cursor) ->
#c.objects.find({}, {_id: 1, otype: 1, length: 1}).toArray (err, objects) ->
throw err if err
console.log "#{elapsed.seconds()} objects loaded"
#console.log 'objects.length', objects.length
cursor.each (err, object) ->
if object
owner_id = owner_lookup[object._id]
if owner_id
if not owner_stats[owner_id]
owner_stats[owner_id] = {}
name = false
switch object.otype
when 5 then name = 'dirs'
when 10 then name = 'songs'
when 20 then name = 'albums'
when 30 then name = 'artists'
if name
owner_stats[owner_id][name] = 0 if not owner_stats[owner_id][name]
owner_stats[owner_id][name] += 1
if object.otype is 10 and object['length']
owner_stats[owner_id].seconds = 0 if not owner_stats[owner_id].seconds
owner_stats[owner_id].seconds += object['length']
else
for owner in owners
if owner_stats[owner._id]
_.extend owner, owner_stats[owner._id]
console.log "#{elapsed.seconds()} stats totaled"
seq = new Sequence owners, (owner) ->
c.connections.find( { parent: owner._id, ctype: 12, rank: {'$gt': 0} } ).count (err, howmany) =>
throw err if err
owner.stars = howmany
@next()
seq.go ->
console.log "#{elapsed.seconds()} stars totaled. done."
callback owners
db.find_or_create_owner = (username, callback) ->
db.load_owner username, (owner) ->
if owner[0]
callback owner[0]
else
c.objects.save { otype: 1, owner: username }, (err, newowner) ->
if err then throw err
callback newowner
db.load_list = (listname, callback) ->
if listname
c.objects.findOne { otype: 50, listname: listname }, {}, (err, list) ->
if err then throw err
callback list
else # none specified, load 'em all
c.objects.find( { otype: 50 } ).sort( { listname: 1 } ).toArray (err, lists) ->
if err then throw err
callback lists
db.find_or_create_list = (listname, callback) ->
db.load_list listname, (list) ->
if list
callback list
else
c.objects.save { otype: 50, listname: listname }, (err, newlist) ->
if err then throw err
callback newlist
db.load_all_lists = (loadobjectstoo, callback) ->
_build_results = (alllistitems, objects) ->
lists = {}
for item in alllistitems
if not lists[item._id]?
lists[item._id] = []
if objects
for object in objects
if item.child is object._id
lists[item._id].push object
break
else
lists[item._id].push item.child
return lists
c.connections.find( { ctype: 12 } ).sort( { parent: 1, rank: 1 } ).toArray (err, alllistitems) ->
if err then throw err
if loadobjectstoo
loadids = []
for item in alllistitems
loadids.push item.child
db.load_object loadids, no, (objects) ->
callback _build_results(alllistitems, objects)
else
callback _build_results(alllistitems)
db.load_stars = (username, loadobjectstoo, callback) ->
console.log 'load_stars', username
db.load_owner username, (owners) ->
if not owners
return callback({})
ownerids = []
if owners not instanceof Array
owners = [owners]
for owner in owners
ownerids.push owner._id
#console.log 'ownerids', ownerids
q = ctype: 12, parent: { $in: ownerids }
c.connections.find( q ).sort( { ownerids: 1, rank: -1 } ).toArray (err, allstarreditems) ->
#console.log 'allstarreditems', allstarreditems
if err then throw err
if loadobjectstoo
loadids = []
for item in allstarreditems
loadids.push item.child
db.load_object loadids, no, (objects) ->
callback db._load_stars_build_results(owners, allstarreditems, objects)
else
callback db._load_stars_build_results(owners, allstarreditems)
db._load_stars_build_results = (owners, allstarreditems, objects) ->
#console.log 'owners', owners
#console.log 'allstarreditems', allstarreditems
#console.log 'objects', objects
stars = {}
for owner in owners
stars[owner.owner] = []
for item in allstarreditems
if owner._id.equals(item.parent)
if objects
for object in objects
if object._id.equals(item.child)
_.extend item, object
break
stars[owner.owner].push item
return stars
db.add_to_user_list = (username, _id, rank, callback) ->
db.find_or_create_owner username, (owner) ->
db.load_object _id, no, (object) ->
if object
c.connections.findOne { ctype: 12, parent: owner._id, child: object._id }, (err, connection) ->
if err then throw err
if connection
connection.rank = rank
c.connections.update {'_id': connection._id}, connection, (err) ->
if err then throw err
callback(true)
else
db.add_to_connections owner._id, object._id, 12, rank, ->
callback(true)
else
callback(false)
db.remove_from_user_list = (username, oid, callback) ->
db.load_owner username, (owner) ->
if owner
db.remove_from_connections owner[0]._id, oid, 12, ->
callback true
else
callback false
# too slow (mainly because of toArray it seems) (<- really?)
db.Xget_all_song_ids = (callback) ->
c.objects.find( { otype: 10 }, { '_id': 1 } ).toArray (err, results) ->
ids = []
for row in results
ids.push row._id
callback ids
# too slow, see above
db.Xget_random_songs = (howmany=1, callback) ->
console.log 'get_random_songs', howmany
db.get_all_song_ids (ids) ->
console.log 'got all song ids'
picked_ids = []
while howmany--
console.log 'pick', howmany
picked = Math.floor Math.random() * ids.length
picked_ids.push ids[picked]
console.log 'loading song objects', picked_ids
db.load_object oids=picked_ids, no, (picked_songs) ->
console.log 'loaded.'
# we now shuffle the order of the returned songs since mongodb will
# not return them in the order we asked (which was random), but in
# 'natural' order. thus there will be a bias in the order of the random
# picks being in the order in which they were loaded into the database
shuffle = []
console.log 'shuffling picks'
while picked_songs.length
console.log 'shuffle', picked_songs.length
n = Math.floor Math.random() * picked_songs.length
shuffle.push picked_songs[n]
picked_songs.splice(n, 1)
console.log 'done picking random songs'
callback shuffle
db.count_all_song_ids = (callback) ->
c.objects.find({ otype: 10 }, { '_id': 1 }).count (err, count) ->
callback count
db.get_song_id_n = (n, callback) ->
c.objects.find({ otype: 10 }, { '_id': 1 }).skip(n).limit(1).toArray (err, song) ->
callback song[0]._id
db.Xget_random_songs = (howmany=1, callback) ->
console.log 'get_random_songs', howmany
elapsed = new otto.misc.Elapsed()
song_ids = []
c.objects.find({ otype: 10 }, {_id: 1}).each (err, song) ->
if song
song_ids.push song._id
else
console.log "#{elapsed} all song_ids"
count = song_ids.length
console.log 'song ids count', count
return [] if not count
picked_ids = []
howmany = if howmany < count then howmany else count
while howmany
picked = Math.floor Math.random() * count
if song_ids[picked] not in picked_ids
picked_ids.push(song_ids[picked])
howmany--
console.log "#{elapsed} picked 'em"
db.load_object oids=picked_ids, no, (picked_songs) ->
console.log "#{elapsed} song objects loaded"
db.attach_parents picked_songs, { otype: 1 }, ->
console.log "#{elapsed} parents attached"
# we now shuffle the order of the returned songs since mongodb will
# not return them in the order we asked (which was random), but in
# 'natural' order. thus there will be a bias in the order of the random
# picks being in the order in which they were loaded into the database
shuffle = []
console.log 'shuffling picks'
while picked_songs.length
n = Math.floor Math.random() * picked_songs.length
shuffle.push picked_songs[n]
picked_songs.splice(n, 1)
console.log "#{elapsed} shuffled results. done."
callback shuffle
db.get_random_songs = (howmany=1, callback) ->
console.log 'get_random_songs', howmany
elapsed = new otto.misc.Elapsed()
song_ids = []
# pick a slate of random songs, skipping anything over 15mins long
c.objects.find({ otype: 10, length: {$lt: 900} }, { _id: 1 }).count (err, count) ->
console.log "#{elapsed} count songs: #{count}"
randomWhere = "(Math.random() > #{(count-howmany)/count})"
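# with this $where clause each song passes independently with probability
# howmany/count, so the result set averages roughly `howmany` songs
# (the exact number returned will vary a little run to run)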
c.objects.find( {otype: 10, length: {$lt: 900}, $where: randomWhere} ).toArray (err, picked_songs) ->
throw err if err
console.log "#{elapsed} randomWhere done, #{picked_songs.length} picked_songs"
db.attach_parents picked_songs, { otype: 1 }, ->
console.log "#{elapsed} parents attached"
# shuffle the order of the returned songs since mongodb will
# return them in 'natural' order. thus there will be a bias in the order of the random
# picks being in the order in which they were loaded into the database
shuffle = []
while picked_songs.length
n = Math.floor Math.random() * picked_songs.length
shuffle.push picked_songs[n]
picked_songs.splice(n, 1)
callback shuffle
db.get_random_starred_songs = (howmany=1, username, callback) ->
#console.log 'get_random_starred_songs', howmany, 'for', username
db.load_stars username, true, (stars) ->
objects = stars[username]
if not objects
return callback []
#console.log objects.length, 'objects'
songs = []
for object in objects
if not object.rank > 0 then continue
if object.otype is 10
songs.push object
# expand albums into songs (still need to handle artists also)
async_count = 0;
for object in objects
if not object.rank > 0 then continue
if object.otype is 20
async_count += 1
db.load_subobjects object, 10, no, [5,6], (object) ->
async_count -= 1
if object.songs?
for song in object.songs
songs.push song
if async_count is 0
callback db.pick_random_songs_from_array howmany, songs
if object.otype is 40 # not actually working yet
async_count += 1
db.load_subobjects object, 10, no, [5,6], (objects) ->
console.log '^^^^^^^^^^^^^^ otype 40 objects', objects
async_count -= 1
if object.songs?
for song in object.songs
songs.push song
if async_count is 0
callback db.pick_random_songs_from_array howmany, songs
if async_count is 0
callback db.pick_random_songs_from_array howmany, songs
db.pick_random_songs_from_array = (howmany, songs) ->
#console.log 'picking random', howmany, 'songs from', songs.length, 'total songs'
if howmany > songs.length then howmany = songs.length
picked = []
attempts = 0
while picked.length < howmany and attempts < songs.length
attempts++
#console.log 'picking'
candidate = songs[Math.floor Math.random() * songs.length]
alreadypicked = false
for pick in picked
if candidate.id is pick.id
alreadypicked = true
break
if alreadypicked
#console.log 'already'
continue
#console.log 'picked'
picked.push candidate
#console.log 'done. got', picked.length
return picked
db.get_newest_albums = (callback) ->
c.objects.find( {otype : 20} ).sort( {_id:-1} ).limit(1000).toArray (err, albums) ->
throw err if err
for album in albums
album.timestamp = Number(album._id.getTimestamp())
db.attach_parents albums, { otype: [1, 30] }, ->
callback albums
db.get_album = (albumname, callback) ->
#console.log 'get_album for', albumname
c.objects.findOne {otype: 20, album: albumname}, (err, album)->
if err then throw new Error "error: db.get_album - #{err}"
if album
db.load_subobjects album, 10, no, [5,6], ->
callback album
else
callback album
db.search = (value, callback) ->
# (fileunders, albums, songs) = db.search2([[40, value, ['key', 'name', 'artist'], None],
# [20, value, 'album', None],
# [10, value, 'song', None]])
# #other = db.search(10, value, None,
# # ['name', 'artist', 'album', 'song', 'filename', 'title', '.nam', '.ART', '.alb'])
# #results = {'fileunders': fileunders, 'albums': albums, 'songs': songs, 'other': other}
# results = {'fileunders': fileunders, 'albums': albums, 'songs': songs}
# self.finish(json.dumps(results))
regexp = RegExp("\\b#{value}", 'i')
c.objects.find({otype: 40, name: regexp}).sort({name: 1}).toArray (err, fileunders)->
if err then throw new Error "error: db.search - #{err}"
db.load_subobjects fileunders, 20, no, [5,6], ->
c.objects.find({otype: 20, album: regexp}).sort({album: 1}).toArray (err, albums)->
if err then throw new Error "error: db.search - #{err}"
# err! this doesn't take a list of objects (yet)
db.load_subobjects albums, 30, yes, [5,6], ->
c.objects.find({otype: 10, song: regexp}).sort({song: 1}).toArray (err, songs)->
if err then throw new Error "error: db.search - #{err}"
for song in songs
song.oid = song._id # for backwards compatibility
c.objects.find({otype: 10, "tags.©wrt": regexp}).sort({"tags.©wrt": 1}).toArray (err, songcomposers)->
if err then throw new Error "error: db.search - #{err}"
c.objects.find({otype: 10, "tags.TCOM": regexp}).sort({"tags.TCOM": 1}).toArray (err, songcomposers2)->
if err then throw new Error "error: db.search - #{err}"
songcomposers = songcomposers.concat songcomposers2
for song in songcomposers
song.oid = song._id # for backwards compatibility
callback null, fileunders: fileunders, albums: albums, songs: songs, songcomposers: songcomposers
db.load_fileunder = (artistid, callback) ->
#console.log 'load_fileunder', artistid
db.load_object artistid, load_parents=40, (artist) ->
callback artist.fileunder
return db
<|start_filename|>static/css/ouroboros.css<|end_filename|>
/*
from <NAME>
http://atomeye.com/sass-css-spinner.html
https://github.com/tomgenoni/ouroboros
*/
@-webkit-keyframes ui-spinner-rotate-cw-right {
0% {
-webkit-transform: rotate(0deg);
}
25% {
-webkit-transform: rotate(180deg);
}
50% {
-webkit-transform: rotate(180deg);
}
75% {
-webkit-transform: rotate(360deg);
}
100% {
-webkit-transform: rotate(360deg);
}
}
@-webkit-keyframes ui-spinner-rotate-cw-left {
0% {
-webkit-transform: rotate(0deg);
}
25% {
-webkit-transform: rotate(0deg);
}
50% {
-webkit-transform: rotate(180deg);
}
75% {
-webkit-transform: rotate(180deg);
}
100% {
-webkit-transform: rotate(360deg);
}
}
@-moz-keyframes ui-spinner-rotate-cw-right {
0% {
-moz-transform: rotate(0deg);
}
25% {
-moz-transform: rotate(180deg);
}
50% {
-moz-transform: rotate(180deg);
}
75% {
-moz-transform: rotate(360deg);
}
100% {
-moz-transform: rotate(360deg);
}
}
@-moz-keyframes ui-spinner-rotate-cw-left {
0% {
-moz-transform: rotate(0deg);
}
25% {
-moz-transform: rotate(0deg);
}
50% {
-moz-transform: rotate(180deg);
}
75% {
-moz-transform: rotate(180deg);
}
100% {
-moz-transform: rotate(360deg);
}
}
@keyframes ui-spinner-rotate-cw-right {
0% {
transform: rotate(0deg);
}
25% {
transform: rotate(180deg);
}
50% {
transform: rotate(180deg);
}
75% {
transform: rotate(360deg);
}
100% {
transform: rotate(360deg);
}
}
@keyframes ui-spinner-rotate-cw-left {
0% {
transform: rotate(0deg);
}
25% {
transform: rotate(0deg);
}
50% {
transform: rotate(180deg);
}
75% {
transform: rotate(180deg);
}
100% {
transform: rotate(360deg);
}
}
/******************************************/
@-webkit-keyframes ui-spinner-rotate-ccw-right {
0% {
-webkit-transform: rotate(0deg);
}
25% {
-webkit-transform: rotate(-180deg);
}
50% {
-webkit-transform: rotate(-180deg);
}
75% {
-webkit-transform: rotate(-360deg);
}
100% {
-webkit-transform: rotate(-360deg);
}
}
@-webkit-keyframes ui-spinner-rotate-ccw-left {
0% {
-webkit-transform: rotate(0deg);
}
25% {
-webkit-transform: rotate(0deg);
}
50% {
-webkit-transform: rotate(-180deg);
}
75% {
-webkit-transform: rotate(-180deg);
}
100% {
-webkit-transform: rotate(-360deg);
}
}
@-moz-keyframes ui-spinner-rotate-ccw-right {
0% {
-moz-transform: rotate(0deg);
}
25% {
-moz-transform: rotate(-180deg);
}
50% {
-moz-transform: rotate(-180deg);
}
75% {
-moz-transform: rotate(-360deg);
}
100% {
-moz-transform: rotate(-360deg);
}
}
@-moz-keyframes ui-spinner-rotate-ccw-left {
0% {
-moz-transform: rotate(0deg);
}
25% {
-moz-transform: rotate(0deg);
}
50% {
-moz-transform: rotate(-180deg);
}
75% {
-moz-transform: rotate(-180deg);
}
100% {
-moz-transform: rotate(-360deg);
}
}
@keyframes ui-spinner-rotate-ccw-right {
0% {
transform: rotate(0deg);
}
25% {
transform: rotate(-180deg);
}
50% {
transform: rotate(-180deg);
}
75% {
transform: rotate(-360deg);
}
100% {
transform: rotate(-360deg);
}
}
@keyframes ui-spinner-rotate-ccw-left {
0% {
transform: rotate(0deg);
}
25% {
transform: rotate(0deg);
}
50% {
transform: rotate(-180deg);
}
75% {
transform: rotate(-180deg);
}
100% {
transform: rotate(-360deg);
}
}
.ouroboros .ui-spinner {
display: inline-block;
position: relative;
border-radius: 100%;
}
.ouroboros .ui-spinner .side {
width: 50%;
height: 100%;
overflow: hidden;
position: absolute;
}
.ouroboros .ui-spinner .side .fill {
border-radius: 999px;
position: absolute;
width: 100%;
height: 100%;
-webkit-animation-iteration-count: infinite;
-moz-animation-iteration-count: infinite;
-ms-animation-iteration-count: infinite;
-o-animation-iteration-count: infinite;
animation-iteration-count: infinite;
-webkit-animation-timing-function: linear;
-moz-animation-timing-function: linear;
-ms-animation-timing-function: linear;
-o-animation-timing-function: linear;
animation-timing-function: linear;
}
.ouroboros .ui-spinner .side.left {
left: 0;
}
.ouroboros .ui-spinner .side.left .fill {
left: 100%;
border-top-left-radius: 0;
border-bottom-left-radius: 0;
-webkit-transform-origin: 0 50%;
-moz-transform-origin: 0 50%;
-ms-transform-origin: 0 50%;
-o-transform-origin: 0 50%;
transform-origin: 0 50%;
}
.ouroboros .ui-spinner.cw .side.left .fill {
-webkit-animation-name: ui-spinner-rotate-cw-left;
-moz-animation-name: ui-spinner-rotate-cw-left;
-ms-animation-name: ui-spinner-rotate-cw-left;
-o-animation-name: ui-spinner-rotate-cw-left;
animation-name: ui-spinner-rotate-cw-left;
}
.ouroboros .ui-spinner.ccw .side.left .fill {
-webkit-animation-name: ui-spinner-rotate-ccw-left;
-moz-animation-name: ui-spinner-rotate-ccw-left;
-ms-animation-name: ui-spinner-rotate-ccw-left;
-o-animation-name: ui-spinner-rotate-ccw-left;
animation-name: ui-spinner-rotate-ccw-left;
-webkit-transform-origin: 0 50%;
}
.ouroboros .ui-spinner .side.right {
left: 50%;
}
.ouroboros .ui-spinner .side.right .fill {
left: -100%;
border-top-right-radius: 0;
border-bottom-right-radius: 0;
-webkit-transform-origin: 100% 50%;
-moz-transform-origin: 100% 50%;
-ms-transform-origin: 100% 50%;
-o-transform-origin: 100% 50%;
transform-origin: 100% 50%;
}
.ouroboros .ui-spinner.cw .side.right .fill {
-webkit-animation-name: ui-spinner-rotate-cw-right;
-moz-animation-name: ui-spinner-rotate-cw-right;
-ms-animation-name: ui-spinner-rotate-cw-right;
-o-animation-name: ui-spinner-rotate-cw-right;
animation-name: ui-spinner-rotate-cw-right;
}
.ouroboros .ui-spinner.ccw .side.right .fill {
-webkit-animation-name: ui-spinner-rotate-ccw-right;
-moz-animation-name: ui-spinner-rotate-ccw-right;
-ms-animation-name: ui-spinner-rotate-ccw-right;
-o-animation-name: ui-spinner-rotate-ccw-right;
animation-name: ui-spinner-rotate-ccw-right;
}
/* speed */
.ouroboros .ui-spinner.slow .side .fill {
-webkit-animation-duration: 5s;
-moz-animation-duration: 5s;
-ms-animation-duration: 5s;
-o-animation-duration: 5s;
animation-duration: 5s;
}
.ouroboros .ui-spinner.normal .side .fill {
-webkit-animation-duration: 2s;
-moz-animation-duration: 2s;
-ms-animation-duration: 2s;
-o-animation-duration: 2s;
animation-duration: 2s;
}
.ouroboros .ui-spinner.fast .side .fill {
-webkit-animation-duration: 1.5s;
-moz-animation-duration: 1.5s;
-ms-animation-duration: 1.5s;
-o-animation-duration: 1.5s;
animation-duration: 1.5s;
}
/* :after is the hole in the middle */
/* and also the reason we can't just
make the background transparent */
.ouroboros .ui-spinner:after {
content: "";
position: absolute;
border-radius: 50%;
display: block;
}
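/* Expected markup, inferred from the selectors above (a sketch only -- the real
   structure is generated by otto's templates elsewhere):
     <div class="ouroboros">
       <div class="ui-spinner cw normal gray dark small">
         <div class="side left"><div class="fill"></div></div>
         <div class="side right"><div class="fill"></div></div>
       </div>
     </div>
   Each .fill is a half-disc clipped by its .side; the two halves run the
   -left/-right keyframes defined above, and the :after pseudo-element paints
   the centre hole so only a ring appears to rotate. */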
.ouroboros .ui-spinner .side .fill {
/*opacity: 0.8;*/
opacity: 1;
}
/* fill color */
.ouroboros .ui-spinner.gray .side .fill {
background: gray;
}
.ouroboros .ui-spinner.blue .side .fill {
background: blue;
}
/* background */
.ouroboros .ui-spinner.black {
background: black;
}
.ouroboros .ui-spinner.black:after { /* hole in the middle */
background: black;
}
.ouroboros .ui-spinner.dark {
background: #333;
}
.ouroboros .ui-spinner.dark:after { /* hole in the middle */
background: #333;
}
/* size */
.ouroboros .ui-spinner.small {
//width: 20px;
//height: 20px;
width: 18px;
height: 18px;
}
.ouroboros .ui-spinner.small:after {
//width: 12px;
//height: 12px;
//top: 4px;
//left: 4px;
width: 10px;
height: 10px;
top: 4px;
left: 4px;
}
.ouroboros .ui-spinner.medium {
width: 26px;
height: 26px;
}
.ouroboros .ui-spinner.medium:after {
width: 16px;
height: 16px;
top: 5px;
left: 5px;
}
.ouroboros .ui-spinner.large {
width: 100px;
height: 100px;
}
.ouroboros .ui-spinner.large:after {
width: 60px;
height: 60px;
top: 20px;
left: 20px;
}
<|start_filename|>static/css/mmenu.css<|end_filename|>
/*
Animations
*/
html.mmenu-opened .mmenu-page,
html.mmenu-opened #mmenu-blocker,
.mmenu.mmenu-horizontal ul
{
-webkit-transition-duration: 0.5s;
-moz-transition-duration: 0.5s;
transition-duration: 0.5s;
-webkit-transition-timing-function: ease;
-moz-transition-timing-function: ease;
transition-timing-function: ease;
}
html.mmenu-opened .mmenu-page,
html.mmenu-opened #mmenu-blocker
{
-webkit-transition-property: top, right, bottom, left, margin;
-moz-transition-property: top, right, bottom, left, margin;
transition-property: top, right, bottom, left, margin;
}
.mmenu.mmenu-horizontal ul
{
-webkit-transition-property: margin-left;
-moz-transition-property: margin-left;
transition-property: margin-left;
}
html.mmenu-accelerated.mmenu-opening .mmenu-page,
html.mmenu-accelerated.mmenu-opening #mmenu-blocker,
html.mmenu-accelerated.mmenu-opening .mmenu.mmenu-horizontal ul
{
-webkit-transform: translate3d(0,0,0);
-moz-transform: translate3d(0,0,0);
transform: translate3d(0,0,0);
}
/* top */
html.mmenu-top.mmenu-opened .mmenu-page,
html.mmenu-top.mmenu-opened #mmenu-blocker
{
margin-top: 0px;
top: 0%;
left: 0;
}
html.mmenu-top.mmenu-opening .mmenu-page,
html.mmenu-top.mmenu-opening #mmenu-blocker
{
margin-top: -65px;
top: 100%;
}
/* right */
html.mmenu-right.mmenu-opened .mmenu-page,
html.mmenu-right.mmenu-opened #mmenu-blocker
{
margin-right: 0px;
right: 0%;
top: 0;
}
html.mmenu-right.mmenu-opening .mmenu-page,
html.mmenu-right.mmenu-opening #mmenu-blocker
{
margin-right: -65px;
right: 100%;
}
/* bottom */
html.mmenu-bottom.mmenu-opened .mmenu-page,
html.mmenu-bottom.mmenu-opened #mmenu-blocker
{
margin-bottom: 0px;
bottom: 0%;
left: 0;
}
html.mmenu-bottom.mmenu-opening .mmenu-page,
html.mmenu-bottom.mmenu-opening #mmenu-blocker
{
margin-bottom: -65px;
bottom: 100%;
}
/* left */
html.mmenu-left.mmenu-opened .mmenu-page,
html.mmenu-left.mmenu-opened #mmenu-blocker
{
margin-left: 0px;
left: 0%;
top: 0;
}
html.mmenu-left.mmenu-opening .mmenu-page,
html.mmenu-left.mmenu-opening #mmenu-blocker
{
margin-left: -65px;
left: 100%;
}
/*
Menu, submenus, items
- Sizing and positioning
*/
html.mmenu-opened .mmenu-page,
.mmenu.mmenu-horizontal ul,
.mmenu div.mmenu-search,
.mmenu div.mmenu-search input
{
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
}
/* html/body */
html.mmenu-opened,
html.mmenu-opened body
{
height: 100%;
width: 100%;
overflow: hidden;
}
html.mmenu-opened body
{
position: relative;
}
/* menu */
.mmenu
{
display: none;
width: 100%;
height: 100%;
position: absolute;
top: 0;
left: 0;
z-index: 0;
overflow: scroll;
overflow-x: hidden;
overflow-y: auto;
-webkit-overflow-scrolling: touch;
}
.mmenu:after {
content: '';
display: block;
clear: both;
}
.mmenu.mmenu-opened
{
display: block;
}
.mmenu.mmenu-opened + .mmenu.mmenu-opened,
.mmenu.mmenu-opened ~ .mmenu.mmenu-opened
{
display: none;
}
/* ul/li */
.mmenu ul,
.mmenu li
{
list-style: none;
}
.mmenu li
{
position: relative;
}
.mmenu ul,
.mmenu li,
.mmenu li > a,
.mmenu li > span
{
display: block;
padding: 0;
margin: 0;
}
.mmenu > ul
{
background-color: inherit;
padding: 10px 0 40px 0;
}
.mmenu.mmenu-top > ul
{
padding-bottom: 85px;
}
.mmenu.mmenu-right > ul
{
padding-left: 65px;
}
.mmenu.mmenu-bottom > ul
{
padding-top: 75px;
}
.mmenu.mmenu-left > ul
{
padding-right: 65px;
}
/* items */
.mmenu li > a,
.mmenu li > span
{
text-indent: 20px;
text-overflow: ellipsis;
white-space: nowrap;
line-height: 40px;
overflow: hidden;
padding-right: 5px;
}
/* subopen/close */
.mmenu a.mmenu-subopen
{
border-left-width: 1px;
border-left-style: solid;
width: 40px;
height: 40px;
padding: 0;
position: absolute;
right: 0;
top: 0;
z-index: 2;
}
.mmenu a.mmenu-subopen.mmenu-fullsubopen
{
border-left: none;
width: 100%;
}
.mmenu a.mmenu-subclose
{
background: rgba( 0, 0, 0, 0.2 );
text-indent: 40px;
padding-top: 10px;
margin-top: -10px;
}
.mmenu a.mmenu-subopen + a,
.mmenu a.mmenu-subopen + span
{
padding-right: 45px;
}
/* page + blocker */
html.mmenu-opened .mmenu-page
{
background: inherit;
height: 100%;
overflow: hidden;
position: absolute;
}
#mmenu-blocker
{
background: black;
opacity: 0.3;
display: none;
width: 100%;
height: 100%;
position: absolute;
z-index: 9999;
}
html.mmenu-opened #mmenu-blocker,
html.mmenu-blocking #mmenu-blocker
{
display: block;
}
/* vertical submenu */
.mmenu.mmenu-vertical ul ul
{
display: none;
padding: 10px 0 10px 10px;
}
.mmenu.mmenu-vertical li.mmenu-opened > ul
{
display: block;
}
.mmenu.mmenu-vertical ul ul li:last-child
{
border-bottom-width: 0;
}
.mmenu.mmenu-vertical li.mmenu-opened.mmenu-selected > a.mmenu-subopen + a,
.mmenu.mmenu-vertical li.mmenu-opened.mmenu-selected > a.mmenu-subopen + span
{
padding-right: 5px;
margin-right: 40px;
}
.mmenu.mmenu-vertical li.mmenu-opened.mmenu-selected > em.mmenu-counter + a.mmenu-subopen + a,
.mmenu.mmenu-vertical li.mmenu-opened.mmenu-selected > em.mmenu-counter + a.mmenu-subopen + span
{
margin-right: 75px;
}
/* horizontal submenu */
.mmenu.mmenu-horizontal
{
width: 500%;
}
.mmenu.mmenu-horizontal ul
{
width: 20%;
margin-left: 0%;
float: left;
}
.mmenu.mmenu-horizontal ul.mmenu-submenu
{
display: none;
}
.mmenu.mmenu-horizontal ul.mmenu-submenu.mmenu-opened
{
display: block;
}
.mmenu.mmenu-horizontal ul.mmenu-subopening
{
margin-left: -20%;
height: 100%;
overflow: hidden;
}
/*
Menu, submenus, items
- Styling (default: dark background)
*/
.mmenu
{
background: #333;
}
.mmenu,
.mmenu *
{
-webkit-text-size-adjust: none;
font-family: Arial, Helvetica, sans-serif;
//font-size: 14px;
}
.mmenu *
{
text-shadow: 0 1px 1px rgba( 0, 0, 0, 0.4 );
}
.mmenu li
{
/*
border-top: 1px solid rgba( 255, 255, 255, 0.1 );
border-bottom: 1px solid rgba( 0, 0, 0, 0.3 );
*/
border-bottom: 2px solid black;
}
.mmenu li:first-child
{
border-top-width: 0;
}
.mmenu li,
.mmenu li > a,
.mmenu li > a:hover,
.mmenu li > span
{
/*color: rgba( 255, 255, 255, 0.6 );*/
color: #6C6C6C;
font-size: 20px;
text-decoration: none;
}
.mmenu li > span:hover {
color: #999;
}
.mmenu li.mmenu-selected > a,
.mmenu li.mmenu-selected > span
{
background: rgba( 0, 0, 0, 0.2 );
}
.mmenu li.mmenu-selected > a.mmenu-subopen
{
background: transparent;
}
/* subopen/close */
.mmenu a.mmenu-subopen
{
border-left-color: rgba( 0, 0, 0, 0.3 );
}
.mmenu a.mmenu-subclose
{
background: rgba( 0, 0, 0, 0.2 );
}
/* vertical submenu */
.mmenu.mmenu-vertical li.mmenu-opened > a.mmenu-subopen,
.mmenu.mmenu-vertical li.mmenu-opened > ul
{
background: rgba( 255, 255, 255, 0.05 );
}
/* page + blocker */
html.mmenu-opened .mmenu-page
{
box-shadow: 0 0 20px rgba( 0, 0, 0, 0.8 );
}
/*
Labels
- Sizing and positioning
*/
.mmenu li.mmenu-label
{
text-transform: uppercase;
text-indent: 20px;
line-height: 20px;
}
/*
Labels
- Styling
*/
.mmenu li.mmenu-label
{
background: rgba( 255, 255, 255, 0.1 );
font-size: 11px;
color: rgba( 255, 255, 255, 0.5 );
}
/*
Counters
- Sizing and positioning
*/
.mmenu em.mmenu-counter
{
text-indent: 0;
text-align: center;
text-shadow: none;
line-height: 20px;
display: block;
min-width: 20px;
height: 20px;
padding: 0;
position: absolute;
right: 40px;
top: 10px;
}
.mmenu em.mmenu-counter + a.mmenu-subopen
{
padding-left: 35px;
}
.mmenu em.mmenu-counter + a.mmenu-subopen + a,
.mmenu em.mmenu-counter + a.mmenu-subopen + span
{
padding-right: 80px;
}
.mmenu em.mmenu-counter + a.mmenu-fullsubopen + a,
.mmenu em.mmenu-counter + a.mmenu-fullsubopen + span
{
padding-right: 70px;
}
/*
Counters
- Styling
*/
.mmenu em.mmenu-counter
{
border-radius: 5px;
background: rgba( 255, 255, 255, 0.1 );
box-shadow: 0 0 2px rgba( 0, 0, 0, 0.3 );
font-size: 11px;
font-weight: bold;
font-style: normal;
color: rgba( 255, 255, 255, 0.4 );
}
/*
Arrows
- Sizing and positioning
*/
.mmenu a.mmenu-subopen:after,
.mmenu a.mmenu-subclose:before
{
content: '';
border-width: 4px;
border-style: solid;
display: block;
width: 6px;
height: 6px;
position: absolute;
-webkit-transform: rotate( -45deg );
transform: rotate( -45deg );
}
.mmenu a.mmenu-subopen:after
{
border-top: none;
border-left: none;
right: 16px;
top: 16px;
}
.mmenu a.mmenu-subclose:before
{
border-right: none;
border-bottom: none;
left: 20px;
top: 25px;
}
.mmenu.mmenu-vertical li.mmenu-opened > a.mmenu-subopen:after
{
-webkit-transform: rotate( 45deg );
transform: rotate( 45deg );
}
/*
Arrows
- Styling
*/
.mmenu a.mmenu-subopen:after,
.mmenu a.mmenu-subclose:before
{
border-color: rgba( 255, 255, 255, 0.3 );
}
/*
Search
- Sizing and positioning
*/
.mmenu div.mmenu-search
{
width: 20%;
height: 50px;
padding: 10px;
position: absolute;
top: 0;
z-index: 1;
}
.mmenu.mmenu-vertical div.mmenu-search
{
width: 100%;
}
.mmenu.mmenu-right div.mmenu-search
{
padding-left: 75px;
}
.mmenu.mmenu-bottom div.mmenu-search
{
top: 65px;
}
.mmenu.mmenu-left div.mmenu-search
{
padding-right: 75px;
}
.mmenu div.mmenu-search input
{
border: none;
border-radius: 15px;
line-height: 30px;
outline: none;
display: block;
width: 100%;
height: 30px;
margin: 0;
padding: 0 0 0 10px;
}
.mmenu li.mmenu-noresults
{
border: none;
display: none;
padding-top: 30px;
}
.mmenu.mmenu-noresults li.mmenu-noresults
{
display: block;
}
.mmenu div.mmenu-search ~ ul
{
margin-top: 50px;
}
.mmenu .mmenu-noresult,
.mmenu .mmenu-nosubresult > a.mmenu-subopen,
.mmenu .mmenu-nosubresult > em.mmenu-counter
{
display: none;
}
.mmenu .mmenu-nosubresult > a.mmenu-subopen + a,
.mmenu .mmenu-nosubresult > a.mmenu-subopen + span
{
padding-right: 5px;
}
/*
Search
- Styling
*/
.mmenu div.mmenu-search
{
background: rgba( 0, 0, 0, 0.2 );
border-bottom: 1px solid rgba( 255, 255, 255, 0.1 );
}
.mmenu div.mmenu-search input
{
background: rgba( 255, 255, 255, 0.3 );
color: rgba( 255, 255, 255, 0.9 );
}
.mmenu li.mmenu-noresults
{
color: rgba( 255, 255, 255, 0.4 );
text-align: center;
}
/*
Bugfix for browsers without support for overflowscrolling
- Android < 3
*/
html.mmenu-no-overflowscrolling.mmenu-opened,
html.mmenu-no-overflowscrolling.mmenu-opened body
{
overflow: visible;
}
html.mmenu-no-overflowscrolling.mmenu-opened body
{
overflow-x: hidden;
}
html.mmenu-no-overflowscrolling.mmenu-opened .mmenu-page
{
min-height: 200%;
position: fixed;
top: 0;
z-index: 3;
}
html.mmenu-no-overflowscrolling .mmenu
{
height: auto;
min-height: 100%;
	overflow: visible;   /* 'default' is not a valid overflow value; visible (the initial value) is what this reset needs */
	overflow-x: visible;
	overflow-y: visible;
position: relative;
left: auto;
top: auto;
}
html.mmenu-no-overflowscrolling.mmenu-right .mmenu
{
position: absolute;
}
/*
Sizing and positioning for larger screens
*/
@media all and (min-width: 400px) {
.mmenu.mmenu-left.mmenu-vertical,
.mmenu.mmenu-right.mmenu-vertical,
.mmenu.mmenu-left div.mmenu-search,
.mmenu.mmenu-right div.mmenu-search
{
width: 400px;
}
.mmenu.mmenu-right.mmenu-vertical
{
left: auto;
right: 0;
}
.mmenu.mmenu-left.mmenu-horizontal,
.mmenu.mmenu-right.mmenu-horizontal
{
width: 2500px;
}
.mmenu.mmenu-right.mmenu-horizontal
{
left: auto;
right: -2000px;
}
html.mmenu-left.mmenu-opening .mmenu-page,
html.mmenu-left.mmenu-opening #mmenu-blocker
{
left: 400px;
}
html.mmenu-right.mmenu-opening .mmenu-page,
html.mmenu-right.mmenu-opening #mmenu-blocker
{
right: 400px;
}
}
<|start_filename|>static/css/otto.css<|end_filename|>
html, body {
height: 100%;
}
body, td {
font-family: Verdana, "Helvetica Neue", Helvetica, Arial, sans-serif;
color: #6C6C6C;
background-color: #333;
/* Improved handling of kerning pairs and ligatures in modern browsers */
/* http://aestheticallyloyal.com/public/optimize-legibility/ */
text-rendering: optimizeLegibility;
}
body.disconnected {
opacity: 0.2;
}
input .disconnected {
  /* note: this rule has no effect as written: 'disabled' is not a CSS property,
     and the descendant selector can never match (inputs have no children --
     'input.disconnected' was probably intended). Actually disabling the input
     needs the HTML disabled attribute or pointer-events: none. */
  disabled: disabled;
}
html.mmenu-opened body {
//opacity: 0.2;
}
#mmenu-blocker
{
opacity: 0.5;
}
.startup-container {
position: absolute;
top: 40%;
bottom: 0;
left: 0;
right: 0;
width: 100%;
margin-left: 50%;
height: 100%;
}
.startup {
width: 103px;
}
.startup .ouroboros {
display: inline-block;
width: 103px;
margin-left: -50%;
padding: 0;
}
.mmenu-page {
position: absolute;
top: 0;
bottom: 0;
left: 0;
right: 0;
}
#mainpage {
min-height: 100%;
height: auto !important;
height: 100%;
margin: 0 auto -145px;
}
.footer-container, .footer-backer { /* sticky footer thanks to <NAME> http://ryanfait.com/ */
display: block;
height: 45px;
margin-top: 100px;
width: 100%;
//clear: both;
}
.cursor-hider { /* a place to park the cursor to make it go away in full screen mode */
display: block;
position: absolute;
top: 0;
bottom: 0;
left: 0;
width: 5px;
height: 100%;
z-index: 20;
}
.greeting {
visibility: hidden; /* helps with a FOUC from dynamically loading the welcome screen css */
}
@media (min-width: 1900px) {
/* i got tired of the damn cursor hiding bugs that make my cursor disappear */
/* so this is a clumsy attempt to restrict to just fullscreen mode */
.cursor-hider {
cursor: none;
}
}
ul {
margin:0;
padding:0;
border:0;
outline:0;
font-size:100%;
font:inherit;
vertical-align:baseline;
}
li {
list-style-type: none;
}
li.h {
float: left;
white-space: normal;
}
/* clear fix */
.cf:before,
.cf:after {
content: " "; /* 1 */
display: table; /* 2 */
}
.cf:after {
clear: both;
}
.albumcontainer {
margin-right: 80px;
}
/**************************************************/
.artistlist {
position: relative;
margin-left: -62px;
margin-right: 12px;
}
.artistname-container {
margin-left: 62px;
}
.artistname {
font-size: 36px;
line-height: 36px;
color: #6C6C6C;
font-weight: 100;
}
.expand:hover, .gotothere:hover {
cursor: pointer;
/*text-shadow: 2px 2px 20px white;*/
color: #999;
}
.currenttrack .gotothere:hover {
/*text-shadow: 1px 1px 5px white, 1px 1px 10px white;*/
}
.expand.active, .expand.active:hover, .expand:active, .gotothere.active, .gotothere.active:hover, .gotothere:active
{
color: #AAA;
color: #BF6F04;
text-shadow: none;
}
.albumname {
min-height: 40px;
padding: 0px;
margin-top: 8px;
display: inline-block;
font-size: 24px;
line-height: 40px;
color: #6C6C6C;
vertical-align: bottom;
font-weight: 100;
}
.albumname > .stars {
margin-top: 8px;
}
.thumbnails {
display: inline-block;
position: relative;
margin-left: 32px;
top: 4px;
}
.thumb {
display: inline-block;
vertical-align: top;
margin-top: 2px;
margin-right: 2px;
color: #6C6C6C;
background-color: #222; /* none more black */
//border: 3px solid rgba(0,0,0,0);
border: 3px solid #333;
}
.thumb:hover {
cursor: pointer;
border-color: #999;
border: 3px solid #999;
}
.thumb.active {
border-color: #AAA;
color: #BF6F04;
border: 3px solid #AAA;
border: 3px solid #BF6F04;
}
.thumb.px40 {
width: 40px;
height: 40px;
//border: 2px solid rgba(0,0,0,0);
border: 2px solid #333;
}
.thumb.px40:hover {
border: 2px solid #999;
}
.thumb.px40.active
{
border: 2px solid #AAA;
border: 2px solid #BF6F04;
}
.thumb.px120 {
width: 120px;
height: 120px;
}
.thumb.px200 {
width: 200px;
height: 200px;
}
.thumb.px200 img { /* we need to precompute 200px images FIXME */
width: 200px;
height: 200px;
}
.thumb.px300 {
width: 300px;
height: 300px;
}
.noimg {
display: inline-block;
vertical-align: top;
background-color: #222;
font-family: Courier, Times;
overflow: hidden;
}
.noimg.px40 {
width: 40px;
height: 40px;
font-size: 8px;
font-family: Verdana, "Helvetica Neue", Helvetica, Arial, sans-serif;
}
.noimg.px120 {
width: 110px;
height: 110px;
font-size: 15px;
padding: 5px;
}
.noimg.px200 {
width: 200px;
height: 200px;
font-size: 15px;
padding: 5px;
}
.noimg.px300 {
width: 270px;
height: 270px;
font-size: 26px;
font-family: Courier;
line-height: 1em; /* something was mucking it up. artist? album? */
color: #555;
padding: 15px;
}
.noimgspacer {
height: 16px;
}
.albumimg.lazy {
opacity: 0;
// background-color: #222; /* none more black */
// //border: 1px solid rgba(0,0,0,0); /* FIXME why is this needed to make background color work when using a datauri?? */
}
img.albumimg {
-webkit-transition: opacity 0.5s;
}
.sep:before {
content: '▪';
padding-left: 0.6em;
padding-right: 0.6em;
}
.sepgray:before {
content: '▪';
padding-left: 0.6em;
padding-right: 0.6em;
color: #6C6C6C;
-webkit-font-smoothing: antialiased;
}
.enlist + .time.sep {
padding-left: 0;
}
.sep {
position: relative;
color: #6C6C6C;
}
.highlight {
/* font-weight: 700; */
color: white;
}
.currenttrack {
position: relative;
min-height: 50px;
max-height: 90px;
width:100%;
overflow: hidden;
font-size: 19px;
color: white;
/* this allows two lines when things are long: */
/*max-height: 1.99em;*/
/*overflow: hidden;*/
/* instead, let us try just letting things go as wide as they want: */
/* max-height: 1.5em; */
/* white-space: nowrap; */
/*
text-overflow: ellipsis;
o-text-overflow: ellipsis;
ms-text-overflow: ellipsis;
*/
}
//.size1 .currenttrack {
// max-height: 50px;
//}
.currenttrack.song {
font-size: 14px;
font-weight: 700;
}
.playing.control, .playing.control:hover {
color: white;
}
#time, #count {
display: inline-block;
position: relative;
top: -2px;
margin-left: 8px;
font-size: 12px;
color: #6C6C6C;
}
.filename, .dirpath {
position: absolute;
font-size: 12px;
max-height: 1.5em;
white-space: nowrap;
/*overflow: scroll;*/
overflow: hidden;
color: #6C6C6C;
/*margin-left: 23px;*/
}
.playing-container {
position: relative;
border-bottom: 2px solid #111;
}
.play-container {
width: 38px;
margin: 0 auto;
padding-top: 10px;
}
.connect-container {
width: 40px;
height: 31px;
margin: 0 auto;
padding-top: 10px;
}
.volume-container, .volumelineout-container {
position: relative;
top: -32px;
left: 90px;
width: 100px;
height: 0px;
margin: 0 auto;
cursor: crosshair; /* other candidates: text, col-resize, s-resize */
}
.volume, .volumelineout {
display: inline-block;
position: relative;
left: 8px;
top: 2px;
height: 4px;
width: 85px;
}
.lineout-container {
display: inline-block;
position: relative;
top: 7px;
}
.size-container {
position: absolute;
top: 20px;
right: 92px;
}
#size.bigger, #size.smaller {
//position: relative;
position: absolute;
left: 5px;
z-index: 2;
}
.size1 #size.smaller, .size2 #size.bigger {
visibility: hidden !important;
}
#size.bigger .icon-zoomin {
position: relative;
//font-size: 20px;
//top: -2px;
//left: -2px;
}
.next-container.size2 {
position: absolute;
top: 94px;
left: 20px;
}
.next-container.size1 {
position: absolute;
top: 48px;
left: 20px;
}
.icon-remove {
}
.icon-play2 {
//-webkit-font-smoothing: subpixel-antialiased;
}
.currenttrack-container.size2 {
margin: 46px 400px 0 65px;
min-height: 320px;
}
.currenttrack-container.size1 {
margin: 0px 200px 0 65px;
min-height: 220px;
}
.doubler {
zoom: 2.0;
}
@media (min-width: 1900px) { /* >1905 doesn't work in app */
html {
zoom: 1.86;
//zoom: 1.675;
}
}
.size2 .currenttrack-binder {
min-height: 270px;
}
.size1 .currenttrack-binder {
min-height: 170px;
}
.currentsong-container {
}
.timeprogress-container {
width: 100%;
height: 29px;
padding-right: 36px;
margin-left: 1px;
margin-top: 6px;
margin-bottom: 12px;
overflow: hidden;
}
.progress-maximum {
padding-right: 100px;
}
.timeprogress-binder {
display: inline-block;
width: 100%;
white-space: nowrap;
}
.time-container, .count-container {
display: inline-block;
position: relative;
top: -1px;
margin-left: 10px;
}
.progress-container {
display: inline-block;
}
.progress {
width: 100%;
display: inline-block;
height: 10px;
border: 2px solid #6C6C6C;
border-radius: 2px;
cursor: crosshair; /* other candidates: text, col-resize, s-resize */
}
.progress-indicator {
height: 10px;
background-color: #6C6C6C;
/* transition: transform 1s linear !important; */
}
.currentalbum-container {
display: inline-block;
}
.currentyear-container {
display: inline-block;
}
.currentartist-container {
margin-top: 2px;
}
.size2 .currenterrata-container {
font-size: 12px;
margin-top: 40px;
}
.size1 .currenterrata-container {
font-size: 12px;
margin-top: 10px;
}
.currenterrata-container .requestor-container {
display: inline-block;
}
.currenterrata-container .requestor-container .requestor {
color: white;
}
.owner-container {
display: inline-block;
position: relative;
left: 1px;
}
.filename-container {
display: inline-block;
  //position: absolute; /* keeps it from being clipped by its container */
margin-left: 2px;
margin-top: 8px;
}
.filename-clipper {
position: absolute;
left: 0;
width: 100%;
height: 15px;
overflow: hidden;
}
.filename-container .filename {
margin-left: 66px;
}
.size2 .currentalbum-container, .size2 .currentartist-container {
font-size: 24px;
color: #6C6C6C;
}
.size1 .currentalbum-container, .size1 .currentartist-container {
font-size: 18px;
color: #6C6C6C;
}
.currentyear-container .year {
display: inline-block;
color: #6C6C6C;
}
.size2 .currentyear-container .year {
margin-left: 20px;
font-size: 19px;
}
.size1 .currentyear-container .year {
margin-left: 14px;
font-size: 14px;
}
.currentcover-container {
position: absolute;
cursor: pointer;
}
.size2 .currentcover-container {
top: 50px;
right: 50px;
}
.size1 .currentcover-container {
top: 50px;
right: 30px;
}
.size1 .thumb.px300 {
width: 120px;
height: 120px;
}
.size1 .thumb.px300 img {
width: 120px;
height: 120px;
}
.size1 .noimg.px300 {
width: 110px;
height: 110px;
font-size: 15px;
font-family: Courier, Times;
padding: 5px;
}
.search .section {
margin-top: 20px;
margin-bottom: 5px;
font-size: 20px;
  font-weight: 700;
clear: both;
}
.search .albumlist {
clear: both;
}
.search .filename {
margin-top: 4px;
}
.search .noresults {
margin-top: 20px;
margin-bottom: 5px;
font-size: 24px;
  font-weight: 700;
}
.search .noresults:after {
content: 'Nothing found';
margin-left: 50px;
}
.ondeck-container {
min-height: 71px;
max-height: 350px;
padding: 10px 0;
overflow: hidden;
border-bottom: 2px solid #111;
}
.ondeck-container.featured {
height: 100%;
border: none;
}
.ondeck
{
height: 100%;
clear: both;
position: relative;
margin-top: 10px;
margin-bottom: 10px;
font-size: 14px;
}
.ondeck td {
max-height: 1.5em;
white-space: nowrap;
overflow: hidden;
padding: 0;
}
.ondeck .requestor-container {
display: inline-block;
position: relative;
top: 2px;
vertical-align: top;
width: 100%;
height: 16px;
text-align: right;
font-size: 11px;
min-width: 46px;
overflow: hidden;
}
.ondeck .requestor {
margin-right: 6px;
margin-left: 6px;
}
.ondeck .song.requested {
color: white;
}
.ondeck .filename:before { /* fix the sep */
font-size: 14px;
}
.songs {
margin-top: 10px;
}
.songs li {
margin-left: 26px;
max-height: 1.5em;
white-space: nowrap;
overflow: hidden;
}
.albuminlist {
display: block;
//max-height: 52px !important;
//height: 52px !important;
min-height: 52px;
}
.starslist {
margin-left: 25px;
}
.starslist .section {
margin-top: 25px;
}
.download.control {
margin-left: 8px;
}
.stars.control.teeny {
margin-left: 4px;
padding-left: 4px;
padding-right: 4px;
}
.nostars, .none {
margin-top: 20px;
margin-bottom: 5px;
font-size: 24px;
  font-weight: 700;
}
.songs .song {
/* font-weight: 700; */
}
.letterbar-container {
display: inline-block;
width: 0;
}
.letterbar {
margin-left: 20px;
-webkit-user-select: none;
-moz-user-select: none;
}
.letterbar li {
display: inline-block;
opacity: 1;
font-weight: 900;
padding: 6px 4px 12px 4px;
color: #6C6C6C;
border: none;
outline: none;
cursor: pointer;
text-decoration: none;
/*text-transform: lowercase;*/
}
.letterbar .gap {
margin-left: 15px;
}
.letterbar li:hover {
color: #999;
}
.letterbar .active, .letterbar .active:hover, .letterbar:active {
color: #AAA;
color: #BF6F04;
}
.channelbar-container {
position: relative;
min-height: 80px;
width: 100%;
background-color: #333;
overflow: hidden;
white-space: nowrap;
border-bottom: 2px solid #111;
}
.channelbar {
width: 100%;
height: 100%;
padding-top: 6px;
vertical-align: top;
}
.channelbar-left {
position: absolute;
top: 0;
left: 0;
width: 64px;
height: 100%;
}
.channelbar-center {
position: absolute;
top: 0;
left: 0;
right: 0;
margin-top: 9px;
margin-left: 64px;
margin-right: 64px;
}
.channelbar-right {
position: absolute;
top: 0;
right: 0;
width: 64px;
height: 100%;
}
.channelbar-lower {
padding-top: 50px;
left: 0;
right: 0;
margin-left: 64px;
margin-right: 10px;
}
.topcontrols-container {
position: absolute;
top: 0px;
right: 10px;
}
.channelbar .notificationstoggle {
position: relative;
top: -3px;
margin-left: 15px;
-webkit-transform: rotate(180deg);
}
.channelbar .soundfxtoggle {
position: relative;
top: 3px;
}
.channelbar .notificationstoggle .icon-bubble3 {
}
.listeners-container {
}
.listeners {
color: #111;
/*font-size: 12px; for three rows*/
/*font-size: 16px; /*for two rows*/
font-size: 14px;
font-style: normal;
white-space: normal;
}
.listeners .count {
white-space: nowrap;
}
.listeners .sep {
color: #111;
font-style: normal;
}
.listener {
display: inline-block;
}
.listener.streaming {
font-weight: 800;
}
.listener.inchat {
color: #6C6C6C;
color: #BF6F04;
}
.listener.typing {
text-shadow: 2px 2px 20px #6C6C6C;
}
.listener.idle {
font-style: italic;
}
.browse-container {
margin-top: 17px;
margin-left: 66px;
}
.browsecontrols-container {
min-height: 56px;
white-space: nowrap;
overflow: hidden;
}
.search-container {
display: inline-block;
}
.searchform {
margin-top: 4px;
margin-right: 2px;
}
.searchtext {
width: 42px;
padding: 3px 9px;
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
font-size: 13px;
font-weight: normal;
line-height: 1; /*huh? no units?*/
color: rgba(255, 255, 255, 0.75);
background: #333;
border: 2px solid #6C6C6C;
border-radius: 2px;
transition: width 0.2s;
}
.searchtext:focus {
width: 280px;
}
.searchtext-cancel {
}
.searchtext :-moz-placeholder {
color: #6C6C6C;
}
.searchtext ::-webkit-input-placeholder {
color: #6C6C6C;
}
.searchtext:hover {
color: white;
//background-color: #6C6C6C;
border-color: #999;
}
.searchtext:focus, .searchtext.focused {
//padding: 5px 10px;
//background-color: #6C6C6C;
//color: #999;
//border: 0;
outline: 0;
border-color: #AAA;
border-color: #BF6F04;
}
/*
::-webkit-input-placeholder { color:#111; }
input:-moz-placeholder { color:#111; }
::-webkit-input-placeholder:focus { color:blue; }
input:-moz-placeholder:focus { color:blue; }
*/
::-webkit-input-placeholder { color:#6C6C6C; font-weight: 700; }
input:-moz-placeholder { color:#6C6C6C; font-weight: 700; }
/*::-webkit-input-placeholder:focus { color:blue; }*/
/*input:-moz-placeholder:focus { color:blue; }*/
.login-container {
width: 100%;
height: 150px;
transition: height 0.2s, border-width 0s;
border-bottom: 2px solid #111;
overflow: hidden;
}
body.loggedin .login-container {
height: 0px;
border-bottom: 0px solid #111;
transition: height 0.2s, border-width 0s ease 0.14s;
}
body.loggedout .ondeck-container,
body.loggedout .browse-container,
body.loggedout .next-container,
body.loggedout .ondeckchattoggle-container {
display: none;
}
.login {
padding-top: 50px;
padding-bottom: 60px;
padding-left: 250px;
}
.loginform {
margin-top: 4px;
margin-right: 2px;
}
.logintext {
width: 226px;
padding: 3px 9px;
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
font-size: 13px;
font-weight: normal;
line-height: 1; /*huh? no units?*/
color: rgba(255, 255, 255, 0.75);
background: #333;
border: 2px solid #6C6C6C;
border-radius: 2px;
transition: none;
}
.logintext :-moz-placeholder {
color: #6C6C6C;
}
.logintext ::-webkit-input-placeholder {
color: #6C6C6C;
}
.logintext:hover {
color: white;
//background-color: #6C6C6C;
border-color: #999;
}
.logintext:focus, .logintext.focused {
//padding: 5px 10px;
//background-color: #6C6C6C;
//color: #999;
//border: 0;
outline: 0;
border-color: #AAA;
border-color: #BF6F04;
}
.ouroboros-container {
display: inline-block;
position: fixed;
z-index: 2;
//top: 10px;
//right: 50px;
bottom: 18px;
right: 70px;
}
.debug-container {
display: inline-block;
}
.debug {
margin-left: 75px;
}
.mycaret {
display: inline-block;
width: 0;
height: 0;
vertical-align: top;
filter: alpha(opacity=30);
opacity: 0.3;
content: "\2193";
}
.mycaret:after {
content: "▼";
}
.dropdown .mycaret {
margin-top: 8px;
margin-left: 2px;
*margin-top: 7px;
}
.dropdown:hover .mycaret {
/*text-shadow: 1px 1px #BBB, 2px 2px #777, 3px 3px #444;*/
text-shadow: 2px 2px 20px white
}
.browseresults-container {
display: block;
width: 100%;
margin-top: 20px;
}
.buttonless {
visibility: hidden;
position: absolute;
left: 0px;
width: 0px;
height: 0px;
padding: 0px;
margin: 0px;
}
.control {
font-family: 'icomoon';
color: #6C6C6C;
background-color: rgba(0,0,0,0);
display: inline-block;
font-size: 16px;
text-align: center;
text-shadow: none !important;
cursor: pointer;
border: none;
margin: 0;
/*padding: 0;*/
vertical-align: top;
/*letter-spacing: 0.01em;*/
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
.control.enabled, .control.enabled:hover {
visibility: visible !important;
color: #BF6F04;
}
.control.large {
height: 30px;
width: 40px;
border-radius: 3px;
border-width: 3px;
border-color: #6C6C6C;
border-style: solid;
}
.control.wide {
width: 110px;
}
.control:focus {
outline: none;
}
.control:hover {
color: #999;
border-color: #999;
}
.control:active {
color: #AAA;
color: #BF6F04;
border-color: #AAA;
border-color: #BF6F04;
}
.control.immutable {
pointer-events: none;
cursor: inherit;
}
.control.medium {
font-size: 32px;
width: 38px;
}
.control.medium2 {
font-size: 28px;
width: 38px;
}
.control.small {
font-size: 18px;
width: 26px;
}
.control.tiny {
padding: 2px 2px 2px 2px; /* trbl */
}
.control.teeny {
position: relative;
padding: 0px 0px 0px 0px; /* trbl */
top: 1px;
}
.control.enqueue + span, .control.unqueue + span {
margin-left: 0.3em;
}
.control.enqueue:before {
content: '\e603';
-webkit-font-smoothing: antialiased;
}
.control.unqueue:before {
content: '\e604';
-webkit-font-smoothing: antialiased;
}
.control.enqueue.unqueue:before {
content: '\e604';
-webkit-font-smoothing: antialiased;
}
//.control.enqueue.unqueue.first:before {
// content: '\e62e';
//}
.control.stars.n0:before {
content: '\e610\e610\e610';
-webkit-font-smoothing: antialiased;
}
.control.stars.n1:before {
content: '\e60f\e610\e610';
visibility: visible !important;
-webkit-font-smoothing: antialiased;
}
.control.stars.n2:before {
content: '\e60e\e610\e610';
visibility: visible !important;
-webkit-font-smoothing: antialiased;
}
.control.stars.n3:before {
content: '\e60e\e60f\e610';
visibility: visible !important;
-webkit-font-smoothing: antialiased;
}
.control.stars.n4:before {
content: '\e60e\e60e\e610';
visibility: visible !important;
-webkit-font-smoothing: antialiased;
}
.control.stars.n5:before {
content: '\e60e\e60e\e60f';
visibility: visible !important;
-webkit-font-smoothing: antialiased;
}
.control.stars.n6:before {
content: '\e60e\e60e\e60e';
visibility: visible !important;
-webkit-font-smoothing: antialiased;
}
li.showall, li.showusers, li.shownewest, li.showcubes {
position: relative;
top: 2px;
}
.userlist .owner {
font-size: 14px;
text-align: right;
}
.userlist td {
padding-right: 30px;
padding-bottom: 2px;
text-align: right;
}
li.showstars {
position: relative;
top: 1px;
}
li.showall {
font-size: 28px;
width: 16px;
top: -3px;
}
li.showall .icon-grid {
position: relative;
left: -6px;
}
li.showcubes {
font-size: 20px;
width: 16px;
top: 0px;
}
li.showcubes .icon-stack2 {
position: relative;
left: -2px;
}
.shy {
/* things that hide until their parent is hovered over */
visibility: hidden;
}
body.noshy .shy
{
visibility: visible;
}
li:hover > .shy, div:hover > .shy, td:hover > .shy, span:hover > .shy, .reveal:hover .shy
{
visibility: visible;
}
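/* Usage sketch for the .shy pattern (the surrounding markup here is hypothetical;
   otto's templates attach .shy to controls such as the enqueue/star buttons):
     <li>Track title <span class="control enqueue shy"></span></li>
   The inner control stays invisible until its direct parent li/div/td/span is
   hovered (or an ancestor carrying .reveal is), and is always visible when the
   body has the noshy class. */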
.albumdetails:hover .stars-container .stars {
visibility: visible;
}
.channelbar .channelname-container {
position: absolute;
}
.channelbar .channelname {
display: inline-block;
color: #6C6C6C;
min-width: 150px;
font-size: 26px;
margin-right: 20px;
}
.hostname {
font-size: 14px;
padding-left: 2px;
color: #111;
}
.channeltoggle {
position: absolute;
top: 7px;
left: 15px;
z-index: 10;
}
.channeltoggle .icon-close {
font-size: 24px;
}
.ondeckchattoggle-container {
position: relative;
}
.chattoggle-container {
position: absolute;
right: 10px;
bottom: -1px;
height: 4px;
width: 44px;
z-index: 9;
}
.chattoggle-container:hover, .ondeckchattoggle-container:hover .chattoggle-container {
background-color: #333;
border-left: 2px solid #111;
border-right: 2px solid #111;
}
.ondeckchattoggle-container:hover .chattoggle.shy {
visibility: visible;
}
.chattoggle {
position: relative;
top: -15px;
z-index: 10;
}
.inchat .chattoggle-container {
display: none;
}
.thisisme {
position: relative;
}
.listener:hover > .thisisme > .you {
visibility: hidden;
}
.listener:hover > .thisisme > .logout {
visibility: visible;
}
.you {
padding-left: 4px;
}
.logout {
position: absolute;
visibility: hidden;
left: 0px;
top: -3px;
z-index: 10;
}
.logo-container {
width: 100%;
}
.channelbar .logo-container {
margin-top: 5px;
}
.logo {
display: block;
margin: 0 auto;
width: 115px;
height: 30px;
//background-image: url('/static/images/otto-6C-460x100.png');
background-image: url('/static/images/otto-BF6F04-10-460x100.png');
//background-image: url('/static/images/otto-BF6F04-6C-460x100.png');
background-size: contain;
background-repeat: no-repeat;
}
.footer-logo-container {
margin-left: -33px; /* half of the browse-container left margin */
margin-top: 100px;
margin-bottom: 15px;
}
.footer-logo {
cursor: n-resize;
}
#connect {
}
#connect .ouroboros {
position: relative;
top: 1px;
}
#connect .icon-play {
position: relative;
top: -1px;
left: 2px;
font-size: 21px;
-webkit-font-smoothing: subpixel-antialiased;
}
#play {
/* the play symbol is too small, so we bump it up... */
/*font-size: 20px;*/
}
#next {
/* symbol is too small, so we bump it up... */
/*font-size: 20px;*/
}
#next.webcast {
display: none;
}
#play.webcast {
//margin-left: 55px;
//margin-right: 55px;
}
.currenttrack-container .stars {
display: inline-block;
//position: absolute;
//top: 58px;
//left: -32px;
position: relative;
top: 12px;
//top: 3px;
}
input[type="checkbox"] {
display: none;
}
input[type="checkbox"] + label {
font-family: 'icomoon';
color: #6C6C6C;
margin-right: 20px;
-webkit-touch-callout: none;
-webkit-user-select: none;
-khtml-user-select: none;
-moz-user-select: moz-none;
-ms-user-select: none;
user-select: none;
}
#notifications {
display: block;
font-size: 14px;
}
input[type="checkbox"]:checked + #notifications {
visibility: visible !important;
}
#fx {
display: block;
position: relative;
top: -3px;
font-size: 14px;
}
input[type="checkbox"]:checked + #fx {
visibility: visible !important;
}
#lineout {
display: inline-block;
position: relative;
margin-left: 8px;
top: 1px;
left: 12px;
margin-top: 5px;
font-size: 14px;
}
input[type="checkbox"]:checked + #lineout {
visibility: visible !important;
}
input[type="checkbox"] + label span {
display: inline-block;
position: relative;
left: 0.3em;
font-family: Verdana, "Helvetica Neue", Helvetica, Arial, sans-serif;
font-size: 1.2em;
color: #6C6C6C;
-webkit-touch-callout: none;
-webkit-user-select: none;
-khtml-user-select: none;
-moz-user-select: moz-none;
-ms-user-select: none;
user-select: none;
}
input[type="checkbox"] + label:before {
content: '\e611';
-webkit-font-smoothing: antialiased;
}
input[type="checkbox"]:checked + label:before {
content: '\e612';
-webkit-font-smoothing: antialiased;
}
input[type="checkbox"] + label:hover, input[type="checkbox"] + label:hover span {
color: #999;
}
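/* Checkbox styling sketch: the native input is hidden and its adjacent <label> is
   rendered with icomoon glyphs ('\e611' unchecked, '\e612' checked). The markup is
   roughly as follows (the input id and 'for' value are hypothetical -- only the
   label ids #notifications, #fx and #lineout are referenced by the rules above):
     <input type="checkbox" id="notifications-checkbox">
     <label id="notifications" for="notifications-checkbox"><span>notifications</span></label> */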
.mmenu.mmenu-horizontal ul /* why isn't this working */
{
-webkit-transition-duration: 0.4s;
-moz-transition-duration: 0.4s;
  transition-duration: 0.4s; /* the missing 's' unit made this declaration invalid */
}
.mmenu * {
text-shadow: none;
}
.mmenu li > div:hover {
color: #999;
cursor: pointer;
}
.ui-slider .ui-slider-handle {
width: 14px;
height: 14px;
border-radius: 14px;
top: -5px;
background-color: #6C6C6C;
background-image: none;
}
.ui-slider .ui-slider-handle:focus {
outline: none;
/*
background-color: #999;
border-color: #999;
*/
}
.ui-slider .ui-slider-horizontal {
}
.ui-slider.ui-widget-content {
background-image: none;
background-color: #333;
border: 2px solid #6C6C6C;
border-radius: 2px;
}
.ui-slider .ui-slider-range {
background-image: none;
background-color: #6C6C6C;
}
.ui-slider:hover .ui-slider-handle {
background-color: #999;
border-color: #999;
}
.ui-slider:hover.ui-widget-content {
border-color: #999;
}
.ui-slider:hover .ui-slider-range {
background-color: #999;
border-color: #999;
}
.ui-resizable-handle {
background-color: #6C6C6C;
/*padding-left: 40px;
padding-right: 40px;*/
}
.ui-resizable-handle-s {
}
.ui-resizable { position: relative;}
.ui-resizable-handle { position: absolute;font-size: 0.1px;z-index: 99999; display: block; }
.ui-resizable-disabled .ui-resizable-handle, .ui-resizable-autohide .ui-resizable-handle { display: none; }
.ui-resizable-n { cursor: n-resize; height: 7px; width: 100%; top: -5px; left: 0; }
.ui-resizable-s { cursor: pointer; height: 7px; width: 100%; bottom: -5px; left: 0; } /* 'hand' is IE-only; pointer is the standard cursor value */
.ui-resizable-e { cursor: e-resize; width: 7px; right: -5px; top: 0; height: 100%; }
.ui-resizable-w { cursor: w-resize; width: 7px; left: -5px; top: 0; height: 100%; }
.ui-resizable-se { cursor: se-resize; width: 12px; height: 12px; right: 1px; bottom: 1px; }
.ui-resizable-sw { cursor: sw-resize; width: 9px; height: 9px; left: -5px; bottom: -5px; }
.ui-resizable-nw { cursor: nw-resize; width: 9px; height: 9px; left: -5px; top: -5px; }
.ui-resizable-ne { cursor: ne-resize; width: 9px; height: 9px; right: -5px; top: -5px;}
.ui-resizable-s {
display: block; text-indent: -99999px; overflow: hidden; background-repeat: no-repeat; /* ui-icon */
position: relative;
top: 24px;
left: 50%;
width: 16px; height: 7px; background-image: url(images/ui-icons_222222_256x240.png);
background-position: -16px -229px;
}
.varioussep {
height: 2px;
width: 100%;
margin-bottom: 25px;
background-color: #111;
}
/**** from albumdetails.html template ****/
/* needs to be integrated in to the above */
.albumlist-container {
display: block;
position: relative;
background-color: #333;
float: left;
clear: both;
margin-top: 10px;
margin-right: 100px;
margin-bottom: 25px;
border: 2px solid #111;
border-radius: 3px;
padding: 10px;
// z-index:999;
}
.albumlist-container td {
background-color: #333;
}
.albumlist {
}
.albumlist .albumname {
margin-left: 14px;
}
.newestowner {
margin-top: 40px;
margin-bottom: 10px;
// margin-left: 62px;
}
.albumdetails {
position: relative;
margin-top: 10px;
padding-bottom: 30px;
min-height: 250px;
overflow: hidden;
}
.albuminfo > .album .stars {
margin-top: 4px;
}
.albumcover-container {
position: absolute;
top: 30px;
left: 40px;
}
.albumcover .stars-container {
position: relative;
width: 100%;
}
.albumcover .stars-container .stars {
top: 11px;
left: -6px;
float: left;
padding-top: 0px;
padding-right: 5px;
font-size: 16px;
font-weight: 700;
z-index: 10;
}
.albumcover .year-container {
position: relative;
width: 100%;
}
.albumcover .year {
float: right;
padding-top: 12px;
padding-right: 5px;
font-size: 14px;
font-weight: 700;
}
.albuminfo {
padding-top: 38px;
padding-left: 285px;
min-width: 400px;
}
.albuminfo .artist {
font-size: 24px;
font-weight: 700px;
position: relative;
}
.albuminfo .album {
font-size: 24px;
font-weight: 700px;
color: white;
position: relative;
}
.albuminfo .owner {
font-size: 12px;
}
.albumsongs {
position: relative;
left: -21px;
margin-top: 20px;
}
.albumsongs td {
line-height: normal !important;
font-size: 16px;
}
.albummisc {
clear: both;
margin-left: 10px;
font-family: fenwick-1, fenwick-2, serif;
font-family: Andale Mono, monospace;
font-size: 14px;
}
.albumdir {
position: absolute;
bottom: 5px;
left: 288px;
}
.inqueue {
  color: white;
}
.ondeck .inqueue {
color: #6C6C6C;
}
.inqueue.first {
font-weight: 700;
}
.closer:after {
  content: 'x';
}
.closer {
position: absolute;
top: 4px;
left: 4px;
width: 18px;
height: 18px;
border-radius: 20px;
border: solid;
border-width: 2px;
border-color: #6C6C6C;
background-color: transparent;
opacity: 0.70;
font-size: 14px;
font-weight: 800;
text-align: center;
}
.close {
position: absolute;
top: 6px;
left: 6px;
opacity: 0.50;
z-index: 10;
}
.close.lower {
top: inherit;
bottom: 6px;
z-index: 10;
}
#dirbrowser #path li {
color: green;
}
#dirbrowser #subdirs li {
color: yellow;
}
#playlist {
/* overflow: auto; */
/*padding-bottom: 20px;*/
border-bottom: 2px solid #111;
}
#playlist.featured {
border-bottom: none;
}
#playlist ul {
}
#playlist .control {
margin-left: 3px;
margin-right: 3px;
}
#playlist-more {
position: relative;
float: left;
bottom: 24px;
width: 100%;
height: 30px;
z-index: 100;
background-color: #111;
background: -moz-linear-gradient(top, rgba(0,0,0,0) 0%, rgba(0,0,0,1) 100%); /* FF3.6+ */
background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,rgba(0,0,0,0)), color-stop(100%,rgba(0,0,0,1))); /* Chrome,Safari4+ */
background: -webkit-linear-gradient(top, rgba(0,0,0,0) 0%,rgba(0,0,0,1) 100%); /* Chrome10+,Safari5.1+ */
background: -o-linear-gradient(top, rgba(0,0,0,0) 0%,rgba(0,0,0,1) 100%); /* Opera 11.10+ */
background: -ms-linear-gradient(top, rgba(0,0,0,0) 0%,rgba(0,0,0,1) 100%); /* IE10+ */
  background: linear-gradient(to bottom, rgba(0,0,0,0) 0%,rgba(0,0,0,1) 100%); /* W3C (the final syntax needs 'to bottom') */
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#000000', endColorstr='#00000000',GradientType=0 ); /* IE6-9 */
}
.console-container {
display: none;
position: relative;
width: 100%;
height: 176px;
background-color: #333;
font: inherit;
border-bottom: 2px solid #111;
}
.console-container:focus {
outline: none;
}
.console-container #input {
font-size: 14px;
font-family: DejaVu Sans Mono, monospace;
}
.console-container .chattoggle {
position: absolute;
top: 0px;
right: 14px;
z-index: 10;
background-color: rgba(51,51,51,0.6);
}
.console-container .chattoggle .icon-close {
font-size: 24px;
}
.output-container {
margin-left: 18px;
}
.output {
width: 100%;
height: 145px;
word-wrap: break-word;
overflow-y: auto;
overflow-x: hidden;
}
.input-container {
position: absolute;
left: 53px;
right: 0px;
bottom: 0px;
// width: 100%;
//padding-left: 53px;
}
.inputl {
position: absolute;
bottom: 1px;
background-color: #333;
}
#prompt {
display: inline-block;
position: relative;
bottom: 5px;
padding-left: 2px;
background-color: #111;
font-size: 18px;
/*line-height: 18px;*/
font-weight: 800;
background-color: #333;
-webkit-font-smoothing: none;
}
#prompt:before {
content: ']';
}
.inputr-container {
position: absolute;
bottom: 0;
left: 13px;
right: 0px;
//margin-left: 13px;
//width: 100%;
background-color: #333;
}
#inputr { /* this breaks if changed to .inputr! */
display: inline-block;
background-color: #333;
}
#input {
top: 1px;
width: 100%;
height: 1.3em;
border: none;
outline: none;
background-color: #111;
color: #fff;
font: inherit;
margin: 0;
padding: 0;
resize: none;
position: absolute;
overflow: hidden;
}
#inputcopy {
display: none;
white-space: pre-wrap;
min-height: 2em;
padding: 0;
margin: 0;
word-wrap: break-word;
}
.output em {
font-style: italic;
}
.output strong {
font-weight: 800;
}
.output .event {
white-space: nowrap;
}
.event .user {
padding-right: 1em;
}
.event .timestamp {
display: inline-block;
min-width: 40px;
}
.event p { /* markdown converter emits 'p's */
margin: 0;
padding: 0;
}
.output .timestamp , .output .user {
display: inline-block;
vertical-align: top;
}
.output .timestamp {
font-size: 10px;
padding-top: 5px;
color: #555;
padding-right: 8px;
}
.output .sep:after {
color: #555;
}
.output .user {
font-size: 12px;
padding-top: 3px;
color: #555;
}
.output .sep {
white-space: nowrap;
}
.output .message, .output .name {
display: inline-block;
padding-right: 200px; /* this doesn't work when usernames get big :( FIXME */
font-size: 14px;
white-space: normal;
color: #6C6C6C;
}
.output p {
}
.cmd .clipboard {
position: absolute;
bottom: 0;
left: 0;
opacity: 0.01;
filter: alpha(opacity = 0.01);
filter: progid:DXImageTransform.Microsoft.Alpha(opacity=0.01);
width: 2px;
}
.cmd > .clipboard {
position: fixed;
}
.cmd {
padding: 10px;
position: relative;
overflow: hidden;
}
.cmd {
padding: 0;
margin: 0;
height: 1.3em;
}
#terminal .terminal-output div {
display: block;
}
/*
.cmd, #terminal .terminal-output, #terminal .terminal-output div,
#terminal .terminal-output div div, .cmd, .cmd span, .cmd div {
font-family: monospace;
color: #6C6C6C;
background-color: #000;
font-size: 16px;
line-height: 16px;
}
*/
.cmd span {
float: left;
}
.cmd span.inverted {
background-color: #6C6C6C;
color: #000;
}
.cmd div::-moz-selection, #terminal span::-moz-selection {
background-color: #6C6C6C;
color: #000;
}
.cmd div::selection, #terminal span::selection {
background-color: #6C6C6C;
color: #000;
}
#terminal .terminal-output div.error, #terminal .terminal-output div.error div {
color: red;
}
.tilda {
position: fixed;
top: 0;
left: 0;
width: 100%;
z-index: 1100;
}
.clear {
clear: both;
}
.cmd {
font-family: monospace;
color: #6C6C6C;
background-color: #000;
font-size: 16px;
line-height: 16px;
}
#webcast-container {
display: block;
position: relative;
height: 800px;
/*width: 1300px;*/
overflow: hidden;
}
#webcast-background {
display: block;
position: absolute;
top: -400px;
left: -300px;
height: 1200px;
width: 1600px;
z-index: 0;
}
#webcast-background-attribution {
position: fixed;
bottom: 30px;
right: 60px;
color: #6C6C6C;
font-size: 18px;
font-weight: 800;
padding: 10px 40px;
background-color: rgba(51, 52, 51, 0.8);
border-radius: 100px;
z-index: 1;
}
#webcast-background-link:link, #webcast-background-link:visited, #webcast-background-link:active {
color: #6C6C6C;
text-decoration: none;
background: none;
}
#webcast-background-link:hover {
color: #999;
}
#webcast-overlay {
display: block;
position: absolute;
top: 64px;
left: 95px;
padding: 10px 80px;
background-color: rgba(51, 52, 51, 0.8);
border-radius: 100px;
z-index: 1;
}
#webcast-controls {
display: inline-block;
position: relative;
top: -10px;
margin-right: 30px;
}
.webcast-title {
display: inline-block;
width: 100%;
color: lightgray;
text-align: center;
font-size: 40px;
margin-top: 15px;
margin-bottom: 10px;
}
#webcast-compatability, #webcast-chatpointer {
display: inline-block;
width: 100%;
color: #6C6C6C;
text-align: center;
font-size: 16px;
}
#archive-container {
display: block;
position: relative;
height: 1200px;
width: 1600px;
overflow: hidden;
}
#archive-background {
display: block;
position: absolute;
top: -400px;
left: -300px;
height: 1200px;
width: 1600px;
z-index: 0;
}
#archive-background-attribution {
position: fixed;
bottom: 20px;
right: 20px;
color: #6C6C6C;
font-size: 18px;
font-weight: 800;
  background-color: transparent; /* 'none' is not a valid background-color value */
z-index: 1;
}
#archive-background-link:link {
  color: #6C6C6C;
  text-decoration: none;
  background: none;
}
#archive-background-link:visited {
  color: #6C6C6C;
  text-decoration: none;
  background: none;
}
#archive-background-link:active {
  color: #6C6C6C;
  text-decoration: none;
  background: none;
}
#archive-background-link:hover {
  color: #6C6C6C;
  text-decoration: none;
  background: none;
}
#archive-overlay {
display: block;
position: absolute;
margin-top: 30px;
margin-left: 50px;
/*background-color: rgba(0, 0, 0, 0.3);*/
  background-color: transparent; /* 'none' is not a valid background-color value */
z-index: 1;
}
.archive-title {
color: lightgray;
font-size: 40px;
margin-left: 80px;
}
.artistlist > .stars {
display: inline-block;
position: absolute;
top: 24px;
}
.starreditem {
margin-top: 1px;
margin-left: 25px;
}
.starredartist > .stars {
top: 12px;
}
.starredalbum > .stars {
top: 20px;
}
.starreditem .artistlist {
display: inline-block;
margin-top: 2px;
margin-left: -43px;
margin-bottom: 5px;
}
.starredalbum {
margin-top: -1px;
margin-bottom: 7px;
}
.starreditem .albumlist {
display: inline-block;
margin-left: -12px;
}
.search .artistlist {
margin-left: -15px;
}
.search .albumlist {
margin-left: 15px;
}
.albumall {
display: inline-block;
position: relative;
}
.albumall .stars {
display: inline-block;
position: absolute;
top: 106px;
left: 64px;
//background-color: rgba(0,0,0,0.3);
z-index: 10;
}
.channellist-container {
}
.channellistheader {
height: 81px;
border-bottom: 2px solid black;
}
/*
.channellistheader .channeltoggle .span {
font-size: 100px;
}
*/
.channelselect {
position: relative;
top: 1px;
padding-top: 10px;
margin-left: 40px;
margin-right: 60px;
}
.channelselect .channelname {
height: 23px;
}
.currentchannel .channelselect {
color: #BF6F04;
}
.channellisteners {
min-height: 23px;
line-height: 18px;
//font-weight: 300;
}
.channellisteners .count {
margin-right: 14px;
}
.channelsettings {
position: absolute;
width: 10px;
padding: 10px 25px 35px 8px;
top: 1px;
left: 3px;
}
.channelsettings .icon-info {
font-size: 20px;
}
.open .channelsettings {
visibility: visible;
}
.changechannel .settings {
position: relative;
top: 2px; /* the extra 2px hides border */
height: 0;
margin: 10px 16px 0 15px;
padding: 0px 0 0 20px;
//border-top: 2px solid #111;
overflow: hidden;
transition: height 0.2s ease 0.14s;
}
.open.changechannel .settings {
height: 70px;
}
.changechannel .settings .channelsettings {
position: absolute;
top: 2px;
left: 274px;
}
.changechannel .settings .channelplay {
position: relative;
top: 22px;
left: 1px;
}
.changechannel.playing .settings .channelplay .icon-play2:before {
content: "\e602";
}
.changechannel .settings .volumelineout-container {
top: -12px;
left: -35px;
}
.changechannel .channelerrata-container {
position: absolute;
width: 100%;
top: 1px;
left: 5px;
color: #111;
font-size: 12px;
}
.changechannel .time-container {
position: relative;
width: 113px;
height: 18px;
top: 0px;
}
.changechannel .time {
position: absolute;
top: 2px;
right: 0px;
}
.changechannel #time,
.changechannel #total-time {
color: #111;
}
.changechannel .audio {
position: absolute;
top: 0px;
left: 170px;
color: #111;
font-size: 12px;
}
.changechannel .bitrate {
position: absolute;
top: 0px;
right: 78px;
color: #111;
font-size: 12px;
}
.changechannel .crossfade, .changechannel .replaygain {
position: relative;
bottom: 6px;
font-size: 14px;
}
.changechannel .crossfade {
left: 175px;
}
.changechannel .replaygain {
left: 190px;
}
.changechannel .settings .channelsettings .icon-close {
font-size: 16px;
}
.channeloutput {
position: absolute;
width: 10px;
padding: 10px 35px 35px 8px;
top: 2px;
right: 0px;
z-index: 10;
}
.lineout .channeloutput {
visibility: visible;
}
.lineout .channeloutput .icon-volume-mute:before {
visibility: visible;
content: "\e638";
}
.currentchannel .channeloutput .icon-volume-mute:before {
visibility: visible;
font-weight: 700;
color: #BF6F04;
content: "\e63a";
}
.currentchannel.lineout .channeloutput .icon-volume-mute:before {
content: "\e638";
}
@media (max-width: 480px) {
.chattoggle { display: none; }
.logo-container { display: none; }
.listeners-container { display: none; }
.shy {
visibility: visible;
}
.channelbar-container {
border: none;
height: 150px;
}
.channeltoggle {
left: -20px;
}
.maincontrols-center { /* not used anymore */
position: absolute;
top: 80px;
left: 50%;
margin-left: -200px;
}
.control {
-webkit-transform: scale(1.3, 1.3);
}
.channeltoggle {
-webkit-transform: scale(1.2, 1.2);
}
.next-container {
position: absolute;
top: -242px;
left: -2px;
}
.currenttrack-container {
margin-top: 350px;
}
.currentcover-container {
position: absolute;
top: -380px;
width: 300px;
-webkit-transform: scale(0.5, 0.5);
left: 50%;
margin-left: -150px;
}
.currentcover-container img {
}
.time-container {
display: block;
position: absolute;
top: -22px;
left: 120px;
-webkit-transform: scale(1.5, 1.5);
}
.progress-container {
position: absolute;
top: -50px;
left: 80px;
-webkit-transform: scale(2.0, 2.0);
}
.progress {
width: 140px !important;
}
.currentsong-container {
position: absolute;
top: -140px;
left: 20px;
}
.currentalbum-container {
position: absolute;
top: -110px;
left: 20px;
}
.currentartist-container {
position: absolute;
top: -85px;
left: 20px;
}
.currentyear-container {
position: absolute;
top: -238px;
right: 20px;
}
.currenttrack {
overflow: visible;
}
.currenttrack .stars {
position: absolute;
top: -150px;
left: 230px;
}
}
@media (min-width: 481px) {
}
<|start_filename|>static/css/about.css<|end_filename|>
body {
margin: 0px;
font-family: 'Lato', helvetica, arial;
font-size: 24px;
font-weight: 300;
color: #333;
background-color: #333c46;
background-color: #F38C1D;
background-color: #E37C0D;
background-color: #A33F17;
background-color: #BF6F04;
}
.content {
max-width: 727px;
margin: 100px auto;
}
h1 {
text-align: center;
font-size: 60px;
font-weight: 300;
margin-bottom: 10px;
}
h2 {
font-size: 36px;
font-weight: 900;
}
h3 {
font-size: 24px;
font-weight: 600;
text-align: center;
margin-top: 10px;
margin-bottom: 10px;
letter-spacing: 1px;
}
.betatag {
display: inline-block;
position: relative;
padding: 0px 5px;
color: darkred;
top: -10px;
left: 5px;
font-size: 12px;
border: 2px solid;
border-radius: 3px;
}
.note {
margin-top: 60px;
margin-bottom: 40px;
margin-left: 3.5em;
text-indent: -3.5em;
}
.notetag {
color: darkred;
font-weight: bold;
}
.mainscreenshot {
display: block;
margin: 50px auto 55px auto;
width: 720px;
height: 384px;
//border-radius: 10px;
}
.screenshot-container {
display: inline-block;
margin: 40px 0px 10px 40px;
}
.screenshot-container > p {
margin: 0px auto;
text-align: center;
font-weight: bold;
}
.screenshot {
width: 300px;
height: 247px;
border-radius: 10px;
}
.spacer {
height: 30px;
}
.gap {
display: inline-block;
width: 30px;
}
.homepagelink {
text-decoration: none;
color: #333;
}
.homepagelink:hover {
text-decoration: underline;
}
.button-cluster {
margin-top: 15px;
}
.button-cluster td:first-of-type {
padding-right: 30px;
}
.small-button {
height: 20px;
width: 80px;
display: inline-block;
background-color: #BF6F04;
//color: darkred;
//background-color: rgba(0,0,0,0);
color: #6C6C6C;
color: #333;
font-size: 11px;
line-height: 8px;
font-weight: bold;
text-align: center;
  cursor: pointer; /* fixme */
border: none;
margin: 0;
vertical-align: top;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
border-radius: 3px;
border-width: 2px;
border-color: #404040;
border-style: solid;
}
.small-button:focus {
outline: none;
}
.small-button:hover {
color: #555;
border-color: #555;
}
.small-button:active {
color: #666;
border-color: #666;
}
<|start_filename|>otto.client.player.coffee<|end_filename|>
###############
### client side (body of otto.client.player.coffee served as /otto.player.js)
###############
global.otto.client.player = ->
window.otto.client.player = do -> # note the 'do' causes the function to be called
$('head').append '<script src="static/js/jquery.jplayer.min.js">'
#$('head').append '<script src="static/js/jquery.jplayer.inspector.js">'
player = {}
$(window).on 'unload', ->
player.destroy_jplayer()
player.connect = (channelname) ->
player.channelname = channelname
console.log 'creating jplayer for channel', player.channelname
player.state 'connecting'
player.create_jplayer()
#$('#results').jPlayerInspector({jPlayer:$("#jplayer")})
#$('#jplayer_inspector_toggle_0').click()
player.disconnect = ->
player.state 'disconnected'
player.destroy_jplayer()
player.destroy_jplayer = ->
player.$jplayer.remove() if player.$jplayer
player.create_jplayer = ->
player.destroy_jplayer()
player.$jplayer = $ '<div id="jplayer">'
$(document.body).prepend player.$jplayer
player.$jplayer.jPlayer
swfPath: '/static/js'
supplied: 'mp3,oga' # historical note: live streaming from traktor uses oga
#supplied: 'wav'
wmode: 'window'
errorAlerts: false
warningAlerts: false
volume: otto.current_volume / 100
solution: 'flash, html' # as opposed to 'html, flash'
preload: 'none'
timeupdate: player.jplayer_event_handler
progress: player.jplayer_event_handler
ended: player.jplayer_event_handler
error: player.jplayer_event_handler
ready: player.jplayer_event_handler
#play: player.jplayer_event_handler
#pause: player.jplayer_event_handler
player.jplayer_event_handler = (e) ->
if e.type not in ['jPlayer_timeupdate', 'jPlayer_progress']
console.log "jplayer event #{e.type} state #{player.state()}"
switch e.type
when 'jPlayer_ready'
player.$jplayer.jPlayer 'setMedia',
title: "Otto"
mp3: "/stream/#{player.channelname}/mp3"
ogg: "/stream/#{player.channelname}/ogg"
#wav: "/stream/#{player.channelname}/wav"
player.$jplayer.jPlayer 'play'
player.state 'buffering'
when 'jPlayer_ended'
if player.state() isnt 'disconnected'
player.state 'reconnecting'
nextTick ->
player.create_jplayer()
when 'jPlayer_error'
if e.jPlayer.error.type in [$.jPlayer.error.URL, $.jPlayer.error.FLASH]
timeoutSet 1000, ->
if player.state() isnt 'disconnected'
player.state 'reconnecting'
player.create_jplayer()
else
console.log "jplayer error #{e.jPlayer.error.type}"
when 'jPlayer_timeupdate'
if e.jPlayer.status.currentTime and player.state() not in ['connected', 'disconnected']
player.state 'connected'
player.timeupdate e.jPlayer.status.currentTime, e.jPlayer.status.duration
when 'jPlayer_progress'
return #!#
#if player.state() in ['buffering', 'underrun']
#if e.jPlayer.status.duration != progress_last
#$('#connect').html otto.templates.ouroboros size: 'small', direction: 'cw', speed: 'normal'
#progress_last = e.jPlayer.status.duration
#if not player.buffering_state
# player.buffering_state = true
# #$('#connect').html otto.templates.ouroboros size: 'small', direction: 'cw', speed: 'normal'
# $('#connect .ouroboros .ui-spinner').addClass('cw').removeClass('ccw')
player.timeupdate = do ->
lasttime = 0
cycles = 4 # slows it down this many timeupdate events
cycle = cycles # setup so first one causes a change
(currenttime, duration) ->
#console.log "timeupdate! #{currenttime} #{duration} #{lasttime} #{cycles} #{cycle}"
if currenttime != lasttime
lasttime = currenttime
if cycle < cycles # slow it down
cycle += 1
else
cycle = 0
# this is where the old streaming in progress indicator was, do we want a new one? FIXME
#console.log 'pulse'
else
if player.state() is 'connected'
            if currenttime + 1 > duration
console.log 'setting state to underrun'
player.state 'underrun'
else
player.state 'skipping' # a very odd state jplayer gets into
console.log 'jplayer skipping detected, restarting jplayer'
player.create_jplayer()
player.state = (newstate) ->
return otto.connect_state if not newstate
console.log 'player state', newstate
otto.connect_state = newstate
switch newstate
when 'disconnected'
#$('#connect').html otto.templates.icon 'disconnected' # put correct play icon back FIXME
#$('#connect').html '<img src="static/images/disconnected.svg" height="20" width="20">'
$('#connect').html otto.templates.icon 'connect'
when 'connecting'
# the ouroboros might already be there from when the module was being loaded
if not $('#connect>:first-child').is '.ouroboros'
$('#connect').html otto.templates.ouroboros size: 'small', direction: 'cw', speed: 'slow'
when 'connected'
#$('#connect').html otto.templates.icon 'connected'
$('#connect').html '<img src="static/images/connected.svg" height="20" width="20">'
when 'reconnecting'
$('#connect').html otto.templates.ouroboros size: 'small', direction: 'cw', speed: 'slow'
when 'skipping'
$('#connect').html otto.templates.ouroboros size: 'small', direction: 'cw', speed: 'slow'
#when 'buffering'
#when 'underrun'
player.setvolume = (volume) ->
otto.current_volume = volume
if player.$jplayer
player.$jplayer.jPlayer 'option', 'volume', otto.current_volume / 100
return player
<|start_filename|>otto.loader.coffee<|end_filename|>
fs = require 'fs'
net = require 'net'
#posix = require 'posix'
posix = require 'fs'
jsonstream = require 'JSONStream'
child_process = require 'child_process'
require './otto.misc' # attaches to global.otto.misc
otto = global.otto
global.otto.loader = do -> # note the 'do' causes the function to be called
loader = {}
loading = false
child = false
loader.load = (zappa, path) ->
console.log 'loader.load'
console.log 'path', path
#shell_cmd_debug('pwd')
#shell_cmd_debug('env')
if loading
zappa.io.sockets.emit 'loader', 'started' # just for dev? perhaps not!
return 'currently loading'
else
loading = true
opts =
#detached: true
#env :
# DYLD_FALLBACK_LIBRARY_PATH: otto.OTTO_LIB
# LD_LIBRARY_PATH: otto.OTTO_LIB
console.log 'spawning scan.py'
args = ['-u', otto.OTTO_ROOT + '/scan.py', '-j']
if path then args.push path
child = child_process.spawn otto.OTTO_BIN + '/python', args, opts
#console.log 'child', child
#child.unref()
console.log child.pid
console.log 'loader started'
parser = jsonstream.parse([true]) # we'll take anything
#res.send '<pre>'
zappa.io.sockets.emit 'loader', 'started'
loader_says = (data) ->
console.log 'loader: ' + data
#res.send String(data)
zappa.io.sockets.emit 'loader', String(data)
child.stdout.pipe(parser)
child.stderr.on 'data', loader_says
starter = []
parser.on 'data', (data) ->
#if data.stdout
# loader_says(data.stdout)
#console.log 'loader: ', data
zappa.io.sockets.emit 'loader', data
if data.album
console.log 'loader says album:', data
if data.songs
for song in data.songs
if song.song and hash_code2(song.song) in [-647063660, -1208355988]
starter.push song
console.log 'spotted one!', song.song
child.on 'exit', (code, signal) ->
child = false
loading = false
return if otto.exiting
console.log "loader exited with code #{code}"
if signal then console.log "...and signal #{signal}"
#res.end()
wasempty = otto.db.emptydatabase
otto.db.emptydatabase = false
firstchannel = false
for own channelname of otto.channels.channel_list
channel = otto.channels.channel_list[channelname]
if not firstchannel then firstchannel = channelname
do (channelname) ->
if channelname is firstchannel
if wasempty and starter.length
id = starter[Math.floor Math.random() * starter.length]._id
channel.add_to_queue id, 'otto', ->
console.log 'starter', id
status = if code or signal then 'error' else 'finished'
zappa.io.sockets.emit 'loader', status
else
channel.autofill_queue ->
console.log 'initial autofill done, channelname ', channelname
zappa.io.sockets.emit 'loader', if code or signal then 'error' else 'finished'
else
channel.autofill_queue ->
console.log 'initial autofill done, channelname ', channelname
loader.cancel = (zappa) ->
if loading and child
child.kill()
hash_code = (str) ->
hash = 0
for char in str
hash = ((hash<<5)-hash)+char.charCodeAt(0)
hash = hash & hash # Convert to 32bit integer
return hash
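  # hash_code2: a looser title hash — lowercase the string, keep only the first ~10
  # characters matching [abdf-prstv-z] (dropping vowels like e/u, punctuation, etc.),
  # then hash that, so small spelling/punctuation differences still match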
hash_code2 = (str) ->
hashstr = ''
for c in str.toLowerCase()
if /[abdf-prstv-z]/.test(c)
hashstr = hashstr + c
if hashstr.length > 9
break
return hash_code(hashstr)
shell_cmd_debug = (cmd, args, callback) ->
child = child_process.spawn(cmd, args)
buffer = ''
child.stdout.on 'data', (output) -> buffer += output
child.stdout.on 'end', -> if callback then callback buffer else console.log 'shell_cmd_debug', cmd, ':\n', buffer
return loader
<|start_filename|>otto.server.coffee<|end_filename|>
fs = require 'fs'
path = require 'path'
zappajs = require 'zappajs'
jsonreq = require 'jsonreq'
zipstream = require 'zipstream'
querystring = require 'querystring'
compression = require 'compression'
connectmongo = require 'connect-mongo'
connect = require 'zappajs/node_modules/express/node_modules/connect'
Session = connect.middleware.session.Session
zappa = null
otto.server = ->
console.log 'we think we are installed in', otto.OTTO_ROOT
console.log 'zappa version', zappajs.version
zappa = zappajs.app server_go
otto.zappa = zappa
otto.port = 8778 # 8778 fo'eva
console.log 'otto on port', otto.port
zappa.server.listen otto.port #note! it's server.listen, not app.listen!
zappa.server.on 'listening', ->
otto.listening = true
otto.on_listening_hook() if otto.on_listening_hook
otto.zeroconf.createMDNSAdvertisement()
otto.sessionlist = {}
otto.sessioniplist = {}
otto.socketlist = {}
#############################################
## the zappa application function starts here
#############################################
server_go = ->
# @ = zappa in this context
# from http://stackoverflow.com/questions/6819911/nodejs-expressjs-session-handling-with-mongodb-mongoose
MongoStore = connectmongo(@express)
otto.sessionStore = new MongoStore(otto.db.dbconf)
otto.ourlisteners = new otto.listeners.Listeners()
@use 'cookieParser'
#@use session: { store: otto.sessionStore, secret: otto.SECRET, key: 'express.sid'}, =>
# # we should wait for this callback before proceeding FIXME <- wait!
# # it seems to trigger on each connection? or maybe each socket.io message?
# # otherwise we risk getting a "Error setting TTL index on collection" error
# # see https://github.com/kcbanner/connect-mongo/pull/58#issuecomment-32148111
# console.log 'session db connection established'
@use {session: {store: otto.sessionStore, secret: otto.SECRET, key: 'express.sid', cookie: {maxAge: 365 * 24 * 60 * 60 * 1000}}}
#@app.configure 'development', =>
# @app.use otto.misc.debug_request
@app.use compression() # wondering how much this helps, esp. locally
@app.use '/static', @express.static(__dirname + '/static')
@app.use @express.favicon(__dirname + '/static/images/favicon.ico')
@app.use otto.misc.authenticate_user
@app.use (req, res, next) ->
req.session.sessionID = req.sessionID
next()
@app.use (req, res, next) ->
otto.ourlisteners.set_user req.session
next()
@app.use (req, res, next) ->
otto.sessionlist[req.sessionID] = req.session
next()
@io.set 'authorization', otto.misc.socket_authenticate_user
@enable 'serve jquery', 'serve sammy', 'serve zappa'
@use 'partials'
# we define our own layout (so we can have <links> in the <head> and <scripts> at the bottom)
#@enable 'default layout'
@io.set 'log level', 2
@on 'connection': ->
console.log 'sio connection'
otto.socketlist[@id] = @socket
session = socket_get_session @
#console.log 'session is', session
if not session
console.log 'telling client to resession'
@emit 'resession'
else
@emit 'proceed'
# now we wait for the client to say 'hello'
@on 'disconnect': ->
console.log 'sio disconnection!'
session = socket_get_session @
if session
otto.ourlisteners.remove_socket session, @
@on 'hello': ->
console.log 'sio hello client!'
if not session = socket_get_session @ then return
data = {}
data.channellist = otto.channelinfolist
sessionip = socket_get_sessionip @
console.log 'sessionip', sessionip
    # the [^0-9.] test checks that the user name is a real name, not just an ip address
if session.user and /[^0-9.]/.test(session.user)
username = session.user
else if sessionip and sessionip.localhost
# auto login the local app
username = process.env['USER'] || ''
else
username = ''
channelname = session.channelname || 'main'
sessionSet session, user: username, channelname: channelname, =>
otto.ourlisteners.change_user session
data.myusername = session.user
console.log 'telling client their username is', data.myusername
otto.ourlisteners.add_socket session, @
# prime the client state
for state,val of @data
if state is 'idle'
val = if @data then (new Date()).getTime() else 0
otto.ourlisteners.set_state session.sessionID, @id, state, val
data.mychannel = session.channelname
console.log 'telling client their channel is', data.mychannel
if sessionip
data.localhost = sessionip.localhost
otto.ourlisteners.change_channel session
@join(data.mychannel)
data.haslineout = process.platform is 'darwin'
data.largedatabase = otto.db.largedatabase
data.emptydatabase = otto.db.emptydatabase
data.musicroot = '/Users/' + process.env['USER'] + '/Music' # FIXME
#console.log 'emitting welcome packet', data
@emit 'welcome', data
@on 'updateme': ->
console.log 'sio updateme'
# initiated by the client
# i'm sure parts of this causes all clients to be updated, perhaps we can FIXME someday
if not session = socket_get_session @ then return
channellist = []
for own channelname of otto.channels.channel_list
channel = otto.channels.channel_list[channelname]
channellist.push {name: channel.name, fullname: channel.fullname}
# hmmm... we don't even use what we just built FIXME
@emit 'channellist', otto.channelinfolist
channelinfo = otto.channels.channel_list[session.channelname]
if channelinfo
#otto.channels.channel_list[session.channelname].refresh()
@emit 'queue', channelinfo.queue
@emit 'state', channelinfo.state
@emit 'time', channelinfo.time
allstatus = {}
alllineout = {}
alloutputs = {}
allreplaygain = {}
for name,channel of otto.channels.channel_list
allstatus[name] = channel.status
alllineout[name] = channel.lineout
alloutputs[name] = channel.outputs
allreplaygain[name] = channel.replaygain
@emit 'status', allstatus
@emit 'lineout', alllineout
@emit 'outputs', alloutputs
@emit 'replaygain', allreplaygain
if session.user and /[^0-9.]/.test(session.user)
#console.log "telling the client their username #{session.user}"
#@emit 'myusername', session.user
otto.db.load_stars session.user, false, (stars) =>
@emit 'stars', stars
otto.db.load_all_lists false, (err, data) =>
@emit 'lists', data
# hack to force a listeners update
otto.ourlisteners.update()
@on 'begin': ->
console.log 'sio begin'
if not session = socket_get_session @ then return
channel = otto.channels.channel_list[session.channelname]
if channel
zappa.io.sockets.in(session.channelname).emit 'begun'
channel.autofill_queue ->
channel.play 0, ->
@on 'selectfolder': ->
console.log 'sio selectfolder'
if not session = socket_get_session @ then return
# bounce this message from the webview client to Otto.py
#zappa.io.sockets.in(session.channelname).emit 'selectfolder' # sends to everyone, for now FIXME
@broadcast 'selectfolder' # sends to everyone (except self), for now FIXME
@on 'changechannel': ->
console.log 'sio changechannel'
if not session = socket_get_session @ then return
newchannelname = @data
console.log 'changing channel to', newchannelname
channel = otto.channels.channel_list[newchannelname]
if channel
oldchannelname = session.channelname
sessionSet session, channelname: newchannelname, =>
if session.channelname != oldchannelname
otto.ourlisteners.change_channel session
apply_across_all_tabs session, ->
@leave(oldchannelname)
@join(session.channelname)
@emit 'mychannel', name: channel.name, fullname: channel.fullname
otto.ourlisteners.update()
else
console.log 'not a valid channel name'
@on 'login': ->
name = @data
console.log 'sio login', name
if not session = socket_get_session @ then return
if session
console.log session.sessionID
sessionSet session, user: name, =>
console.log "telling the client their username #{session.user}"
@emit 'myusername', session.user
otto.db.load_stars session.user, false, (stars) =>
console.log 'about to emit preloaded stars'
@emit 'stars', stars
otto.ourlisteners.change_user session
otto.ourlisteners.update()
@on 'logout': ->
console.log 'sio logout'
if not session = socket_get_session @ then return
if session.sessionID
console.log session.sessionID
sessionSet session, user: '', =>
console.log 'telling the client they are logged out'
@emit 'myusername', session.user
otto.ourlisteners.change_user session
otto.ourlisteners.update()
@on 'play': (socket) ->
console.log 'sio play'
if not session = socket_get_session @ then return
if otto.channels.channel_list[session.channelname]
otto.channels.channel_list[session.channelname].play @data, ->
#zappa.io.sockets.in(session.channelname).emit 'state', 'play'
# pause also unpauses
@on 'pause': (socket) ->
console.log 'sio pause'
if not session = socket_get_session @ then return
if otto.channels.channel_list[session.channelname]
otto.channels.channel_list[session.channelname].pause ->
#zappa.io.sockets.in(session.channelname).emit 'state', 'pause'
# this just pauses, Otto.py uses it for 'stop'
# don't want to use mpd command stop as that might disconnect things
@on 'pauseifnot': (socket) ->
console.log 'sio pauseifnot'
if not session = socket_get_session @ then return
if otto.channels.channel_list[session.channelname]
otto.channels.channel_list[session.channelname].pauseifnot ->
#zappa.io.sockets.in(session.channelname).emit 'state', 'pause'
@on 'toggleplay': (socket) ->
console.log 'sio toggleplay'
if not session = socket_get_session @ then return
channelname = @data || session.channelname
console.log 'channel', channelname
if otto.channels.channel_list[channelname]
otto.channels.channel_list[channelname].toggleplay ->
@on 'seek': (socket) ->
console.log 'sio seek', @data
if not session = socket_get_session @ then return
if otto.channels.channel_list[session.channelname]
otto.flush_streams(session.channelname)
otto.channels.channel_list[session.channelname].seek @data, ->
# no longer used
@on 'lineout': (socket) ->
console.log 'sio lineout', @data
if not session = socket_get_session @ then return
if otto.channels.channel_list[session.channelname]
otto.channels.channel_list[session.channelname].set_lineout @data, ->
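  # turning a channel's lineout on switches off every other channel's lineout,
  # unless the alt flag is set (then multiple lineouts may stay active)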
@on 'togglelineout': ->
console.log 'sio togglelineout', @data
channelname = @data.channelname
alt = @data.alt
if otto.channels.channel_list[channelname]
if otto.channels.channel_list[channelname].lineout == '1'
otto.channels.channel_list[channelname].set_lineout 0
else
if not alt
for name,channel of otto.channels.channel_list
if channel.lineout == '1'
channel.set_lineout 0
otto.channels.channel_list[channelname].set_lineout 1
@on 'togglecrossfade': ->
console.log 'sio togglecrossfade'
channelname = @data.channelname
if otto.channels.channel_list[channelname]
otto.channels.channel_list[channelname].toggle_crossfade()
@on 'togglereplaygain': ->
console.log 'sio togglereplaygain', @data
channelname = @data.channelname
if otto.channels.channel_list[channelname]
otto.channels.channel_list[channelname].toggle_replaygain()
@on 'setvol': (socket) ->
console.log 'sio setvol'
if not session = socket_get_session @ then return
channelname = @data.channelname
volume = @data.volume
if otto.channels.channel_list[channelname]
otto.channels.channel_list[channelname].setvol volume, ->
@on 'reloadme': (socket) ->
console.log 'sio reloadme'
@emit 'reload'
@on 'reloadall': (socket) ->
console.log 'sio reloadall'
if not session = socket_get_session @ then return
if session and session.user
if session.user is 'jon'
console.log 'reload all'
zappa.io.sockets.emit 'reload'
else
console.log "#{session.user} tried to .reloadall! awesome."
# mpd's next is not currently used, it's not very useful
# it only works once playing has started, and it resumes play if paused
@on 'deleteid': (socket) ->
console.log 'sio deleteid', @data
if not session = socket_get_session @ then return
if not session or not session.user
console.log 'error: don\'t know the user, ignoring socket event'
#@emit 'reload'
return
if otto.channels.channel_list[session.channelname]
otto.channels.channel_list[session.channelname].remove_from_queue @data, session.user
# used by 'next' in Otto.py
@on 'delete': (socket) ->
console.log 'sio delete'
if not session = socket_get_session @ then return
if not session or not session.user
console.log 'error: don\'t know the user, ignoring socket event'
#@emit 'reload'
return
if otto.channels.channel_list[session.channelname]
otto.channels.channel_list[session.channelname].remove_from_queue '', session.user
@on 'enqueue': (socket) ->
console.log 'sio enqueue', @data
if not session = socket_get_session @ then return
if not session or not session.user
console.log 'error: do not know the user, ignoring socket event'
#@emit 'reload'
return
client_channel = otto.channels.channel_list[session.channelname]
if client_channel
client_channel.add_to_queue @data, session.user
@on 'stars': (socket) ->
console.log 'sio stars'
if not session = socket_get_session @ then return
if not session or not session.user
console.log 'error: do not know the user, ignoring socket event'
#@emit 'reload'
return
if not session.user or not /[^0-9.]/.test(session.user)
return
otto.db.add_to_user_list session.user, @data.id, @data.rank, (success) ->
if success
otto.db.load_stars session.user, no, (stars) ->
#console.log 'stars', stars
zappa.io.sockets.emit 'stars', stars
@on 'unlist': (socket) ->
console.log 'sio unlist'
if not session = socket_get_session @ then return
if not session or not session.user
console.log 'error: do not know the user, ignoring socket event'
#@emit 'reload'
return
if not session.user or not /[^0-9.]/.test(session.user)
return
jsonreq.post_with_body 'http://localhost:8778/remove_from_list', querystring.stringify({ user: session.user, id: @data }), (err, data) ->
jsonreq.get 'http://localhost:8778/load_lists', (err, data) ->
zappa.io.sockets.emit 'lists', data
@on 'loadmusic': (socket) ->
console.log 'sio loadmusic', @data
otto.loader.load(zappa, @data)
@on 'loadmusiccancel': (socket) ->
console.log 'sio loadmusiccancel'
otto.loader.cancel(zappa)
@on 'chat': (socket) ->
console.log 'sio chat'
if not session = socket_get_session @ then return
otto.report_event 'chat', session.channelname, 0, session.user, @data
@on 'inchat': (socket) ->
console.log 'sio inchat'
if not session = socket_get_session @ then return
otto.ourlisteners.set_state session.sessionID, @id, 'inchat', @data
if @data
[eventname, message] = ['joinedchat', 'joined the chat']
else
[eventname, message] = ['leftchat', 'left the chat']
otto.report_event eventname, session.channelname, 0, session.user, message
@on 'typing': (socket) ->
if not session = socket_get_session @ then return
otto.ourlisteners.set_state session.sessionID, @id, 'typing', @data
@on 'focus': (socket) ->
if not session = socket_get_session @ then return
otto.ourlisteners.set_state session.sessionID, @id, 'focus', @data
@on 'idle': (socket) ->
if not session = socket_get_session @ then return
val = if @data then (new Date()).getTime() else 0
otto.ourlisteners.set_state session.sessionID, @id, 'idle', val
@on 'console.log': (socket) ->
    console.log.apply console, @data
@on 'console.dir': (socket) ->
    console.dir.apply console, @data
########################################
@get '/': ->
otto.index.render bodyclasses: '.disconnected'
@get '/starts_with': ->
query = @req.query
otto.db.starts_with query.value, query.attribute, parseInt(query.otype), query.nochildren, (objects) =>
@res.json(objects)
@get '/all_albums': ->
otto.db.all_albums (objects) =>
@res.json(objects)
@get '/all_albums_by_year': ->
otto.db.all_albums_by_year (objects) =>
@res.json(objects)
@get '/all_albums_by_fileunder': ->
otto.db.all_albums_by_fileunder (objects) =>
@res.json(objects)
@get '/image(/:extra?)?': ->
id = @req.query.id
size = false
if @req.params.extra
size = @req.params.extra.replace(/^[/]/, '')
otto.db.load_image id, size, (image) =>
if !image
#return @req.redirect '/static/images/gray.png'
if not otto.graypixel
otto.graypixel = fs.readFileSync('static/images/gray.png')
image = otto.graypixel
#im = gd.createFromPngPtr(imagedata)
#w = Math.floor (im.width + 2)
#h = Math.floor (im.height + 2)
#w = 100
#h = 100
#target_png = gd.createTrueColor(w, h)
#im.copyResampled(target_png, 0,0,0,0, w, h, im.width, im.height)
@res.setHeader 'Content-Type', 'image/png'
#image = target_png.pngPtr()
@res.write image, 'binary'
@res.end()
@get '/load_object': ->
query = @req.query
otto.db.load_object query.id, query.load_parents, (object) =>
@res.json(object)
@get '/album_details': ->
query = @req.query
otto.db.album_details query.id, (object) =>
@res.json(object)
@get '/search': ->
value = @req.query.value
otto.db.search value, (err, results) =>
@res.json(results)
# still to be converted to mongodb
#@get
#'/music_root_dirs': proxy_api_request
#'/load_dir': proxy_api_request
#'/load_lists': proxy_api_request
@get '/load_users': ->
query = @req.query
otto.db.load_users (users) =>
@res.json(users)
@get '/load_stars': ->
query = @req.query
otto.db.load_stars null, yes, (stars) =>
@res.json(stars)
@get '/load_newest_albums': ->
query = @req.query
otto.db.get_newest_albums (albums) =>
@res.json(albums)
@get '/load_fileunder': ->
artistid = @req.query.artistid
otto.db.load_fileunder artistid, (results) =>
@res.json(results)
# we ask the client to hit this when we need to reload their session cookie
@get '/resession': ->
console.log '/resession'
return ''
#@coffee '/shared.js': ... # use @coffee if you want the code to be shared between server and client
# these seem to cache the outgoing results! plus they wrap everything in zappa.run
#@client '/otto.client.js': otto.client
#@client '/otto.client.cubes.js': otto.client.cubes
#@client '/otto.client.soundfx.js': otto.client.soundfx
#@client '/otto.client.templates.js': otto.client.templates
@client 'shunt.js': otto.client # seems something must use @client for zappa.js to be served
@get '/otto.client.js': ->
@res.setHeader('Content-Type', 'text/javascript')
#return ';(' + otto.client + ')();'
return ';window.otto = window.otto || {};zappa.run(' + otto.client + ');'
@get '/otto.client.templates.js': ->
@res.setHeader('Content-Type', 'text/javascript')
return ';window.otto = window.otto || {};(' + otto.client.templates + ')();'
@get '/otto.client.:modulename.js': ->
modulename = @req.params.modulename
if otto.client[modulename]
@res.setHeader('Content-Type', 'text/javascript')
return ';(' + otto.client[modulename] + ')();'
else
@res.status(404).send('Not found')
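  # proxy_stream: pipe the requested channel's live audio stream (mp3/ogg/wav) through
  # this HTTP response, registering the listener on connect and removing it on disconnect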
proxy_stream = (format) ->
host = @req.headers.host
add_stream_callback = (@req, channel, format) =>
otto.ourlisteners.add_stream @req.session
remove_stream_callback = (@req, channel, format) =>
otto.ourlisteners.remove_stream @req.session
#console.dir @req.params
channelname = @req.params.channelname || 'main'
format = format || @req.params.format || 'mp3'
console.log 'channelname', channelname, 'format', format
if otto.channels.channel_list[channelname]
if format in ['mp3', 'ogg', 'wav']
otto.channels.channel_list[channelname].proxy_stream @req, @res, add_stream_callback, remove_stream_callback, format
else
throw new Error 'unknown format'
else
throw new Error 'stream not found'
@get '/stream/:channelname/:format': proxy_stream
@get '/stream/mp3': -> proxy_stream.call(@, 'mp3')
@get '/stream/ogg': -> proxy_stream.call(@, 'ogg')
@get '/stream/wav': -> proxy_stream.call(@, 'wav')
@get '/stream/:channelname': proxy_stream
@get '/stream': proxy_stream
@get '/download/:id': ->
return #!#
if not @req.session.user or not /[^0-9.]/.test(@req.session.user)
return @res.send('not logged in', 403)
id = @req.params.id
jsonreq.get 'http://localhost:8778/load_lists?objects=1', (err, data) =>
filenames = []
archivename = no
for user in data
console.log "id #{id}, user.id #{user.id}"
if id == user.id
archivename = "#{user.owner}.zip"
console.log "archiving #{archivename} for id #{user.id}"
for item in user.list
if item.otype == 10
filename = path.join(user.owner, path.basename(item.filename))
filenames.push( [item.filename, filename] )
#console.log "adding song #{item.filename} as #{filename}"
else if item.otype == 20
albumdirname = path.basename(item.dirpath)
#console.log "adding album #{albumdirname}"
if item.items and item.items.length
for song in item.items
filename = path.join(user.owner, albumdirname, path.basename(song.filename))
filenames.push( [song.filename, filename] )
#console.log "adding album song #{song.filename} as #{filename}"
if archivename
console.log 'writeHead'
@res.writeHead 200,
'Pragma': 'public'
'Expires': '0'
'Cache-Control': 'must-revalidate, post-check=0, pre-check=0'
#'Cache-Control': 'public' # twice?
'Content-Description': 'File Transfer'
'Content-Type': 'application/octet-stream'
'Content-Disposition': "attachment; filename=\"#{archivename}\""
'Content-Transfer-Encoding': 'binary'
zip = zipstream.createZip level: 1
zip.pipe( @res )
nextfile = =>
if filenames.length
entry = filenames.shift()
fullfilename = entry[0]
shortfilename = entry[1]
zip.addFile fs.createReadStream(fullfilename), name: shortfilename, store: true, nextfile
else
zip.finalize (bytes) =>
@res.end()
@req.connection.destroy()
console.log "zip file downloaded, #{bytes} bytes total"
nextfile()
@get '/otto.m3u': ->
host = @req.headers.host
@res.setHeader('Content-Type', 'audio/x-mpegurl')
"""
#EXTM3U
#EXTINF:-1,otto.local-
http://#{host}/stream/1/#{@req.session.user}/#{host}
"""
@get '/otto.pls': ->
host = @req.headers.host
@res.setHeader('Content-Type', 'audio/x-scpls')
"""
[playlist]
numberofentries=1
File1=http://#{host}/stream/1/#{@req.session.user}/Otto%20(#{host})
Title1=Otto (#{host})
Length1=-1
Version=2
"""
########################################
socket_get_session = (s) -> otto.sessionlist[s.io.handshaken[s.id].sessionID]
socket_get_sessionip = (s) -> otto.sessioniplist[s.io.handshaken[s.id].sessionID]
# loop across all socket.io connections for a given session and call func with @ set to each socket
apply_across_all_tabs = (session, func) ->
sessionsockets = otto.ourlisteners.get_sockets session
for id of sessionsockets
func.apply otto.socketlist[id]
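  # sessionSet: write key/value pairs to both the in-memory session object and the
  # copy persisted in the Mongo session store, then invoke the callback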
sessionSet = (session, dict, callback) ->
console.log 'sessionSet'
otto.sessionStore.get session.sessionID, (err, session2) =>
if err or not session2
console.log 'error: no session found in database? - ', err
console.log session
# not sure if we should call the callback or not on err
# at least the in memory session doesn't get changed
callback()
else
for key,val of dict
session[key] = val
session2[key] = val
otto.sessionStore.set session.sessionID, session2, ->
callback()
otto.channels.set_global_event_handler (eventname, channel, args...) ->
switch eventname
when 'queue'
zappa.io.sockets.in(channel.name).emit 'queue', channel.queue
when 'state'
zappa.io.sockets.in(channel.name).emit 'state', channel.state
when 'status'
allstatus = {}
for name,channel of otto.channels.channel_list
allstatus[name] = channel.status
zappa.io.sockets.emit 'status', allstatus
when 'time'
zappa.io.sockets.in(channel.name).emit 'time', channel.time
when 'lineout'
#zappa.io.sockets.in(channel.name).emit 'lineout', channel.lineout
alllineout = {}
for name,channel of otto.channels.channel_list
alllineout[name] = channel.lineout
zappa.io.sockets.emit 'lineout', alllineout
when 'replaygain'
#zappa.io.sockets.in(channel.name).emit 'lineout', channel.lineout
allreplaygain = {}
for name,channel of otto.channels.channel_list
allreplaygain[name] = channel.replaygain
zappa.io.sockets.emit 'replaygain', allreplaygain
when 'outputs'
#zappa.io.sockets.in(channel.name).emit 'outputs', channel.outputs
alloutputs = {}
for name,channel of otto.channels.channel_list
alloutputs[name] = channel.outputs
zappa.io.sockets.emit 'outputs', alloutputs
when 'started'
message = "playing #{args[0].song}"
otto.report_event 'started', channel.name, args[0], user, message
when 'finished'
previously_playing = args[0]
message = 'finished song'
if previously_playing.requestor
message += " requested by #{previously_playing.requestor}"
message += " #{previously_playing.song}"
otto.report_event 'finished', channel.name, previously_playing.id, undefined, message
when 'addtoqueue'
song = args[0]
user = args[1]
message = "picked song #{song.song}"
otto.report_event 'enqueue', channel.name, song, user, message
when 'killed', 'removed'
song = args[0]
user = args[1]
if song.requestor?
if song.requestor is user
message = "#{eventname} their own request #{song.song}"
else
message = "#{eventname} song requested by #{song.requestor}"
else
message = "#{eventname} song #{song.song}"
otto.report_event eventname, channel.name, song.id, user, message
if eventname is 'killed'
otto.flush_streams(channel.name)
otto.ourlisteners.on '*', (eventname, listeners, data) ->
switch eventname
when 'update'
zappa.io.sockets.emit 'listeners', data
when 'userjoin'
otto.report_event 'joinedchannel', 'main', 0, data.user, 'joined the channel'
when 'userchange'
otto.report_event 'userchange', 'main', 0, data.user, 'changed user'
when 'userleft'
otto.report_event 'leftchannel', 'main', 0, data.user, 'left the channel'
when 'streamingstart'
otto.report_event 'startstreaming', 'main', 0, data.user, 'started streaming'
when 'streamingstop'
otto.report_event 'stopstreaming', 'main', 0, data.user, 'stopped streaming'
#eventlog = fs.createWriteStream 'static/html/events.html', 'flags': 'a', ->
# # not working:
# otto.report_event 'ottostarted', undefined, undefined, undefined, (new Date).toUTCString()
otto.report_event = (name, channelname, id, user, message) =>
event =
timestamp: new Date()
id: id
user: user
name: name
channel: channelname
message: message
if channelname
@io.sockets.in(channelname).emit 'chat', event
else
@io.sockets.emit 'chat', event
#eventlog.write otto.client.templates.event event: event
#eventlog.write '\n'
otto.db.save_event event, ->
otto.flush_streams = (channelname) =>
console.log 'flushstreams'
if channelname
@io.sockets.in(channelname).emit 'flushstream'
else
@io.sockets.emit 'flushstream'
<|start_filename|>otto.misc.coffee<|end_filename|>
fs = require 'fs'
os = require 'os'
net = require 'net'
glob = require 'glob'
connect = require 'zappajs/node_modules/express/node_modules/connect'
Session = connect.middleware.session.Session
global.otto = otto = global.otto || {}
module.exports = global.otto.misc = do -> # note the 'do' causes the function to be called
misc = {}
# more coffeescript friendly versions of setTimeout and setInterval
misc.timeoutSet = (ms, func) -> setTimeout(func, ms)
misc.intervalSet = (ms, func) -> setInterval(func, ms)
# from http://stackoverflow.com/questions/280634/endswith-in-javascript
misc.endsWith = (str, suffix) ->
return str.indexOf(suffix, str.length - suffix.length) isnt -1
misc.is_dirSync = (path) ->
try
stat = fs.lstatSync path
if stat then return stat.isDirectory() else return no
catch err
if err.code? and err.code is 'ENOENT'
return no
else
throw err
# check if a path is an existing directory or throw an exception
misc.assert_is_dirSync = (path) ->
err = null
try
stats = fs.statSync path
catch error
err = error
if err or not stats.isDirectory()
throw new Error "error: otto needs #{path} to be a directory and it is not"
# check if a path is an existing directory, create it if not, or throw an exception if it can't be created
misc.assert_is_dir_or_create_itSync = (path) ->
err = null
try
stats = fs.statSync path
catch error
err = error
if err
if err.code? and err.code is 'ENOENT'
fs.mkdirSync path
else
throw err
else
      if not stats.isDirectory() then throw new Error "error: otto needs #{path} to be a directory and it is not"
# check if a socket is openable, check every 10ms until it is (or until we run out of attempts)
misc.wait_for_socket = (socket, attempts, callback) ->
testsocket = net.connect socket, ->
testsocket.destroy()
callback null
testsocket.on 'error', (err) ->
#console.log "waiting for socket #{socket}"
attempts--
if attempts > 0
misc.timeoutSet 10, ->
misc.wait_for_socket socket, attempts, callback
else
callback "gave up waiting for socket #{socket}"
# sync. read in a pid from a file (or files, it can take a glob pattern) and send that process a kill signal
misc.kill_from_pid_fileSync = (pid_file_or_glob) ->
pid_files = glob.sync pid_file_or_glob
pid_files.forEach (pid_file) ->
data = fs.readFileSync pid_file
pid = parseInt(data)
process.kill(pid)
# expand "~" in a filename to the user's home directory
misc.expand_tilde = (path) ->
homedir = process.env[if process.platform is 'win32' then 'USERPROFILE' else 'HOME']
#username = process.env['USER']
return path.replace /^[~]/, homedir
# from http://code.google.com/p/js-test-driver/source/browse/tags/1.3.2/idea-plugin/src/com/google/jstestdriver/idea/javascript/predefined/qunit/equiv.js?r=937 -jon
#
# Tests for equality any JavaScript type and structure without unexpected results.
# Discussions and reference: http://philrathe.com/articles/equiv
# Test suites: http://philrathe.com/tests/equiv
# Author: <NAME> <<EMAIL>>
##
## note: i converted this to coffeescript, but it hasn't been tested yet -jon
##
misc.equiv = ->
callers = [] # stack to decide between skip/abort functions
# Determine what is o.
hoozit = (o) ->
if typeof o is "string"
return "string"
else if typeof o is "boolean"
return "boolean"
else if typeof o is "number"
        if isNaN(o) then return "nan" else return "number"
else if typeof o is "undefined"
return "undefined"
# consider: typeof null === object
else if o is null
return "null"
# consider: typeof [] === object
else if o instanceof Array
return "array"
# consider: typeof new Date() === object
else if o instanceof Date
return "date"
# consider: /./ instanceof Object;
# /./ instanceof RegExp;
# typeof /./ === "function"; # => false in IE and Opera,
# true in FF and Safari
else if o instanceof RegExp
return "regexp"
else if typeof o is "object"
return "object"
else if o instanceof Function
return "function"
# Call the o related callback with the given arguments.
    bindCallbacks = (o, callbacks, args) ->
prop = hoozit(o)
if prop
if hoozit(callbacks[prop]) is "function"
return callbacks[prop].apply(callbacks, args)
else
return callbacks[prop] # or undefined
callbacks = do ->
# for string, boolean, number and null
useStrictEquality = (b, a) ->
return a is b
return {
"string": useStrictEquality
"boolean": useStrictEquality
"number": useStrictEquality
"null": useStrictEquality
"undefined": useStrictEquality
"nan": (b) ->
return isNaN(b)
"date": (b, a) ->
return hoozit(b) is "date" && a.valueOf() is b.valueOf()
"regexp": (b, a) ->
return hoozit(b) is "regexp" && \
a.source is b.source && \ # the regex itself
a.global is b.global && \ # and its modifers (gmi) ...
a.ignoreCase is b.ignoreCase && \
a.multiline is b.multiline
# - skip when the property is a method of an instance (OOP)
# - abort otherwise,
# initial === would have catch identical references anyway
"function": ->
caller = callers[callers.length - 1]
return caller isnt Object && \
typeof caller isnt "undefined"
"array": (b, a) ->
# b could be an object literal here
if ! (hoozit(b) is "array")
return false
len = a.length
if len isnt b.length # safe and faster
return false
for x, i in a
if ! innerEquiv(a[i], b[i])
return false
return true
"object": (b, a) ->
eq = true # unless we can prove it
aProperties = [] # collection of strings
bProperties = []
# comparing constructors is more strict than using instanceof
if a.constructor isnt b.constructor
return false
# stack constructor before traversing properties
callers.push(a.constructor)
        for i of a # be strict: don't ensure hasOwnProperty; go deep
          aProperties.push(i) # collect a's properties
          if ! innerEquiv(a[i], b[i])
            eq = false
callers.pop() # unstack, we are done
        for i of b
          bProperties.push(i) # collect b's properties
# Ensures identical properties name
return eq && innerEquiv(aProperties.sort(), bProperties.sort())
}
# the real equiv function
innerEquiv = -> # can take multiple arguments
args = Array.prototype.slice.apply(arguments)
if args.length < 2
return true # end transition
return ( (a, b) ->
if a is b
return true # catch the most you can
else if typeof a isnt typeof b || a is null || b is null || typeof a is "undefined" || typeof b is "undefined"
return false # don't lose time with error prone cases
else
return bindCallbacks(a, callbacks, [b, a])
# apply transition with (1..n) arguments
)(args[0], args[1]) && arguments.callee.apply(this, args.splice(1, args.length -1))
return innerEquiv
class misc.Elapsed
constructor: ->
@start = Date.now()
@lap = @start
seconds: =>
now = Date.now()
lap = ((now-@lap)/1000).toFixed(3)
total = ((now-@start)/1000).toFixed(3)
@lap = now
return "#{lap}s (#{total}s total)"
toString: -> @seconds()
# determine the user making the request
# or fall back to ip address if no user can be determined
misc.authenticate_user = (req, res, next) ->
client_ip = req.connection.remoteAddress
if req.headers['x-forwarded-for'] and (client_ip is '127.0.0.1' or client_ip is '::1')
      # might not work in all cases (e.g. localhost on 127.0.0.2)
# but we can't just blindly accept potentially injected x-forwarded-for headers
ip_list = req.headers['x-forwarded-for'].split ','
client_ip = ip_list[ip_list.length-1]
localhost = false
for devicename,infolist of os.networkInterfaces()
for ip in infolist
if client_ip == ip.address
localhost = true
break
if localhost then break
otto.sessioniplist[req.sessionID] = { client_ip: client_ip, localhost: localhost }
next()
# use the session cookie set by express to connect
# the session.io session to the express session
# from http://www.danielbaulig.de/socket-ioexpress/
# see also https://github.com/mauricemach/zappa/pull/90
# and this might be of interest:
# https://www.npmjs.org/package/session.socket.io
misc.socket_authenticate_user = (handshake, accept) ->
console.log 'io.set authorization'
# check if there's a cookie header
if handshake.headers.cookie
# if there is, parse the cookies
cookieParser = otto.zappa.express.cookieParser()
cookieParser handshake, null, (error) ->
if error
console.log 'cookie parse error:', error
accept('cookie parse error', false)
else
# note that you will need to use the same secret key to grab the
# session id as you specified in the Express setup.
sessionID = handshake.cookies['express.sid'].split(':')[1].split('.')[0]
console.log 'sessionID', sessionID
otto.sessionStore.get sessionID, (err, session) ->
if err || !session
# if we cannot grab a session, turn down the connection
console.log 'error: no session found in database during authentication - ', err
accept('no session found in database', false)
else
# save the session data and accept the connection
# note that if you throw any exceptions in this 'else' section it'll be
# caught by sessionStore.get and the 'if' section will be called above!
# first create a real express session object so we can actually change
# session data inside socketio communications
# we fake a req object relying on the connection Session constructor only
# using two fields from it (sessionID and sessionStore)
# i looked at the code and verified this for connection 2.6.0
#fake_req = {sessionID: sessionID, sessionStore: otto.sessionStore}
#handshake.session = new Session(fake_req, session)
# we don't need this now
# # oh well, couldn't quite get that working yet
# #handshake.session = session
handshake.sessionID = sessionID
console.log "socket.io thinks the sessionID is #{handshake.sessionID}"
accept(null, true)
else
# if there isn't, turn down the connection with a message
# and leave the function.
      # hacked to allow the app to connect via socket.io FIXME
#return accept('no express session ID cookie transmitted', false)
accept(null, true)
misc.debug_request = (req, res, next) ->
was_image = false
do ->
# collapse all contiguous /image* urls into one debug message
if /^[/]image/.test req.url
if !was_image
#console.log 'loading /images...'
was_image = true
else
console.log req.url
was_image = false
next()
misc.dBscale = (x) ->
# logarithmic volume dB scale approximations
# http://www.dr-lex.be/info-stuff/volumecontrols.html
# http://www.360doc.com/content/09/1127/20/155970_9889546.shtml
n = Math.round( Math.pow(4, (x / (100 / 3.322))) ) # x^4
#n = Math.round( Math.pow(3, (x / (100 / 4.192))) ) # x^3
#n = Math.round( Math.pow(2, (x / (100 / 6.644))) ) # x^2
return n
return misc
<|start_filename|>otto.client.soundfx.coffee<|end_filename|>
###############
### client side (body of otto.client.soundfx.coffee served as /otto.soundfx.js)
###############
global.otto.client.soundfx = ->
window.otto.client.soundfx = do -> # note the 'do' causes the function to be called
$('head').append '<script src="static/js/buzz.js">'
soundfx = {}
event_sound_map =
'chat': 'randomize4.wav'
'joinedchannel': 'joined.wav'
'leftchannel': 'startedorstopped.wav'
'removed': 'left.wav'
'killed': 'downish.wav'
'enqueue': 'subtle.wav'
'joinedchat': 'skweek.wav'
'leftchat': 'delete.wav'
'startstreaming': no
'stopstreaming': no
'finished': no
'fxenabled': 'subtle.wav'
fx_attenuation = 0.70 # we want the sound effects to only be 70% of the music vol
sounds = {}
$ ->
for eventname, soundfile of event_sound_map
if soundfile
sounds[eventname] = new buzz.sound '/static/sounds/'+soundfile
sounds[eventname].load()
else
sounds[eventname] = no
soundfx.play = (eventname) ->
if sounds[eventname]
if otto.chat_state or eventname not in ['chat', 'joinedchat', 'leftchat']
fx = sounds[eventname]
vol = parseInt( otto.current_volume * fx_attenuation )
fx.setVolume(vol).play()
return soundfx
| ferguson/otto |
<|start_filename|>build/rollup.js<|end_filename|>
// ------------------------------------------------------------------------------------------
// setup
// ------------------------------------------------------------------------------------------
import buble from '@rollup/plugin-buble'
import commonjs from '@rollup/plugin-commonjs'
import resolve from '@rollup/plugin-node-resolve'
import path from 'path'
import rimraf from 'rimraf'
import license from 'rollup-plugin-license'
import typescript from 'rollup-plugin-typescript2'
import { uglify } from 'rollup-plugin-uglify'
const pkg = require('../package.json')
const pkgName = pkg.name
const className = pkgName.replace(/(^\w|-\w)/g, c => c.replace('-', '').toUpperCase())
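// output() builds the shared Rollup output config for a given file extension and
// module format (UMD by default), mapping the external 'vue' import to the Vue global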
function output (ext, format = 'umd') {
return {
name: className,
file: `dist/${pkgName}.${ext}`,
format: format,
exports: 'named',
globals: {
vue: 'Vue',
},
}
}
// ------------------------------------------------------------------------------------------
// build
// ------------------------------------------------------------------------------------------
const umd = {
input: 'src/index.ts',
external: [
'vue'
],
output: output('js'),
plugins: [
resolve({
extensions: ['.js', '.ts'],
}),
typescript({
cacheRoot: `build/.rpt2_cache`,
}),
license({
banner: {
content: {
file: path.join(__dirname, 'banner.txt'),
},
},
}),
commonjs(),
buble(),
],
}
const min = Object.assign({}, umd, {
output: output('min.js'),
plugins: [...umd.plugins, uglify()],
})
const es = Object.assign({}, umd, {
output: output('esm.js', 'es'),
})
rimraf.sync('dist')
export default process.env.NODE_ENV === 'production'
? [umd, es, min]
: [umd, es]
| sadortun/vue-class-store |
<|start_filename|>Makefile<|end_filename|>
.PHONY: build test lint coverage clean clean-build clean-pyc publish
build:
@python setup.py build
test:
@nosetests -v
lint:
	@flake8 torext tests
coverage:
@rm -f .coverage
@nosetests --with-coverage --cover-package=torext
clean: clean-build clean-pyc
clean-build:
@rm -rf build/
@rm -rf dist/
@rm -rf *.egg-info
clean-pyc:
@find . -name '*.pyc' -exec rm -f {} +
@find . -name '*.pyo' -exec rm -f {} +
publish:
python setup.py sdist bdist_wheel upload
<|start_filename|>examples/formal_project/sampleproject/static/home.css<|end_filename|>
.box {
box-shadow: 0 0 10px #eee;
border: 1px solid #eee;
}
#brand {
width: 300px;
margin: 0 auto;
margin-top: 100px;
}
#brand p {
text-align: center;
color: #333;
}
| reorx/torext |
<|start_filename|>App.js<|end_filename|>
import React, { useState, useEffect } from 'react';
import { Image, View, Text, Dimensions } from 'react-native';
import { Grid, Col, Row } from 'react-native-easy-grid';
import { Magnetometer } from 'expo-sensors';
const { height, width } = Dimensions.get('window');
export default App = () => {
const [subscription, setSubscription] = useState(null);
const [magnetometer, setMagnetometer] = useState(0);
useEffect(() => {
_toggle();
return () => {
_unsubscribe();
};
}, []);
const _toggle = () => {
if (subscription) {
_unsubscribe();
} else {
_subscribe();
}
};
const _subscribe = () => {
setSubscription(
Magnetometer.addListener((data) => {
setMagnetometer(_angle(data));
})
);
};
const _unsubscribe = () => {
subscription && subscription.remove();
setSubscription(null);
};
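  // Convert raw magnetometer x/y readings into a 0-360° heading using atan2,
  // wrapping negative angles around so the result is always positive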
const _angle = (magnetometer) => {
let angle = 0;
if (magnetometer) {
let { x, y, z } = magnetometer;
if (Math.atan2(y, x) >= 0) {
angle = Math.atan2(y, x) * (180 / Math.PI);
} else {
angle = (Math.atan2(y, x) + 2 * Math.PI) * (180 / Math.PI);
}
}
return Math.round(angle);
};
const _direction = (degree) => {
if (degree >= 22.5 && degree < 67.5) {
return 'NE';
}
else if (degree >= 67.5 && degree < 112.5) {
return 'E';
}
else if (degree >= 112.5 && degree < 157.5) {
return 'SE';
}
else if (degree >= 157.5 && degree < 202.5) {
return 'S';
}
else if (degree >= 202.5 && degree < 247.5) {
return 'SW';
}
else if (degree >= 247.5 && degree < 292.5) {
return 'W';
}
else if (degree >= 292.5 && degree < 337.5) {
return 'NW';
}
else {
return 'N';
}
};
// Match the device top with pointer 0° degree. (By default 0° starts from the right of the device.)
const _degree = (magnetometer) => {
return magnetometer - 90 >= 0 ? magnetometer - 90 : magnetometer + 271;
};
return (
<Grid style={{ backgroundColor: 'black' }}>
<Row style={{ alignItems: 'center' }} size={.9}>
<Col style={{ alignItems: 'center' }}>
<Text
style={{
color: '#fff',
fontSize: height / 26,
fontWeight: 'bold'
}}>
{_direction(_degree(magnetometer))}
</Text>
</Col>
</Row>
<Row style={{ alignItems: 'center' }} size={.1}>
<Col style={{ alignItems: 'center' }}>
<View style={{ position: 'absolute', width: width, alignItems: 'center', top: 0 }}>
<Image source={require('./assets/compass_pointer.png')} style={{
height: height / 26,
resizeMode: 'contain'
}} />
</View>
</Col>
</Row>
<Row style={{ alignItems: 'center' }} size={2}>
<Text style={{
color: '#fff',
fontSize: height / 27,
width: width,
position: 'absolute',
textAlign: 'center'
}}>
{_degree(magnetometer)}°
</Text>
<Col style={{ alignItems: 'center' }}>
<Image source={require("./assets/compass_bg.png")} style={{
height: width - 80,
justifyContent: 'center',
alignItems: 'center',
resizeMode: 'contain',
transform: [{ rotate: 360 - magnetometer + 'deg' }]
}} />
</Col>
</Row>
<Row style={{ alignItems: 'center' }} size={1}>
<Col style={{ alignItems: 'center' }}>
<Text style={{ color: '#fff' }}>Copyright @RahulHaque</Text>
</Col>
</Row>
</Grid>
);
}
| rahulhaque/compass-react-native-expo |
<|start_filename|>rollup.config.js<|end_filename|>
export default {
entry: 'dist/ng2-page-transition.js',
dest: 'dist/bundles/ng2-page-transition.umd.js',
sourceMap: false,
format: 'umd',
moduleName: 'ng2-page-transition',
globals: {
'@angular/core': 'ng.core',
'@angular/router': 'ng.router'
}
}
| bergben/ng2-page-transition
<|start_filename|>Assets/U.movin/Movin.cs<|end_filename|>
using UnityEngine;
using u.movin;
using Unity.VectorGraphics;
namespace u.movin
{
public struct MotionProps
{
public int key; // Current keyframe
public int keys; // Total keyframes
public float startFrame; // Frame current animation started
public float endFrame; // Frame current animation ends
public float percent; // Percentage to reach next key
public bool completed; // Animation complete
public Vector2 currentOutTangent; // Current keyframe out tangent
public Vector2 nextInTangent; // Next keyframe in tangent
public Vector3 startValue;
public Vector3 endValue;
}
}
public class Movin
{
public GameObject gameObject;
public GameObject container;
public Transform transform {
get { return gameObject.transform; }
}
public BodymovinContent content;
public Updater updater;
private MovinLayer[] layers;
private MovinLayer[] layersByIndex;
public float scale;
public bool playing = false;
public bool paused = false;
public float frameRate = 0;
public float totalFrames = 0;
public float time = 0; // Local time (since animation began)
public float frame = 0; // Animation frame
public bool loop;
public bool complete = false;
public float quality;
public float strokeWidth;
public int sort;
public VectorUtils.TessellationOptions options;
/* ---- BLENDING ---- */
public bool blending = false;
public BodymovinContent blendContent;
public string blendPath;
/* ---- EVENTS ---- */
public System.Action OnComplete;
public Movin(Transform parent, string path, int sort = 0, float scale = 1f, float strokeWidth = 0.5f, bool loop = true, float quality = 0.4f)
{
gameObject = new GameObject();
transform.SetParent(parent, false);
container = new GameObject();
container.transform.SetParent(transform, false);
MovinInit(path, sort, scale, strokeWidth, loop, quality);
/* ----- GET FRAME UPDATES ----- */
updater = gameObject.AddComponent<Updater>();
updater.fired += Update;
}
private void MovinInit(string path, int sort = 0, float scale = 1f, float strokeWidth = 0.5f, bool loop = true, float quality = 0.4f){
scale *= 0.1f; // Reduce default scale
gameObject.name = "body - " + path;
container.name = "container - " + path;
this.loop = loop;
this.sort = sort;
this.scale = scale;
this.strokeWidth = strokeWidth;
content = BodymovinContent.init(path);
if (content.layers == null || content.layers.Length <= 0) {
Debug.Log(">>>> NO CONTENT LAYERS, ABORT! <<<<");
return;
}
container.transform.localScale = Vector3.one * this.scale;
container.transform.localPosition -= new Vector3(content.w / 2, -(content.h / 2), 0) * scale;
frameRate = content.fr;
totalFrames = content.op;
layers = new MovinLayer[content.layers.Length];
/* ----- SHAPE OPTIONS ----- */
options = new VectorUtils.TessellationOptions() {
StepDistance = 1000.0f,
MaxCordDeviation = 0.05f,
MaxTanAngleDeviation = 0.05f,
// SamplingStepSize = 0.01f
SamplingStepSize = quality
};
/* ----- CREATE LAYERS ----- */
layersByIndex = new MovinLayer[content.highestLayerIndex + 1];
for (int i = 0; i < content.layers.Length; i++) {
MovinLayer layer = new MovinLayer(this, content.layers[i], content.layers.Length - i);
layers[i] = layer;
layersByIndex[layer.content.ind] = layers[i];
}
/* ----- SET PARENTS ----- */
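        // Bodymovin layers reference their parent by index ('parent' -> 'ind'), so re-parent each
        // layer's transform under its parent layer (or under the parent's first shape child, if any)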
for (int i = 0; i < layers.Length; i++) {
MovinLayer layer = layers[i];
int p = layer.content.parent;
if (p <= 0){ continue; }
layer.transform.SetParent(layersByIndex[p].content.shapes.Length > 0 ?
layersByIndex[p].transform.GetChild(0) : layersByIndex[p].transform, false);
}
}
private void Update()
{
if (!playing) { return; }
time += Time.deltaTime;
frame = time * frameRate;
//Debug.Log("t: " + time);
if (frame >= totalFrames)
{
Stop();
//Debug.Log("****** COMP Animation done! ******");
complete = !loop;
OnComplete?.Invoke();
if (blending){
blending = false;
UpdateLayersWithContent(blendContent, blendPath);
}
if (loop)
{
ResetKeyframes();
Play();
}
return;
}
UpdateLayers();
}
public void UpdateLayers()
{
for (int i = 0; i < layers.Length; i++) {
float f = frame - layers[i].content.startTime;
layers[i].Update(f);
}
}
private void ResetKeyframes()
{
time = 0;
for (int i = 0; i < layers.Length; i++) {
layers[i].ResetKeyframes();
}
}
/* ------ PUBLIC METHODS ------ */
public void SetColor(Color c, bool fill = true, bool stroke = false)
{
for (int i = 0; i < layers.Length; i++) {
for (int j = 0; j < layers[i].shapes.Length; j++) {
MovinShape s = layers[i].shapes[j];
if (fill)
s.UpdateFillColor(c, true);
if (stroke)
s.UpdateStrokeColor(c, true);
}
}
}
public void SetOpacity(float o)
{
for (int i = 0; i < layers.Length; i++) {
for (int j = 0; j < layers[i].shapes.Length; j++) {
MovinShape s = layers[i].shapes[j];
s.UpdateOpacity(o * 100f);
}
}
}
public void RandomFrame(bool play = false)
{
int n = Random.Range(0, (int)totalFrames);
SetFrame(n, play);
}
public void SetFrame(int n = 0, bool play = false)
{
frame = Mathf.Clamp(n, 0, totalFrames);
time = frame / frameRate;
UpdateLayers();
if (play) {
playing = true;
}
}
public Transform FindLayer(string n)
{
for (int i = 0; i < layers.Length; i++) {
if (n == layers[i].content.nm) { return layers[i].transform; }
}
return null;
}
public void Play()
{
if (complete){
complete = false;
ResetKeyframes();
}
playing = true;
paused = false;
}
public void Pause()
{
playing = false;
paused = true;
}
public void Stop()
{
playing = false;
paused = false;
}
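/* ----- BLEND TO NEW CONTENT ----- */
// Builds one-shot keyframes from the current pose toward the target composition and
// plays them over 'duration' frames; Update() swaps the layer content in once the
// blend finishes (see the 'blending' flag).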
public void Blend(string path, float duration = 30f, Vector2[] ease = null){
BodymovinContent blend = BodymovinContent.init(path);
loop = false;
totalFrames = duration;
time = 0;
frame = 0;
blending = true;
blendPath = path;
blendContent = blend;
if (ease == null){
ease = Ease.StrongOut;
}
for (int i = 0; i < layers.Length; i++) {
layers[i].CreateBlendKeyframe(blend.layers[i], duration, ease);
}
Play();
}
/* DESTROY AND REPLACE CONTENTS */
public void ClearContent(){
if (container == null){ return; }
for (int i = 0; i < container.transform.childCount; i++){
if (Application.isPlaying){
Object.Destroy(container.transform.GetChild(i).gameObject);
} else {
Object.DestroyImmediate(container.transform.GetChild(i).gameObject);
}
}
}
public void ChangeContent(string path, int sort = 0, float scale = 1f, float strokeWidth = 0.5f, bool loop = true, float quality = 0.4f){
ClearContent();
MovinInit(path, sort, scale, strokeWidth, loop, quality);
}
/* REPLACE EXISTING LAYER CONTENT WITH NEW DATA */
public void UpdateLayersWithContent(string path){
UpdateLayersWithContent(BodymovinContent.init(path), path);
}
public void UpdateLayersWithContent(BodymovinContent c, string path){
content = c;
gameObject.name = "body - " + path;
container.name = "container - " + path;
container.transform.localPosition = Vector3.zero;
container.transform.localPosition -= new Vector3(content.w / 2, -(content.h / 2), 0) * scale;
frameRate = content.fr;
totalFrames = content.op;
time = 0;
frame = 0;
for (int i = 0; i < layers.Length; i++) {
layers[i].UpdateLayersWithContent(content.layers[i]);
}
loop = true;
Play();
}
}
<|start_filename|>Assets/U.movin/Utils/Ease.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace u.movin
{
public static class Ease
{
public static Vector2[] Linear = new Vector2[2]{ new Vector2(1, 1), new Vector2(0, 0) };
public static Vector2[] StrongInOut = new Vector2[2]{ new Vector2(0.7f, 0), new Vector2(0.3f, 1) };
public static Vector2[] StrongOut = new Vector2[2]{ new Vector2(0.167f, 0.167f), new Vector2(0.3f, 1) };
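// Evaluates a cubic bezier easing curve defined by four control points: first solves
// the x polynomial for the parameter where x == p, then returns the curve's y value
// at that parameter.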
public static float CubicBezier(Vector2 p0, Vector2 p1, Vector2 p2, Vector2 p3, float p)
{
float v;
if (p == p0.x)
{
v = 0;
}
else if (p == p3.x)
{
v = 1;
}
else
{
float a = -p0.x + 3 * p1.x - 3 * p2.x + p3.x;
float b = 3 * p0.x - 6 * p1.x + 3 * p2.x;
float c = -3 * p0.x + 3 * p1.x;
float d = p0.x - p;
float temp = SolveCubic(a, b, c, d);
if (temp == -1) return -1;
v = temp;
}
return Cubed(1 - v) * p0.y + 3 * v * Squared(1 - v) * p1.y + 3 * Squared(v) * (1 - v) * p2.y + Cubed(v) * p3.y;
}
public static float Cubed(float v) { return v * v * v; }
public static float Squared(float v) { return v * v; }
public static float CubicRoot(float v) { return Mathf.Pow(v, 1.0f / 3.0f); }
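// Cardano-style cubic solver used by CubicBezier; returns a real root in [0, 1],
// or -1 when no root falls in that range.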
public static float SolveCubic(float a, float b, float c, float d)
{
if (a == 0) return SolveQuadratic(b, c, d);
if (d == 0) return 0;
b /= a;
c /= a;
d /= a;
float q = (3.0f * c - Squared(b)) / 9.0f;
float r = (-27.0f * d + b * (9.0f * c - 2.0f * Squared(b))) / 54.0f;
float disc = Cubed(q) + Squared(r);
float term1 = b / 3.0f;
if (disc > 0)
{
float s = r + Mathf.Sqrt(disc);
s = (s < 0) ? -CubicRoot(-s) : CubicRoot(s);
float t = r - Mathf.Sqrt(disc);
t = (t < 0) ? -CubicRoot(-t) : CubicRoot(t);
float result = -term1 + s + t;
if (result >= 0 && result <= 1) return result;
}
else if (disc == 0)
{
float r13 = (r < 0) ? -CubicRoot(-r) : CubicRoot(r);
float result = -term1 + 2.0f * r13;
if (result >= 0 && result <= 1) return result;
result = -(r13 + term1);
if (result >= 0 && result <= 1) return result;
}
else
{
q = -q;
float dum1 = q * q * q;
dum1 = Mathf.Acos(r / Mathf.Sqrt(dum1));
float r13 = 2.0f * Mathf.Sqrt(q);
float result = -term1 + r13 * Mathf.Cos(dum1 / 3.0f);
if (result >= 0 && result <= 1) return result;
result = -term1 + r13 * Mathf.Cos((dum1 + 2.0f * Mathf.PI) / 3.0f);
if (result >= 0 && result <= 1) return result;
result = -term1 + r13 * Mathf.Cos((dum1 + 4.0f * Mathf.PI) / 3.0f);
if (result >= 0 && result <= 1) return result;
}
return -1;
}
public static float SolveQuadratic(float a, float b, float c)
{
float result = (-b + Mathf.Sqrt(Squared(b) - 4 * a * c)) / (2 * a);
if (result >= 0 && result <= 1) return result;
result = (-b - Mathf.Sqrt(Squared(b) - 4 * a * c)) / (2 * a);
if (result >= 0 && result <= 1) return result;
return -1;
}
}
}
<|start_filename|>Assets/U.movin/MovinShapeSlave.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Unity.VectorGraphics;
namespace u.movin
{
public class MovinShapeSlave : MovinShape
{
public MovinShape master;
public BodymovinShapePath path;
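// A slave renders an additional path from the same shape group, mirroring the
// master shape's material, sorting order, fill and stroke style.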
public MovinShapeSlave(MovinShape master, BodymovinShapePath path, float strokeWidth = 1f)
{
this.master = master;
this.path = path;
Transform parent = master.transform.parent;
/* SHAPE PROPS */
points = (BodyPoint[])path.points.Clone();
motionSet = path.animSets;
closed = path.closed;
/* ANIM SETUP */
MotionSetup(ref animated, ref motion, motionSet);
/* GAMEOBJECT */
gameObject = new GameObject(master.content.item.ty + " pts: " + points.Length + " closed: " + closed);
transform.SetParent(parent, false);
transform.localPosition = master.transform.localPosition;
mesh = new Mesh();
filter = gameObject.AddComponent<MeshFilter>();
filter.mesh = mesh;
renderer = gameObject.AddComponent<MeshRenderer>();
renderer.material = master.renderer.material;
sorting = gameObject.AddComponent<UnityEngine.Rendering.SortingGroup>();
sorting.sortingOrder = master.sorting.sortingOrder;
/* SETUP VECTOR */
fill = master.content.fillHidden || master.content.fillColor == null ? null : new SolidFill() { Color = master.fill.Color };
stroke = master.content.strokeHidden || master.content.strokeColor == null ? null : new Stroke() { Color = master.stroke.Color, HalfThickness = master.content.strokeWidth * strokeWidth };
props = new PathProperties() { Stroke = stroke };
shape = new Shape() {
Fill = fill,
PathProps = props,
FillTransform = Matrix2D.identity
};
options = master.options;
scene = new Scene() {
Root = new SceneNode() { Shapes = new List<Shape> { shape } }
};
UpdateMesh();
}
}
}
<|start_filename|>Assets/U.movin/MovinRenderer.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
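// Editor-friendly wrapper around Movin: rebuilds the animation hierarchy whenever
// an inspector value changes (OnValidate defers the rebuild to the next Update).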
[ExecuteInEditMode]
public class MovinRenderer : MonoBehaviour
{
private Movin mov;
bool shouldUpdate = false;
[SerializeField]
string resourcePath = "json/";
[SerializeField]
float scale = 0.1f;
[SerializeField]
int sortingLayer = 0;
[SerializeField]
float strokeWidth = 0.5f;
[SerializeField]
bool loop = true;
[Range(0.01f, 1)]
public float quality = 0.1f; // Lower is better quality (more vertices)
void Start () {
RenderMovin();
}
void ClearChildren(){
for (int i = 0; i < transform.childCount; i++){
Object.DestroyImmediate(transform.GetChild(i).gameObject);
}
mov = null;
}
void RenderMovin() {
ClearChildren();
mov = new Movin(transform, resourcePath, sortingLayer, scale, strokeWidth, loop, quality);
mov.Play();
}
void OnValidate() {
shouldUpdate = true;
}
void Update(){
if (shouldUpdate){
RenderMovin();
shouldUpdate = false;
}
}
}
<|start_filename|>Assets/Scripts/Blending.cs<|end_filename|>
using UnityEngine;
public class Blending : MonoBehaviour {
Movin s;
int n = 0;
string str = "";
void Start () {
s = new Movin(transform, "json/circle", quality: 0.1f);
s.Play();
}
void Update(){
if (Input.GetMouseButtonDown(0)){
if (n == 1) str = "square";
if (n == 2) str = "circle";
if (n == 0) str = "triangle";
s.Blend("json/" + str, 40f);
n = n >= 2 ? 0 : n + 1;
}
}
}
<|start_filename|>Assets/U.movin/Bodybuilder/BodymovinContent.cs<|end_filename|>
using UnityEngine;
using SimpleJSON;
namespace u.movin
{
public struct BodymovinContent
{
public string v;
public float fr;
public float ip;
public float op;
public int w;
public int h;
public string nm;
public BodymovinLayer[] layers;
public int highestLayerIndex;
public static BodymovinContent init(string jsonPath)
{
if (Resources.Load<TextAsset>(jsonPath) == null){
Debug.Log(">>>> JSON NOT FOUND <<<<");
return new BodymovinContent(){};
}
string json = Resources.Load<TextAsset>(jsonPath).text;
JSONNode data = JSON.Parse(json);
BodymovinContent content = new BodymovinContent
{
nm = data["nm"],
v = data["v"],
fr = data["fr"],
ip = data["ip"],
op = data["op"],
w = data["w"],
h = data["h"]
};
content.w = Mathf.FloorToInt(content.w);
content.h = Mathf.FloorToInt(content.h);
ParseLayers(ref content, data);
return content;
}
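// Parses all layers, expanding precomposition asset layers inline (matched by refId)
// and tracking the highest layer index so parenting can later be resolved by index.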
public static void ParseLayers(ref BodymovinContent b, JSONNode n)
{
int assetLayers = 0;
int highestIndex = 0;
if (n["assets"].Count > 0){
for (int q = 0; q < n["layers"].Count; q++) {
JSONNode d = n["layers"][q];
string r = d["refId"];
if (r != null){
for (int s = 0; s < n["assets"].Count; s++) {
JSONNode a = n["assets"][s];
if (r == a["id"]){
assetLayers += a["layers"].Count;
break;
}
}
}
}
}
int j = 0;
b.layers = new BodymovinLayer[n["layers"].Count + assetLayers];
for (int q = 0; q < n["layers"].Count; q++) {
JSONNode d = n["layers"][q];
BodymovinLayer layer = ParseLayer(d);
highestIndex = layer.ind > highestIndex ? layer.ind : highestIndex;
b.layers[j] = layer;
if (layer.refId != null){
for (int c = 0; c < n["assets"].Count; c++) {
JSONNode a = n["assets"][c];
if (a["id"] == layer.refId){
for (int z = 0; z < a["layers"].Count; z++) {
JSONNode e = a["layers"][z];
j++;
BodymovinLayer i = ParseLayer(e);
i.id = a["id"];
i.ind += b.layers.Length + j;
highestIndex = i.ind > highestIndex ? i.ind : highestIndex;
i.startTime = layer.startTime;
if (i.parent > 0){
i.parent += b.layers.Length + j + 1;
} else {
i.parent = layer.ind;
i.positionOffset = -layer.anchorPoint;
}
b.layers[j] = i;
}
break;
}
}
}
j++;
}
b.highestLayerIndex = highestIndex;
}
public static BodymovinLayer ParseLayer(JSONNode d){
BodymovinLayer i = new BodymovinLayer
{
nm = d["nm"],
inFrame = d["ip"],
outFrame = d["op"],
blendMode = d["bm"],
refId = d["refId"],
startTime = d["st"]
};
ParseShapes(ref i, d);
return i;
}
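// Parses a layer's transform tracks (anchor, position, scale, rotation, opacity)
// plus their animated keyframe sets, then its shape groups. Y values are negated
// to flip the Y-down source space into Unity's Y-up space.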
public static void ParseShapes(ref BodymovinLayer b, JSONNode n)
{
int j = 0;
b.nm = n["nm"];
b.parent = n["parent"];
b.ind = n["ind"];
b.shapes = new BodymovinShape[n["shapes"].Count];
b.anchorPoint = new Vector3(n["ks"]["a"]["k"][0].AsFloat, -n["ks"]["a"]["k"][1], n["ks"]["a"]["k"][2]);
b.position = new Vector3(n["ks"]["p"]["k"][0].AsFloat, -n["ks"]["p"]["k"][1], n["ks"]["p"]["k"][2]);
b.rotationEuler = new Vector3(-n["ks"]["rx"]["k"].AsFloat, n["ks"]["ry"]["k"].AsFloat, -n["ks"]["rz"]["k"].AsFloat);
b.rotationXSets = new BodymovinAnimatedProperties[n["ks"]["rx"]["k"].Count];
b.rotationYSets = new BodymovinAnimatedProperties[n["ks"]["ry"]["k"].Count];
b.rotationZSets = new BodymovinAnimatedProperties[n["ks"]["rz"]["k"].Count];
b.scale = new Vector3(n["ks"]["s"]["k"][0].AsFloat * 0.01f, n["ks"]["s"]["k"][1] * 0.01f, n["ks"]["s"]["k"][2] * 0.01f);
b.opacity = n["ks"]["o"]["k"].AsFloat;
b.opacitySets = new BodymovinAnimatedProperties[n["ks"]["o"]["k"].Count];
int positionAnimated = n["ks"]["p"]["a"].AsInt;
b.positionSets = new BodymovinAnimatedProperties[positionAnimated == 1 ? n["ks"]["p"]["k"].Count : 0];
int scaleAnimated = n["ks"]["s"]["a"].AsInt;
b.scaleSets = new BodymovinAnimatedProperties[scaleAnimated == 1 ? n["ks"]["s"]["k"].Count : 0];
int anchorAnimated = n["ks"]["a"]["a"].AsInt;
b.anchorSets = new BodymovinAnimatedProperties[anchorAnimated == 1 ? n["ks"]["a"]["k"].Count : 0];
// 2D Rotation
if (b.rotationEuler == Vector3.zero){
b.rotationEuler = new Vector3(0, 0, -n["ks"]["r"]["k"].AsFloat);
}
int rotation2DAnimated = n["ks"]["r"]["a"].AsInt;
if (rotation2DAnimated > 0){
b.rotationZSets = new BodymovinAnimatedProperties[n["ks"]["r"]["k"].Count];
}
// Animated opacity
if (b.opacitySets.Length > 0)
{
for (int i = 0; i < n["ks"]["o"]["k"].Count; i++)
{
JSONNode k = n["ks"]["o"]["k"][i];
b.opacitySets[i] = new BodymovinAnimatedProperties
{
t = k["t"],
i = new Vector2(k["i"]["x"][0].AsFloat, k["i"]["y"][0].AsFloat),
o = new Vector2(k["o"]["x"][0].AsFloat, k["o"]["y"][0].AsFloat),
sf = k["s"][0].AsFloat,
ef = k["e"][0].AsFloat
};
//Debug.Log(i + " - " + b.rotationXSets[i].i + " " + b.rotationXSets[i].o + " " + b.rotationXSets[i].sf + " " + b.rotationXSets[i].ef + " " + b.rotationXSets[i].t);
}
}
// Rotation X
if (b.rotationXSets.Length > 0)
{
for (int i = 0; i < n["ks"]["rx"]["k"].Count; i++)
{
JSONNode k = n["ks"]["rx"]["k"][i];
b.rotationXSets[i] = new BodymovinAnimatedProperties
{
t = k["t"],
i = new Vector2(k["i"]["x"][0].AsFloat, k["i"]["y"][0].AsFloat),
o = new Vector2(k["o"]["x"][0].AsFloat, k["o"]["y"][0].AsFloat),
sf = -k["s"][0].AsFloat,
ef = -k["e"][0].AsFloat
};
//Debug.Log(i + " - " + b.rotationXSets[i].i + " " + b.rotationXSets[i].o + " " + b.rotationXSets[i].sf + " " + b.rotationXSets[i].ef + " " + b.rotationXSets[i].t);
}
b.rotationEuler.x = b.rotationXSets[0].sf;
}
// Rotation Y
if (b.rotationYSets.Length > 0)
{
for (int i = 0; i < n["ks"]["ry"]["k"].Count; i++)
{
JSONNode k = n["ks"]["ry"]["k"][i];
b.rotationYSets[i] = new BodymovinAnimatedProperties
{
t = k["t"],
i = new Vector2(k["i"]["x"][0].AsFloat, k["i"]["y"][0].AsFloat),
o = new Vector2(k["o"]["x"][0].AsFloat, k["o"]["y"][0].AsFloat),
sf = k["s"][0].AsFloat,
ef = k["e"][0].AsFloat
};
//Debug.Log(i + " - " + b.rotationYSets[i].i + " " + b.rotationYSets[i].o + " " + b.rotationYSets[i].sf + " " + b.rotationYSets[i].ef + " " + b.rotationYSets[i].t);
}
b.rotationEuler.y = b.rotationYSets[0].sf;
}
// Rotation Z
if (b.rotationZSets.Length > 0)
{
string r = rotation2DAnimated > 0 ? "r" : "rz";
for (int i = 0; i < n["ks"][r]["k"].Count; i++)
{
JSONNode k = n["ks"][r]["k"][i];
b.rotationZSets[i] = new BodymovinAnimatedProperties
{
t = k["t"],
i = new Vector2(k["i"]["x"][0].AsFloat, k["i"]["y"][0].AsFloat),
o = new Vector2(k["o"]["x"][0].AsFloat, k["o"]["y"][0].AsFloat),
sf = -k["s"][0].AsFloat,
ef = -k["e"][0].AsFloat
};
//Debug.Log(i + " - " + b.rotationZSets[i].i + " " + b.rotationZSets[i].o + " " + b.rotationZSets[i].sf + " " + b.rotationZSets[i].ef + " " + b.rotationZSets[i].t);
}
b.rotationEuler.z = b.rotationZSets[0].sf;
}
b.rotation = Quaternion.Euler(b.rotationEuler);
// Scale
if (b.scaleSets.Length > 0)
{
for (int i = 0; i < n["ks"]["s"]["k"].Count; i++)
{
JSONNode k = n["ks"]["s"]["k"][i];
b.scaleSets[i] = new BodymovinAnimatedProperties
{
t = k["t"],
ix = k["i"]["x"],
iy = k["i"]["y"],
ox = k["o"]["x"],
oy = k["o"]["y"],
s = k["s"],
e = k["e"]
};
b.scaleSets[i].s *= 0.01f;
b.scaleSets[i].e *= 0.01f;
//Debug.Log(i + " scale - " + b.scaleSets[i].ix + " " + b.scaleSets[i].ox + " " + b.scaleSets[i].s + " " + b.scaleSets[i].e + " " + b.scaleSets[i].t);
}
b.scale = b.scaleSets[0].s;
}
// Position
if (b.positionSets.Length > 0)
{
for (int i = 0; i < n["ks"]["p"]["k"].Count; i++)
{
JSONNode k = n["ks"]["p"]["k"][i];
b.positionSets[i] = new BodymovinAnimatedProperties
{
t = k["t"],
i = k["i"],
o = k["o"],
to = k["to"],
ti = k["ti"],
s = k["s"],
e = k["e"]
};
b.positionSets[i].s.y = -b.positionSets[i].s.y;
b.positionSets[i].e.y = -b.positionSets[i].e.y;
//Debug.Log(i + " - " + b.positionSets[i].i + " " + b.positionSets[i].o + " " + b.positionSets[i].s + " " + b.positionSets[i].e + " " + b.positionSets[i].t);
}
b.position = b.positionSets[0].s;
}
// Anchor point
if (b.anchorSets.Length > 0)
{
for (int i = 0; i < n["ks"]["a"]["k"].Count; i++)
{
JSONNode k = n["ks"]["a"]["k"][i];
b.anchorSets[i] = new BodymovinAnimatedProperties
{
t = k["t"],
i = k["i"],
o = k["o"],
to = k["to"],
ti = k["ti"],
s = k["s"],
e = k["e"]
};
b.anchorSets[i].s.y = -b.anchorSets[i].s.y;
b.anchorSets[i].e.y = -b.anchorSets[i].e.y;
}
b.anchorPoint = b.anchorSets[0].s;
}
// Items
for (int i = 0; i < n["shapes"].Count; i++) {
JSONNode d = n["shapes"][i];
BodymovinShape s = new BodymovinShape { ty = d["ty"] };
ParseItems(ref s, d);
b.shapes[j] = s;
j++;
}
}
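// Parses a shape group's items: paths ("sh"), strokes ("st") and fills ("fl"),
// including animated vertex sets and animated colors.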
public static void ParseItems(ref BodymovinShape b, JSONNode n)
{
int j = 0;
b.it = new BodymovinShapeItem[n["it"].Count];
/* ----- CAPTURE MULTIPLE PATHS ----- */
int pathCount = 0;
for (int i = 0; i < n["it"].Count; i++) {
JSONNode d = n["it"][i];
if (d["ty"] == "sh") { pathCount += 1; }
}
b.paths = new BodymovinShapePath[pathCount];
pathCount = 0;
/* --------- */
for (int m = 0; m < n["it"].Count; m++) {
JSONNode d = n["it"][m];
BodymovinShapeItem i = new BodymovinShapeItem
{
ty = d["ty"],
nm = d["nm"],
mn = d["mn"],
ix = d["ix"],
hd = d["hd"],
c = new float[] { d["c"]["k"][0], d["c"]["k"][1], d["c"]["k"][2], d["c"]["k"][3] },
w = d["w"]["k"],
ks = new BodymovinShapeVertices
{
a = d["ks"]["a"],
ix = d["ks"]["ix"],
ksets = new BodymovinAnimatedShapeProperties[d["ks"]["k"].Count],
k = new BodymovinShapeProperties
{
c = d["ks"]["k"]["c"],
i = new Vector2[d["ks"]["k"]["i"].Count],
o = new Vector2[d["ks"]["k"]["o"].Count],
v = new Vector2[d["ks"]["k"]["v"].Count],
}
},
path = new BodymovinShapePath { }
};
/* COLORS */
int colorAnimated = d["c"]["a"].AsInt;
if (colorAnimated == 1)
{
i.cSets = new BodymovinAnimatedProperties[d["c"]["k"].Count];
for (int c = 0; c < d["c"]["k"].Count; c++)
{
JSONNode k = d["c"]["k"][c];
i.cSets[c] = new BodymovinAnimatedProperties
{
t = k["t"],
//i = new Vector2(k["i"]["x"][0].AsFloat, k["i"]["y"][0].AsFloat),
//o = new Vector2(k["o"]["x"][0].AsFloat, k["o"]["y"][0].AsFloat),
// Clamp tangents? - FIX
i = new Vector2(Mathf.Clamp(k["i"]["x"][0].AsFloat, -1, 1), Mathf.Clamp(k["i"]["y"][0].AsFloat, -1, 1)),
o = new Vector2(Mathf.Clamp(k["o"]["x"][0].AsFloat, -1, 1), Mathf.Clamp(k["o"]["y"][0].AsFloat, -1, 1)),
s = new Vector3(k["s"][0].AsFloat, k["s"][1].AsFloat, k["s"][2].AsFloat),
e = new Vector3(k["e"][0].AsFloat, k["e"][1].AsFloat, k["e"][2].AsFloat)
};
//Debug.Log("s: " + i.cSets[c].s);
}
}
/* VERTS */
i.ks.pts = new BodyPoint[d["ks"]["k"]["v"].Count];
for (int c = 0; c < d["ks"]["k"]["v"].Count; c++)
{
JSONNode ni = d["ks"]["k"]["i"][c];
JSONNode no = d["ks"]["k"]["o"][c];
JSONNode nv = d["ks"]["k"]["v"][c];
i.ks.k.i[c] = new Vector2(ni[0].AsFloat, ni[1].AsFloat);
i.ks.k.o[c] = new Vector2(no[0].AsFloat, no[1].AsFloat);
i.ks.k.v[c] = new Vector2(nv[0].AsFloat, nv[1].AsFloat);
i.ks.pts[c] = new BodyPoint(i.ks.k.v[c], i.ks.k.i[c], i.ks.k.o[c]);
}
if (i.ks.pts.Length > 0)
{
i.path.points = i.ks.pts;
//Debug.Log("path verts: " + i.path.points);
}
/* ANIMATED VERT SETS */
if (i.path.points == null)
{
i.path.animSets = new BodymovinAnimatedShapeProperties[d["ks"]["k"].Count];
for (int s = 0; s < d["ks"]["k"].Count; s++)
{
JSONNode k = d["ks"]["k"][s];
BodymovinAnimatedShapeProperties kset = new BodymovinAnimatedShapeProperties
{
t = k["t"],
i = k["i"],
o = k["o"],
s = new BodymovinShapeProperties
{
c = k["s"][0]["c"],
i = new Vector2[k["s"][0]["i"].Count],
o = new Vector2[k["s"][0]["o"].Count],
v = new Vector2[k["s"][0]["v"].Count],
},
e = new BodymovinShapeProperties
{
c = k["e"][0]["c"],
i = new Vector2[k["e"][0]["i"].Count],
o = new Vector2[k["e"][0]["o"].Count],
v = new Vector2[k["e"][0]["v"].Count],
},
pts = new BodyPoint[2][]
};
i.path.animSets[s] = kset;
i.path.animSets[s].pts[0] = new BodyPoint[k["s"][0]["v"].Count];
i.path.animSets[s].pts[1] = new BodyPoint[k["e"][0]["v"].Count];
//Debug.Log("set - " + kset.t + " i - " + kset.i.ToString("F4") + " o - " + kset.o.ToString("F4"));
if (kset.s.v.Length > 0)
{
for (int c = 0; c < k["s"][0]["v"].Count; c++)
{
/* START SET */
JSONNode ni = k["s"][0]["i"][c];
JSONNode no = k["s"][0]["o"][c];
JSONNode nv = k["s"][0]["v"][c];
kset.s.i[c] = new Vector2(ni[0].AsFloat, ni[1].AsFloat);
kset.s.o[c] = new Vector2(no[0].AsFloat, no[1].AsFloat);
kset.s.v[c] = new Vector2(nv[0].AsFloat, nv[1].AsFloat);
/* END SET */
ni = k["e"][0]["i"][c];
no = k["e"][0]["o"][c];
nv = k["e"][0]["v"][c];
kset.e.i[c] = new Vector2(ni[0].AsFloat, ni[1].AsFloat);
kset.e.o[c] = new Vector2(no[0].AsFloat, no[1].AsFloat);
kset.e.v[c] = new Vector2(nv[0].AsFloat, nv[1].AsFloat);
/* BOTH PTS */
kset.pts[0][c] = new BodyPoint(kset.s.v[c], kset.s.i[c], kset.s.o[c]);
kset.pts[1][c] = new BodyPoint(kset.e.v[c], kset.e.i[c], kset.e.o[c]);
}
}
i.ks.ksets[s] = kset;
}
if (i.path.animSets.Length > 0)
{
i.path.points = i.path.animSets[0].pts[0];
}
}
b.it[j] = i;
if (i.ty == "st")
{
b.strokeColorSets = i.cSets != null && i.cSets.Length > 0 ? i.cSets : new BodymovinAnimatedProperties[0];
b.strokeColor = i.cSets != null && i.cSets.Length > 0 ? new float[]{ i.cSets[0].s[0], i.cSets[0].s[1], i.cSets[0].s[2] } : i.c;
b.strokeHidden = i.hd;
b.strokeWidth = i.w;
}
if (i.ty == "fl")
{
b.fillColorSets = i.cSets != null && i.cSets.Length > 0 ? i.cSets : new BodymovinAnimatedProperties[0];
b.fillColor = i.cSets != null && i.cSets.Length > 0 ? new float[]{ i.cSets[0].s[0], i.cSets[0].s[1], i.cSets[0].s[2] } : i.c;
b.fillHidden = i.hd;
}
if (i.ty == "sh")
{
b.item = i;
i.path.closed = i.path.animSets == null ? i.ks.k.c : i.ks.ksets[0].s.c;
b.paths[pathCount] = i.path;
pathCount += 1;
//Debug.Log("paths shape: " + pathCount);
//Debug.Log("paths shape pts: " + i.path.points.Length);
//Debug.Log("path: " + pathCount);
}
j++;
}
}
}
public struct BodymovinLayer
{
public string id;
public string refId;
public float startTime;
public string nm;
public BodymovinShape[] shapes;
public int parent;
public int ind;
public Vector3 positionOffset;
public Vector3 anchorPoint;
public BodymovinAnimatedProperties[] anchorSets;
public Vector3 position;
public BodymovinAnimatedProperties[] positionSets;
public Vector3 scale;
public BodymovinAnimatedProperties[] scaleSets;
public float opacity;
public BodymovinAnimatedProperties[] opacitySets;
public Vector3 rotationEuler;
public Quaternion rotation;
public BodymovinAnimatedProperties[] rotationXSets;
public BodymovinAnimatedProperties[] rotationYSets;
public BodymovinAnimatedProperties[] rotationZSets;
public float inFrame;
public float outFrame;
public int blendMode;
}
public struct BodymovinShape
{
public string ty;
public BodymovinShapeItem[] it;
public BodymovinShapeItem item;
public float[] strokeColor;
public BodymovinAnimatedProperties[] strokeColorSets;
public float[] fillColor;
public BodymovinAnimatedProperties[] fillColorSets;
public bool strokeHidden;
public float strokeWidth;
public bool fillHidden;
public BodymovinShapePath[] paths;
}
public struct BodymovinShapeItem
{
public string ty;
public string nm;
public string mn;
public float[] c;
public BodymovinAnimatedProperties[] cSets;
public float w;
public bool hd;
public int ix;
public BodymovinShapeVertices ks;
public BodymovinShapePath path;
}
public struct BodymovinShapePath
{
public bool closed;
public BodyPoint[] points;
public BodymovinAnimatedShapeProperties[] animSets;
}
public struct BodymovinShapeVertices
{
public int a;
public int ix;
public BodymovinShapeProperties k;
public BodymovinAnimatedShapeProperties[] ksets;
// Simplify point aggregation
public BodyPoint[] pts;
}
public struct BodymovinShapeProperties
{
public Vector2[] i;
public Vector2[] o;
public Vector2[] v;
public bool c;
}
public struct BodymovinAnimatedShapeProperties
{
public float t;
public Vector2 i;
public Vector2 o;
public BodymovinShapeProperties s;
public BodymovinShapeProperties e;
// Simplify point aggregation, start + end sets
public BodyPoint[][] pts;
}
public struct BodymovinAnimatedProperties
{
public float t;
public Vector3 ix;
public Vector3 iy;
public Vector3 ox;
public Vector3 oy;
public Vector2 i;
public Vector2 o;
public Vector3 ti;
public Vector3 to;
public Vector3 s;
public Vector3 e;
public float sf;
public float ef;
}
// Custom structures
public struct BodyPoint
{
public Vector2 i;
public Vector2 o;
public Vector2 p;
public BodyPoint(Vector2 point, Vector2 inPoint, Vector2 outPoint)
{
i = inPoint;
o = outPoint;
p = point;
}
}
}
<|start_filename|>Assets/U.movin/MovinShape.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Unity.VectorGraphics;
using UnityEngine.Rendering;
namespace u.movin
{
public class MovinShape
{
public GameObject gameObject;
public Transform transform
{
get { return gameObject.transform; }
}
public MovinShapeSlave[] slaves;
public BodymovinShape content;
public Shape shape;
public Scene scene;
public Mesh mesh;
public MeshFilter filter;
public MeshRenderer renderer;
public List<VectorUtils.Geometry> geoms;
public VectorUtils.TessellationOptions options;
public SortingGroup sorting;
public BodyPoint[] points;
public BodyPoint[] startPoints;
public BodyPoint[] endPoints;
public Movin movin;
public MovinLayer layer;
public BezierPathSegment[] segments;
public bool closed;
public PathProperties props;
public SolidFill fill;
public Stroke stroke;
public bool animated = false;
public bool strokeColorAnimated = false;
public bool fillColorAnimated = false;
public MotionProps motion;
public MotionProps mstrokec;
public MotionProps mfillc;
public Vector3 currentStrokeColor;
public Vector3 currentFillColor;
public BodymovinAnimatedShapeProperties[] motionSet;
public MovinShape() { }
public MovinShape(MovinLayer layer, BodymovinShape content)
{
this.content = content;
if (content.paths == null || content.paths.Length < 1) { Debug.Log("DON'T DRAW SHAPE -> NO PTS"); return; }
this.layer = layer;
this.movin = layer.movin;
Transform parent = layer.transform;
/* FIRST SHAPE PROPS */
points = (BodyPoint[])content.paths[0].points.Clone();
motionSet = content.paths[0].animSets;
closed = content.paths[0].closed;
/* ANIM SETUP */
MotionSetup(ref animated, ref motion, motionSet);
MotionSetup(ref strokeColorAnimated, ref mstrokec, content.strokeColorSets);
MotionSetup(ref fillColorAnimated, ref mfillc, content.fillColorSets);
/* GAMEOBJECT, MESH, MATERIAL */
gameObject = new GameObject(content.item.ty + " pts: " + points.Length + " closed: " + closed);
transform.SetParent(parent, false);
transform.localPosition = -layer.content.anchorPoint;
mesh = new Mesh();
filter = gameObject.AddComponent<MeshFilter>();
filter.mesh = mesh;
renderer = gameObject.AddComponent<MeshRenderer>();
renderer.material = new Material(Shader.Find("Sprites/Default"));
//renderer.material = new Material(Shader.Find("Unlit/Vector"));
sorting = gameObject.AddComponent<SortingGroup>();
sorting.sortingOrder = movin.sort + layer.sort;
/* SETUP VECTOR */
Color stClr = (content.strokeColor == null) ? new Color(1, 1, 1) : new Color(content.strokeColor[0], content.strokeColor[1], content.strokeColor[2]);
Color flClr = (content.fillColor == null) ? new Color(1, 1, 1) : new Color(content.fillColor[0], content.fillColor[1], content.fillColor[2]);
currentStrokeColor = new Vector3(stClr.r, stClr.g, stClr.b);
currentFillColor = new Vector3(flClr.r, flClr.g, flClr.b);
fill = content.fillHidden || content.fillColor == null ? null : new SolidFill() { Color = flClr };
stroke = content.strokeHidden || content.strokeColor == null ? null : new Stroke() { Color = stClr, HalfThickness = content.strokeWidth * movin.strokeWidth };
props = new PathProperties() { Stroke = stroke };
shape = new Shape() {
Fill = fill,
PathProps = props,
FillTransform = Matrix2D.identity
};
options = movin.options;
scene = new Scene() {
Root = new SceneNode() { Shapes = new List<Shape> { shape } }
};
UpdateMesh();
// ADDITIONAL SHAPE PATHS
slaves = new MovinShapeSlave[content.paths.Length - 1];
for (int i = 1; i <= slaves.Length; i++) {
slaves[i - 1] = new MovinShapeSlave(this, content.paths[i], movin.strokeWidth);
}
}
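/* ----- POINTS -> BEZIER SEGMENTS ----- */
// Bodymovin stores in/out tangents relative to each vertex; these helpers convert
// them to absolute control points (P1 = point + out tangent, P2 = next point + in
// tangent), flipping Y for Unity.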
public void UpdateSegments(BodyPoint[] pts, ref BezierPathSegment[] segs)
{
float y = -1f;
for (int i = 0; i < pts.Length; i++)
{
BodyPoint point = pts[i];
// Next point...
bool last = i >= pts.Length - 1;
BodyPoint nextPoint = last ? pts[0] : pts[i + 1];
// UPDATE segment
segs[i].P0.x = point.p.x;
segs[i].P0.y = point.p.y * y;
segs[i].P1.x = point.p.x + point.o.x;
segs[i].P1.y = (point.p.y + point.o.y) * y;
segs[i].P2.x = nextPoint.p.x + nextPoint.i.x;
segs[i].P2.y = (nextPoint.p.y + nextPoint.i.y) * y;
}
int l = segs.Length - 1;
segs[l].P0.x = pts[0].p.x;
segs[l].P0.y = pts[0].p.y * y;
segs[l].P1.x = segs[l].P1.y = segs[l].P2.x = segs[l].P2.y = 0;
}
public BezierPathSegment[] ConvertPointsToSegments(BodyPoint[] pts)
{
float y = -1f;
int cnt = pts.Length + (closed ? 1 : 0);
BezierPathSegment[] segs = new BezierPathSegment[cnt];
int i = 0;
for (; i < pts.Length; i++) {
BodyPoint point = pts[i];
// Next point wraps back to the first point for the final segment
bool last = i >= pts.Length - 1;
BodyPoint nextPoint = last ? pts[0] : pts[i + 1];
// Make segment
BezierPathSegment s = new BezierPathSegment()
{
P0 = new Vector2(point.p.x, point.p.y * y),
P1 = new Vector2((point.p.x + point.o.x), (point.p.y + point.o.y) * y),
P2 = new Vector2((nextPoint.p.x + nextPoint.i.x), (nextPoint.p.y + nextPoint.i.y) * y)
};
segs[i] = s;
}
if (pts.Length > 0 && i == cnt - 1)
{
BezierPathSegment final = new BezierPathSegment()
{
P0 = new Vector2(pts[0].p.x, pts[0].p.y * y)
};
segs[i] = final;
}
/* READOUT */
//foreach (BezierPathSegment s in segs)
//{
// Debug.Log("P0: " + s.P0 + " P1: " + s.P1 + " P2: " + s.P2);
//}
return segs;
}
public void Update(float frame)
{
/* ----- ANIM PROPS ----- */
if (animated && !motion.completed) {
UpdateProperty(frame, ref motion, motionSet);
}
if (strokeColorAnimated && !mstrokec.completed) {
UpdateProperty(frame, ref mstrokec, content.strokeColorSets);
}
if (fillColorAnimated && !mfillc.completed) {
UpdateProperty(frame, ref mfillc, content.fillColorSets);
}
if ((animated && !motion.completed) || (strokeColorAnimated && !mstrokec.completed) || (fillColorAnimated && !mfillc.completed)) {
FillMesh();
// geoms = VectorUtils.TessellateScene(scene, options);
// VectorUtils.FillMesh(mesh, geoms, 1.0f);
}
if (slaves == null) { return; }
for (int i = 0; i < slaves.Length; i++) {
slaves[i].Update(frame);
}
}
public void UpdateOpacity(float opacity)
{
if (!Application.isPlaying) { return; }
if (renderer == null) return;
Color c = renderer.material.color;
c.a = opacity * 0.01f;
renderer.material.color = c;
}
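// Advances shape vertex animation: eases between the current keyframe's start and
// end point sets, then rebuilds the bezier segments for the mesh.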
public void UpdateProperty(float frame, ref MotionProps m, BodymovinAnimatedShapeProperties[] set)
{
/* ----- CHECK FOR COMPLETE ----- */
if (m.keys <= 0)
{
//Debug.Log(">>> NO PROP KEYS TO ANIMATE!");
m.completed = true;
return;
}
if (frame >= m.endFrame)
{
if (set == null || m.key + 1 == set.Length - 1)
{
m.completed = true;
//Debug.Log("****** Prop Animation done! ******");
return;
}
SetKeyframe(ref m, set, m.key + 1);
}
/* ----- PERCENT KEYFRAME COMPLETE ----- */
m.percent = (frame - m.startFrame) / (m.endFrame - m.startFrame);
/* ----- CUBIC BEZIER EASE ----- */
float ease = Ease.CubicBezier(Vector2.zero, m.currentOutTangent, m.nextInTangent, Vector2.one, m.percent);
/* ----- UPDATE POINTS ----- */
for (int i = 0; i < points.Length; i++)
{
if (m.percent < 0)
{
// BACK TO START OF KEYFRAME
points[i].p = startPoints[i].p;
points[i].i = startPoints[i].i;
points[i].o = startPoints[i].o;
} else
{
points[i].p = startPoints[i].p + ((endPoints[i].p - startPoints[i].p) * ease);
points[i].i = startPoints[i].i + ((endPoints[i].i - startPoints[i].i) * ease);
points[i].o = startPoints[i].o + ((endPoints[i].o - startPoints[i].o) * ease);
}
}
// Debug.Log("Shape anim fr: " + frame + " m.percent: " + m.percent);
/* ----- UPDATE MESH ----- */
UpdateMesh(false);
}
public void UpdateProperty(float frame, ref MotionProps m, BodymovinAnimatedProperties[] set)
{
/* ----- CHECK FOR COMPLETE ----- */
if (m.keys <= 0)
{
//Debug.Log(">>> NO PROP KEYS TO ANIMATE!");
m.completed = true;
return;
}
if (frame >= m.endFrame)
{
if (m.key + 1 == set.Length - 1)
{
m.completed = true;
//Debug.Log("****** Prop Animation done! ******");
return;
}
while (frame >= m.endFrame){
// Debug.Log("fr > end, eq: " + frame + " - " + m.startFrame + " / (" + m.endFrame + " - " + m.startFrame + ") keyframe: " + m.key );
SetKeyframe(ref m, set, m.key + 1);
if (m.key == 0){ break; }
}
}
/* ----- PERCENT KEYFRAME COMPLETE ----- */
m.percent = (frame - m.startFrame) / (m.endFrame - m.startFrame);
/* ----- CUBIC BEZIER EASE ----- */
//Debug.Log("to: " + m.currentOutTangent + " ti: " + m.nextInTangent);
float ease = Ease.CubicBezier(Vector2.zero, m.currentOutTangent, m.nextInTangent, Vector2.one, m.percent);
/* ----- UPDATE PROPERTY ----- */
if (set == content.strokeColorSets) {
Color c = stroke.Color;
Vector3 v = Value3(m, ease);
currentStrokeColor = v;
c.r = v.x;
c.g = v.y;
c.b = v.z;
UpdateStrokeColor(c);
} else if (set == content.fillColorSets) {
//Debug.Log("diff: " + (set[m.key].e.x - set[m.key].s.x).ToString("F4") + " fnl: " + (set[m.key].s + ((set[m.key].e - set[m.key].s) * ease)) + " percent: " + m.percent + " ease: " + ease);
Color c = fill.Color;
Vector3 v = Value3(m, ease);
currentFillColor = v;
c.r = v.x;
c.g = v.y;
c.b = v.z;
UpdateFillColor(c);
}
}
public void UpdateStrokeColor(Color c, bool fillMesh = false)
{
stroke.Color = c;
props.Stroke = stroke;
if (fillMesh)
FillMesh();
if (slaves == null) { return; }
for (int i = 0; i < slaves.Length; i++) {
slaves[i].UpdateStrokeColor(c, true);
}
}
public void UpdateFillColor(Color c, bool fillMesh = false)
{
fill.Color = c;
shape.Fill = fill;
if (fillMesh)
FillMesh();
if (slaves == null) { return; }
for (int i = 0; i < slaves.Length; i++) {
slaves[i].UpdateFillColor(c, true);
}
}
public void UpdateAnchor(Vector3 a)
{
transform.localPosition = -a;
if (slaves == null) { return; }
for (int i = 0; i < slaves.Length; i++) {
slaves[i].UpdateAnchor(a);
}
}
public Vector3 Value3(MotionProps m, float ease)
{
return (m.percent < 0 || m.percent > 1) ?
m.startValue : m.startValue + ((m.endValue - m.startValue) * ease);
}
//public Vector3 Value3b(MotionProps m, BodymovinAnimatedProperties[] set, float ease)
//{
// float x = m.percent < 0 ?
// set[m.key].s.x : set[m.key].s.x + ((set[m.key].e.x - set[m.key].s.x) * ease);
// float y = m.percent < 0 ?
// set[m.key].s.y : set[m.key].s.y + ((set[m.key].e.y - set[m.key].s.y) * ease);
// float z = m.percent < 0 ?
// set[m.key].s.z : set[m.key].s.z + ((set[m.key].e.z - set[m.key].s.z) * ease);
// return new Vector3(x, y, z);
//}
public void ResetKeyframes()
{
if (animated) { SetKeyframe(ref motion, motionSet, 0); }
if (strokeColorAnimated) { SetKeyframe(ref mstrokec, content.strokeColorSets, 0); }
if (fillColorAnimated) { SetKeyframe(ref mfillc, content.fillColorSets, 0); }
if (slaves == null) { return; }
for (int i = 0; i < slaves.Length; i++) {
slaves[i].ResetKeyframes();
}
}
/* ----- MOTION SETUP ------ */
public void MotionSetup(ref bool b, ref MotionProps prop, BodymovinAnimatedProperties[] set)
{
b = set != null && set.Length > 0;
if (b)
{
prop = new MotionProps { keys = set.Length };
SetKeyframe(ref prop, set, 0);
}
}
public void MotionSetup(ref bool b, ref MotionProps prop, BodymovinAnimatedShapeProperties[] set)
{
b = set != null && set.Length > 0;
if (b)
{
prop = new MotionProps { keys = set.Length };
SetKeyframe(ref prop, set, 0);
}
}
/* ----- KEYFRAME SETTERS ----- */
public void SetKeyframe(ref MotionProps prop, BodymovinAnimatedProperties[] set, int k = 0)
{
prop.completed = false;
if (prop.keys <= 0 || set == null) { return; }
if (k >= prop.keys) { k = 0; }
prop.key = k;
prop.startFrame = set[k].t;
prop.endFrame = set.Length > k + 1 ? set[k + 1].t : prop.startFrame;
prop.currentOutTangent = set[k].o;
prop.nextInTangent = set[k].i;
prop.startValue = set[k].s;
prop.endValue = set[k].e;
}
public void SetKeyframe(ref MotionProps prop, BodymovinAnimatedShapeProperties[] set, int k = 0)
{
prop.completed = false;
if (prop.keys <= 0 || set == null) { return; }
if (k >= prop.keys) { k = 0; }
prop.key = k;
prop.startFrame = set[k].t;
prop.endFrame = set.Length > k + 1 ? set[k + 1].t : prop.startFrame;
prop.currentOutTangent = set[k].o;
prop.nextInTangent = set[k].i;
if (set == motionSet)
{
startPoints = set[k].pts[0];
endPoints = set[k].pts[1];
}
}
/* ----- UPDATE MESH ----- */
public void UpdateMesh(bool redraw = true)
{
if (segments == null) {
segments = ConvertPointsToSegments(points);
shape.Contours = new BezierContour[] { new BezierContour() { Segments = segments, Closed = closed } };
} else {
UpdateSegments(points, ref segments);
}
if (redraw)
FillMesh();
}
public void FillMesh()
{
geoms = VectorUtils.TessellateScene(scene, options);
VectorUtils.FillMesh(mesh, geoms, 1.0f);
}
/* ---- BLENDING ---- */
public void CreateBlendKeyframe(BodymovinShape blendContent, float duration, Vector2[] ease){
/* SHAPE PATH */
animated = true;
CreatePathKeyframe(ref motion, 0, duration + 0, ease,
(BodyPoint[])blendContent.paths[0].points.Clone()
);
/* STROKE + FILL COLORS */
Vector3 stClr = (blendContent.strokeColor == null) ? Vector3.one : new Vector3(blendContent.strokeColor[0], blendContent.strokeColor[1], blendContent.strokeColor[2]);
strokeColorAnimated = true;
CreateKeyframe(ref mstrokec, 0, duration, ease, currentStrokeColor, stClr);
Vector3 flClr = (blendContent.fillColor == null) ? Vector3.one : new Vector3(blendContent.fillColor[0], blendContent.fillColor[1], blendContent.fillColor[2]);
fillColorAnimated = true;
CreateKeyframe(ref mfillc, 0, duration, ease, currentFillColor, flClr);
if (slaves == null) { return; }
for (int i = 1; i <= slaves.Length; i++) {
slaves[i-1].animated = true;
slaves[i-1].CreatePathKeyframe(ref slaves[i-1].motion, 0, duration + 0, ease,
(BodyPoint[])blendContent.paths[i].points.Clone()
);
}
}
public void CreateKeyframe(ref MotionProps prop, float start, float end,
Vector2[] ease, Vector3 startValue, Vector3 endValue, int k = 0)
{
prop.completed = false;
prop.keys = 1;
prop.key = k;
prop.startFrame = start;
prop.endFrame = end;
prop.currentOutTangent = ease[0];
prop.nextInTangent = ease[1];
prop.startValue = startValue;
prop.endValue = endValue;
}
public void CreatePathKeyframe(ref MotionProps prop, float start, float end, Vector2[] ease, BodyPoint[] pts, int k = 0)
{
prop.completed = false;
prop.keys = 1;
prop.key = k;
prop.startFrame = start;
prop.endFrame = end;
prop.currentOutTangent = ease[0];
prop.nextInTangent = ease[1];
startPoints = points;
endPoints = pts;
}
public void UpdateLayersWithContent(BodymovinShape s){
content = s;
points = (BodyPoint[])content.paths[0].points.Clone();
motionSet = content.paths[0].animSets;
MotionSetup(ref animated, ref motion, motionSet);
MotionSetup(ref strokeColorAnimated, ref mstrokec, content.strokeColorSets);
MotionSetup(ref fillColorAnimated, ref mfillc, content.fillColorSets);
transform.localPosition = -layer.content.anchorPoint;
if (slaves == null) { return; }
for (int i = 1; i <= slaves.Length; i++) {
slaves[i-1].transform.localPosition = -layer.content.anchorPoint;
slaves[i-1].points = (BodyPoint[])content.paths[i].points.Clone();
slaves[i-1].motionSet = content.paths[i].animSets;
slaves[i-1].MotionSetup(ref slaves[i-1].animated, ref slaves[i-1].motion, slaves[i-1].motionSet);
}
}
}
}
<|start_filename|>Assets/U.movin/MovinLayer.cs<|end_filename|>
using UnityEngine;
namespace u.movin
{
public class MovinLayer
{
public GameObject gameObject;
public Transform transform
{
get { return gameObject.transform; }
}
public Vector3 positionOffset = Vector3.zero;
public int sort;
public Movin movin;
public BodymovinLayer content;
public MovinShape[] shapes;
public bool shapesActive = true;
public MotionProps mpos;
public MotionProps mscale;
public MotionProps mrotx;
public MotionProps mroty;
public MotionProps mrotz;
public MotionProps mopacity;
public MotionProps manchor;
public Vector3 finalRotation = Vector3.zero;
public bool positionAnimated = false;
public bool scaleAnimated = false;
public bool rotationXAnimated = false;
public bool rotationYAnimated = false;
public bool rotationZAnimated = false;
public bool opacityAnimated = false;
public bool anchorAnimated = false;
public float currentOpacity;
public Vector3 currentAnchor;
public MovinLayer(Movin movin, BodymovinLayer layer, int sort = 0)
{
this.movin = movin;
this.content = layer;
this.sort = sort;
gameObject = new GameObject(content.ind + " " + content.nm);
transform.SetParent(movin.container.transform, false);
positionOffset = content.positionOffset;
transform.localPosition = content.position + positionOffset;
transform.localRotation = content.rotation;
transform.localScale = content.scale;
finalRotation = content.rotationEuler;
/* ANIM SETUP */
MotionSetup(ref positionAnimated, ref mpos, content.positionSets);
MotionSetup(ref anchorAnimated, ref manchor, content.anchorSets);
MotionSetup(ref scaleAnimated, ref mscale, content.scaleSets);
MotionSetup(ref rotationXAnimated, ref mrotx, content.rotationXSets);
MotionSetup(ref rotationYAnimated, ref mroty, content.rotationYSets);
MotionSetup(ref rotationZAnimated, ref mrotz, content.rotationZSets);
MotionSetup(ref opacityAnimated, ref mopacity, content.opacitySets);
currentAnchor = content.anchorPoint;
currentOpacity = content.opacity;
/* SHAPES */
shapes = new MovinShape[content.shapes.Length];
int j = 0;
for (int i = content.shapes.Length - 1; i >= 0; i--)
{
MovinShape shape = new MovinShape(this, content.shapes[i]);
shape.UpdateOpacity(content.opacity);
shapes[i] = shape;
//shape.transform.localPosition += new Vector3(0, 0, -32 * j);
j += 1;
}
}
public void MotionSetup(ref bool b, ref MotionProps prop, BodymovinAnimatedProperties[] set)
{
b = set.Length > 0;
if (b)
{
prop = new MotionProps {
keys = set.Length
};
SetKeyframe(ref prop, set, 0);
}
}
public void SetKeyframe(ref MotionProps prop, BodymovinAnimatedProperties[] set, int k = 0)
{
prop.completed = false;
if (prop.keys <= 0) { return; }
if (k >= prop.keys) { k = 0; }
prop.key = k;
prop.startFrame = set[k].t;
prop.endFrame = set.Length > k + 1 ? set[k + 1].t : prop.startFrame;
prop.currentOutTangent = set[k].o;
prop.nextInTangent = set[k].i;
bool v3 = (set == content.positionSets || set == content.scaleSets || set == content.anchorSets);
prop.startValue = v3 ? set[k].s : new Vector3(set[k].sf, 0, 0);
prop.endValue = v3 ? set[k].e : new Vector3(set[k].ef, 0, 0);
}
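// Per-frame layer update: toggles the GameObject by the layer's in/out points,
// updates child shapes, then advances any animated transform properties.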
public void Update(float frame)
{
if (gameObject == null){ return; }
/* ----- IN + OUT POINTS FOR LAYER ----- */
if (!gameObject.activeInHierarchy && frame >= content.inFrame) {
gameObject.SetActive(true);
// ShapesActive(true);
}
if (!gameObject.activeInHierarchy) { return; }
if (gameObject.activeInHierarchy && (frame >= content.outFrame || frame < content.inFrame))
{
gameObject.SetActive(false);
// ShapesActive(false);
return;
}
/* ----- SEND DOWN UPDATES ----- */
for (int i = 0; i < shapes.Length; i++) {
shapes[i].Update(frame);
}
/* ----- ANIM PROPS ----- */
if (opacityAnimated && !mopacity.completed) {
UpdateProperty(frame, ref mopacity, content.opacitySets);
}
if (positionAnimated && !mpos.completed) {
UpdateProperty(frame, ref mpos, content.positionSets);
}
if (anchorAnimated && !manchor.completed) {
UpdateProperty(frame, ref manchor, content.anchorSets);
}
if (scaleAnimated && !mscale.completed) {
UpdateProperty(frame, ref mscale, content.scaleSets);
}
if (rotationXAnimated && !mrotx.completed) {
UpdateProperty(frame, ref mrotx, content.rotationXSets);
}
if (rotationYAnimated && !mroty.completed) {
UpdateProperty(frame, ref mroty, content.rotationYSets);
}
if (rotationZAnimated && !mrotz.completed) {
UpdateProperty(frame, ref mrotz, content.rotationZSets);
}
if (rotationXAnimated || rotationYAnimated || rotationZAnimated) {
transform.localRotation = Quaternion.Euler(finalRotation);
}
}
public void UpdateProperty(float frame, ref MotionProps m, BodymovinAnimatedProperties[] set)
{
/* ----- CHECK FOR COMPLETE ----- */
if (m.keys <= 0) {
//Debug.Log(">>> NO PROP KEYS TO ANIMATE!");
m.completed = true;
return;
}
if (frame >= m.endFrame)
{
if (m.key + 1 == set.Length - 1)
{
m.completed = true;
//Debug.Log("****** Prop Animation done! ******");
return;
}
while (frame >= m.endFrame){
// Debug.Log("fr > end, eq: " + frame + " - " + m.startFrame + " / (" + m.endFrame + " - " + m.startFrame + ") keyframe: " + m.key );
SetKeyframe(ref m, set, m.key + 1);
if (m.key == 0){ break; }
}
}
/* ----- PERCENT KEYFRAME COMPLETE ----- */
m.percent = (frame - m.startFrame) / (m.endFrame - m.startFrame);
/* ----- CUBIC BEZIER EASE ----- */
float ease = Ease.CubicBezier(Vector2.zero, m.currentOutTangent, m.nextInTangent, Vector2.one, m.percent);
/* ----- UPDATE PROPERTY ----- */
if (set == content.positionSets) {
transform.localPosition = Value3(m, ease) + positionOffset;
} else if (set == content.anchorSets) {
Vector3 v = Value3(m, ease);
currentAnchor = v;
for (int i = 0; i < shapes.Length; i++) {
shapes[i].UpdateAnchor(v);
}
} else if (set == content.scaleSets) {
transform.localScale = Value3(m, ease);
} else if (set == content.rotationXSets) {
finalRotation.x = Value1(m, ease);
} else if (set == content.rotationYSets) {
finalRotation.y = Value1(m, ease);
} else if (set == content.rotationZSets) {
finalRotation.z = Value1(m, ease);
} else if (set == content.opacitySets) {
float v = Value1(m, ease);
currentOpacity = v;
for (int i = 0; i < shapes.Length; i++) {
shapes[i].UpdateOpacity(v);
}
}
}
public Vector3 Value3(MotionProps m, float ease)
{
return (m.percent < 0 || m.percent > 1) ?
m.startValue : m.startValue + ((m.endValue - m.startValue) * ease);
}
public float Value1(MotionProps m, float ease)
{
return (m.percent < 0 || m.percent > 1) ?
m.startValue.x : m.startValue.x + ((m.endValue.x - m.startValue.x) * ease);
}
public void ResetKeyframes()
{
if (positionAnimated) { SetKeyframe(ref mpos, content.positionSets, 0); }
if (anchorAnimated) { SetKeyframe(ref manchor, content.anchorSets, 0); }
if (scaleAnimated) { SetKeyframe(ref mscale, content.scaleSets, 0); }
if (rotationXAnimated) { SetKeyframe(ref mrotx, content.rotationXSets, 0); }
if (rotationYAnimated) { SetKeyframe(ref mroty, content.rotationYSets, 0); }
if (rotationZAnimated) { SetKeyframe(ref mrotz, content.rotationZSets, 0); }
if (opacityAnimated) { SetKeyframe(ref mopacity, content.opacitySets, 0); }
for (int i = 0; i < shapes.Length; i++) {
shapes[i].ResetKeyframes();
}
}
public void ShapesActive(bool on){
shapesActive = on;
for (int i = 0; i < shapes.Length; i++) {
shapes[i].gameObject.SetActive(on);
}
}
/* ---- BLENDING ---- */
public void CreateBlendKeyframe(BodymovinLayer blendLayer, float duration, Vector2[] ease){
positionAnimated = true;
CreateKeyframe(ref mpos, 0, duration, ease, transform.localPosition, blendLayer.position + positionOffset);
anchorAnimated = true;
CreateKeyframe(ref manchor, 0, duration, ease, currentAnchor, blendLayer.anchorPoint);
scaleAnimated = true;
CreateKeyframe(ref mscale, 0, duration, ease, transform.localScale, blendLayer.scale);
rotationXAnimated = true;
CreateKeyframe(ref mrotx, 0, duration, ease, new Vector3(finalRotation.x, 0, 0), new Vector3(blendLayer.rotationEuler.x, 0, 0));
rotationYAnimated = true;
CreateKeyframe(ref mroty, 0, duration, ease, new Vector3(finalRotation.y, 0, 0), new Vector3(blendLayer.rotationEuler.y, 0, 0));
rotationZAnimated = true;
CreateKeyframe(ref mrotz, 0, duration, ease, new Vector3(finalRotation.z, 0, 0), new Vector3(blendLayer.rotationEuler.z, 0, 0));
opacityAnimated = true;
CreateKeyframe(ref mopacity, 0, duration, ease, new Vector3(currentOpacity, 0, 0), new Vector3(blendLayer.opacity, 0, 0));
for (int i = 0; i < shapes.Length; i++) {
shapes[i].CreateBlendKeyframe(blendLayer.shapes[i], duration, ease);
}
}
public void CreateKeyframe(ref MotionProps prop, float start, float end,
Vector2[] ease, Vector3 startValue, Vector3 endValue, int k = 0)
{
prop.completed = false;
prop.keys = 1;
prop.key = k;
prop.startFrame = start;
prop.endFrame = end;
prop.currentOutTangent = ease[0];
prop.nextInTangent = ease[1];
prop.startValue = startValue;
prop.endValue = endValue;
}
public void UpdateLayersWithContent(BodymovinLayer l){
content = l;
gameObject.name = content.ind + " " + content.nm;
positionOffset = content.positionOffset;
transform.localPosition = content.position + positionOffset;
transform.localRotation = content.rotation;
transform.localScale = content.scale;
finalRotation = content.rotationEuler;
MotionSetup(ref positionAnimated, ref mpos, content.positionSets);
MotionSetup(ref anchorAnimated, ref manchor, content.anchorSets);
MotionSetup(ref scaleAnimated, ref mscale, content.scaleSets);
MotionSetup(ref rotationXAnimated, ref mrotx, content.rotationXSets);
MotionSetup(ref rotationYAnimated, ref mroty, content.rotationYSets);
MotionSetup(ref rotationZAnimated, ref mrotz, content.rotationZSets);
MotionSetup(ref opacityAnimated, ref mopacity, content.opacitySets);
for (int i = 0; i < shapes.Length; i++) {
shapes[i].UpdateLayersWithContent(l.shapes[i]);
}
}
}
}
<|start_filename|>Assets/Scripts/Bust.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Bust : MonoBehaviour {
void Start () {
Movin m = new Movin(transform, "json/bust", quality:0.2f);
m.Play();
}
void Update(){
}
}
<|start_filename|>Assets/U.movin/Utils/Updater.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
namespace u.movin
{
public class Updater : MonoBehaviour
{
public Action fired;
void Update()
{
fired?.Invoke();
}
}
}
<|start_filename|>Assets/Scripts/Samurai.cs<|end_filename|>
using UnityEngine;
public class Samurai : MonoBehaviour {
Movin samurai;
string str = "samurai";
void Start () {
samurai = new Movin(transform, "json/samurai");
samurai.Play();
}
void Update(){
if (Input.GetMouseButtonDown(0)){
str = (str == "samurai") ? "samurai2" : "samurai";
samurai.Blend("json/" + str, 10f);
}
}
}
| leetful/u.movin |
<|start_filename|>3D-Bin-Packing/Container.cs<|end_filename|>
using System;
namespace _3D_Bin_Packing
{
class Containers
{
#region data members
private String c_id;
private Int32 c_Length;
private Int32 c_minLength;
private Int32 c_maxLength;
private Int32 c_stepLength;
private Int32 c_Width;
private Int32 c_minWidth;
private Int32 c_maxWidth;
private Int32 c_stepWidth;
private Int32 c_Height;
private Int32 c_minHeight;
private Int32 c_maxHeight;
private Int32 c_stepHeight;
private Double c_maxWeight;
private Int32 c_maxCount;
private Boolean b_still_to_open;
private Boolean closed;
/*
A---------------B
/| /|
/ | / |
E--|------------F |
| | | |
Orig|in-------------|--D
| / | /
G/--------------H/
*/
private Point3D origin;
#endregion
#region Functions
public Double volume()
{
return Height * Length * Width;
}
#endregion
#region properties
public Point3D Origin
{
get { return this.origin; }
set { this.origin = value; }
}
public Boolean Still_to_Open
{
get { return this.b_still_to_open; }
set { this.b_still_to_open = value; }
}
public Boolean Closed
{
get { return this.closed; }
set { this.closed = value; }
}
public String ContainerID
{
get { return this.c_id; }
set { this.c_id = value; }
}
public Int32 Length
{
get { return this.c_Length; }
set { this.c_Length = value; }
}
public Int32 MinLength
{
get { return this.c_minLength; }
set { this.c_minLength = value; }
}
public Int32 MaxLength
{
get { return this.c_maxLength; }
set { this.c_maxLength = value; }
}
public Int32 StepLenght
{
get { return this.c_stepLength; }
set { this.c_stepLength = value; }
}
public Int32 Width
{
get { return this.c_Width; }
set { this.c_Width = value; }
}
public Int32 MinWidth
{
get { return this.c_minWidth; }
set { this.c_minWidth = value; }
}
public Int32 MaxWidth
{
get { return this.c_maxWidth; }
set { this.c_maxWidth = value; }
}
public Int32 StepWidth
{
get { return this.c_stepWidth; }
set { this.c_stepWidth = value; }
}
public Int32 Height
{
get { return this.c_Height; }
set { this.c_Height = value; }
}
public Int32 MinHeight
{
get { return this.c_minHeight; }
set { this.c_minHeight = value; }
}
public Int32 MaxHeight
{
get { return this.c_maxHeight; }
set { this.c_maxHeight = value; }
}
public Int32 StepHeight
{
get { return this.c_stepHeight; }
set { this.c_stepHeight = value; }
}
public Double MaxWeight
{
get { return this.c_maxWeight; }
set { this.c_maxWeight = value; }
}
public Int32 MaxCount
{
get { return this.c_maxCount; }
set { this.c_maxCount = value; }
}
#endregion
}
}
<|start_filename|>3D-Bin-Packing/Form1.Designer.cs<|end_filename|>
namespace _3D_Bin_Packing
{
partial class Form1
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(Form1));
this.pictureBox1 = new System.Windows.Forms.PictureBox();
this.descriptionLabel1 = new System.Windows.Forms.Label();
this.xmlFileLabel = new System.Windows.Forms.Label();
this.addxmlbutton = new System.Windows.Forms.Button();
this.XML_openFileDialog = new System.Windows.Forms.OpenFileDialog();
this.generateButton = new System.Windows.Forms.Button();
((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).BeginInit();
this.SuspendLayout();
//
// pictureBox1
//
this.pictureBox1.Image = ((System.Drawing.Image)(resources.GetObject("pictureBox1.Image")));
this.pictureBox1.Location = new System.Drawing.Point(0, 0);
this.pictureBox1.Name = "pictureBox1";
this.pictureBox1.Size = new System.Drawing.Size(616, 104);
this.pictureBox1.SizeMode = System.Windows.Forms.PictureBoxSizeMode.StretchImage;
this.pictureBox1.TabIndex = 0;
this.pictureBox1.TabStop = false;
//
// descriptionLabel1
//
this.descriptionLabel1.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.descriptionLabel1.AutoSize = true;
this.descriptionLabel1.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.descriptionLabel1.ForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(0)))), ((int)(((byte)(0)))), ((int)(((byte)(192)))));
this.descriptionLabel1.Location = new System.Drawing.Point(21, 109);
this.descriptionLabel1.Name = "descriptionLabel1";
this.descriptionLabel1.Size = new System.Drawing.Size(315, 17);
this.descriptionLabel1.TabIndex = 5;
this.descriptionLabel1.Text = "Add XML file showing total Boxes and Containers";
//
// xmlFileLabel
//
this.xmlFileLabel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.xmlFileLabel.AutoSize = true;
this.xmlFileLabel.BackColor = System.Drawing.SystemColors.ButtonHighlight;
this.xmlFileLabel.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
this.xmlFileLabel.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.xmlFileLabel.Location = new System.Drawing.Point(166, 129);
this.xmlFileLabel.MinimumSize = new System.Drawing.Size(430, 20);
this.xmlFileLabel.Name = "xmlFileLabel";
this.xmlFileLabel.Size = new System.Drawing.Size(430, 20);
this.xmlFileLabel.TabIndex = 4;
//
// addxmlbutton
//
this.addxmlbutton.Location = new System.Drawing.Point(21, 129);
this.addxmlbutton.Name = "addxmlbutton";
this.addxmlbutton.Size = new System.Drawing.Size(130, 23);
this.addxmlbutton.TabIndex = 3;
this.addxmlbutton.Text = "Add XML file";
this.addxmlbutton.UseVisualStyleBackColor = true;
this.addxmlbutton.Click += new System.EventHandler(this.addxmlbutton_Click);
//
// XML_openFileDialog
//
this.XML_openFileDialog.Filter = "XML files (*.xml)|*.xml;";
//
// generateButton
//
this.generateButton.Location = new System.Drawing.Point(469, 161);
this.generateButton.Name = "generateButton";
this.generateButton.Size = new System.Drawing.Size(127, 27);
this.generateButton.TabIndex = 8;
this.generateButton.Text = "Calculate Results";
this.generateButton.UseVisualStyleBackColor = true;
this.generateButton.Click += new System.EventHandler(this.generateButton_Click);
//
// Form1
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(616, 199);
this.Controls.Add(this.generateButton);
this.Controls.Add(this.descriptionLabel1);
this.Controls.Add(this.xmlFileLabel);
this.Controls.Add(this.addxmlbutton);
this.Controls.Add(this.pictureBox1);
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
this.MaximizeBox = false;
this.Name = "Form1";
this.Text = "3D Bin Packing";
((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).EndInit();
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
private System.Windows.Forms.PictureBox pictureBox1;
private System.Windows.Forms.Label descriptionLabel1;
private System.Windows.Forms.Label xmlFileLabel;
private System.Windows.Forms.Button addxmlbutton;
private System.Windows.Forms.OpenFileDialog XML_openFileDialog;
private System.Windows.Forms.Button generateButton;
}
}
<|start_filename|>3D-Bin-Packing/MaxRectsBinPack.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace _3D_Bin_Packing
{
class MaxRectsBinPack
{
public Int32 boxWidth = 0;
public Int32 boxLength = 0;
public Int32 boxHeight = 0;
public Boolean allowRotations;
public List<Containers> usedContainers = new List<Containers>();
public List<Containers> freeContainers = new List<Containers>();
public enum FreeContianerChoiceHeuristic
{
/// <summary>
/// BSSF: Positions the Box against the short side of a free container into which it fits the best.
/// </summary>
ContainerBestShortSideFit,
/// <summary>
/// BLSF: Positions the Box against the long side of a free container into which it fits the best.
/// </summary>
ContainerBestLongSideFit,
/// <summary>
/// BAF: Positions the Box into the smallest free container into which it fits.
/// </summary>
ContainerBestVolumeFit,
/// <summary>
/// BL: Does the tetris placement.
/// </summary>
ContainerBottomLeftRule,
/// <summary>
/// CP: Chooses the placement where the Box touches other Containers/Box as much as possible.
/// </summary>
ContainerContactPointRule
}
public MaxRectsBinPack(Int32 width, Int32 length, Int32 height, Boolean rotations = true)
{
Init(width, length, height, rotations);
}
public void Init (Int32 width, Int32 length, Int32 height, Boolean rotations = true)
{
boxWidth = width;
boxLength = length;
boxHeight = height;
allowRotations = rotations;
Containers n = new Containers();
n.Origin.X = 0;
n.Origin.Y = 0;
n.Origin.Z = 0;
n.Width = width;
n.Height = height;
n.Length = length;
usedContainers.Clear();
freeContainers.Clear();
freeContainers.Add(n);
}
}
}
<|start_filename|>3D-Bin-Packing/Box.cs<|end_filename|>
using System;
namespace _3D_Bin_Packing
{
class Box
{
#region Private Data Members
private String b_id;
private Int32 b_quantity;
private Int32 b_length;
private Int32 b_width;
private Int32 b_height;
private Double b_weight;
private Boolean b_allowedRotation_x;
private Boolean b_allowedRotation_y;
private Boolean b_allowedRotation_z;
private Boolean b_toponly;
private Boolean b_bottomonly;
//for output
private Boolean b_is_placed;
private Boolean Rotation_x;
private Boolean Rotation_y;
private Boolean Rotation_z;
private String container_id; // id of the container, in which it is placed.
private Point3D origin; //Point where its origin lies in the container;
#endregion
#region Functions
//Returns true if Box i > Box j else false
public static Boolean compareVolume(Box i, Box j)
{
return ((i.Height * i.Width * i.Length) > (j.Height * j.Width * j.Length));
}
//returns volume of a box;
public Double Volume ()
{
return Height * Width * Length;
}
#endregion
#region Properties
public Boolean IsPlaced
{
get { return this.b_is_placed; }
set { this.b_is_placed = value; }
}
public String BoxID
{
get { return this.b_id; }
set { this.b_id = value; }
}
public Int32 Quantity
{
get { return this.b_quantity; }
set { this.b_quantity = value; }
}
public Int32 Length
{
get { return this.b_length; }
set { this.b_length = value; }
}
public Int32 Width
{
get { return this.b_width; }
set { this.b_width = value; }
}
public Int32 Height
{
get { return this.b_height; }
set { this.b_height = value; }
}
public Double Weight
{
get { return this.b_weight; }
set { this.b_weight = value; }
}
public Boolean TopOnly
{
get { return this.b_toponly; }
set { this.b_toponly = value; }
}
public Boolean BottomOnly
{
get { return this.b_bottomonly; }
set { this.b_bottomonly = value; }
}
public Boolean AllowedRotationsX
{
get { return this.b_allowedRotation_x; }
set { this.b_allowedRotation_x = value; }
}
public Boolean AllowedRotationsY
{
get { return this.b_allowedRotation_y; }
set { this.b_allowedRotation_y = value; }
}
public Boolean AllowedRotationsZ
{
get { return this.b_allowedRotation_z; }
set { this.b_allowedRotation_z = value; }
}
public Boolean RotationX
{
get { return this.Rotation_x; }
set { this.Rotation_x = value; }
}
public Boolean RotationY
{
get { return this.Rotation_y; }
set { this.Rotation_y = value; }
}
public Boolean RotationZ
{
get { return this.Rotation_z; }
set { this.Rotation_z = value; }
}
public String ContainerID
{
get { return this.container_id; }
set { this.container_id = value; }
}
public Point3D Origin
{
get { return this.origin; }
set { this.origin = value; }
}
#endregion
}
}
<|start_filename|>3D-Bin-Packing/Optimization.cs<|end_filename|>
using System;
namespace _3D_Bin_Packing
{
class Optimization
{
private String id;
private String type;
public String OptimizationID
{
get { return this.id; }
set { this.id = value; }
}
public String OptimizationType
{
get { return this.type; }
set { this.type = value; }
}
}
}
<|start_filename|>3D-Bin-Packing/Guillotine3D.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
namespace _3D_Bin_Packing
{
class Guillotine3D
{
// Dictionary holding all the container objects, keyed by ContainerID
public static Dictionary<String, Containers> ContainerList = new Dictionary<String,Containers>();
// Dictionary holding all the box objects, keyed by BoxID
public static Dictionary<String, Box> BoxList = new Dictionary<string, Box>();
// free sub-containers produced by splitting the given containers
// Dictionary<container id, list of sub-containers split from it>
Dictionary<String, List<Containers>> Split_Container_open_list = new Dictionary<string, List<Containers>>();
// boxes placed in each container, keyed by container id
public static Dictionary<String, List<Box>> Container_Containing_Boxes = new Dictionary<string, List<Box>>();
// boxes sorted in descending order of volume
List<KeyValuePair<String, Box>> sorted_box_List;
/// <summary>
/// This function finds the smallest un-opened container in the container list.
/// </summary>
/// <returns>Returns the key of the smallest un-opened container.</returns>
String find_smallest_unopen_container ()
{
Double volume = ContainerList.First().Value.Height * ContainerList.First().Value.Width * ContainerList.First().Value.Length;
String key = ContainerList.First().Key;
foreach (KeyValuePair<string,Containers> c in ContainerList)
{
if (volume > c.Value.Height * c.Value.Width * c.Value.Length && c.Value.Still_to_Open)
{
volume = c.Value.Height * c.Value.Width * c.Value.Length;
key = c.Key;
}
}
return key;
}
/*
Fill the container with boxes without violating its maximum weight limit
and other constraints.
*/
// This function tries to place the best-fitting boxes into the given container.
void fill_container(String key) //here the key will be of the smallest available container.
{
// Still to open = false
// means that it is currently open.
ContainerList[key].Still_to_Open = false;
// add the currently opened container to its split-container list as-is.
Split_Container_open_list[key].Add(ContainerList[key]);
// rearrange the boxes in descending order of their volume.
re_arranging_boxes();
foreach (KeyValuePair<String, Box> box in sorted_box_List)
{
if (box.Value.Quantity > 0) // if there are still boxes of this type left to place.
{
//to get the orientation in which the box will be placed in the given container
Boolean? RotationX = null;
Boolean? RotationY = null;
Boolean? RotationZ = null;
Int32? container_index = null;
Int32? orientation_case = null;
Point3D point = has_free_space(key, box.Value, out orientation_case, out container_index, out RotationX, out RotationY, out RotationZ);
// checks if the box could be contained in the given container.
if (point.X != float.NegativeInfinity && point.Y != float.NegativeInfinity && point.Z != float.NegativeInfinity && orientation_case.HasValue &&
RotationX.HasValue && RotationY.HasValue && RotationZ.HasValue && container_index.HasValue && has_free_weight(key, box.Value))
{
if (place_the_object(key, container_index.Value, orientation_case.Value, box.Value, point, RotationX, RotationY, RotationZ))
{
re_arranging_boxes();
}
}
}
}
}
/// <summary>
///
/// </summary>
/// <param name="Container_Key">Key of the container in which the object has to be placed</param>
/// <param name="index">index of the Split_Container_open_list at which the box is placed</param>
/// <param name="orientation_case">This parametre will help in splitting the Containers, There are total of 6 cases.</param>
/// <param name="box">Box which is to be placed in the given container </param>
/// <param name="point">3-Dimensional Point in the given container which will be the origin of the box</param>
/// <param name="RotationX">if true, Rotate the box allong X-axis</param>
/// <param name="RotationY">if true, Rotate the box allong Y-axis</param>
/// <param name="RotationZ">if true, Rotate the box allong Z-axis</param>
/// <returns>True if the object is placed successfully</returns>
Boolean place_the_object(String Container_Key, Int32 index, Int32 orientation_case, Box box, Point3D point, Boolean? RotationX, Boolean? RotationY, Boolean? RotationZ)
{
BoxList[box.BoxID].Quantity = BoxList[box.BoxID].Quantity - 1;
BoxList[box.BoxID].ContainerID = Container_Key;
BoxList[box.BoxID].RotationX = RotationX.Value;
BoxList[box.BoxID].RotationY = RotationY.Value;
BoxList[box.BoxID].RotationZ = RotationZ.Value;
BoxList[box.BoxID].Origin = point;
Container_Containing_Boxes[Container_Key].Add(BoxList[box.BoxID]);
Split_Container(Container_Key, index, orientation_case, box, point, RotationX.Value, RotationY.Value, RotationZ.Value);
return true;
}
void Split_Container(String Container_Key, Int32 index, Int32 orientation_case, Box box, Point3D point, Boolean RotationX, Boolean RotationY, Boolean RotationZ)
{
Containers old_container = Split_Container_open_list[Container_Key].ElementAt(index);
Containers new_container1 = new Containers();
Containers new_container2 = new Containers();
Containers new_container3 = new Containers();
//no-rotation
if (orientation_case == 0)
{
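// Guillotine split: after placing the box at `point`, the remaining free space of the
// old container is cut into up to three new containers - one beside the box along the
// width (y), one beyond it along the length (x), and one above it along the height (z).
// Degenerate (zero-size) pieces are discarded by the checks below.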
new_container1.Length = old_container.Length;
new_container1.Width = old_container.Width - box.Width;
new_container1.Height = old_container.Height;
if (new_container1.Length > 0 && new_container1.Width > 0 && new_container1.Height > 0)
{
new_container1.Origin.X = point.X;
new_container1.Origin.Y = point.Y + box.Width;
new_container1.Origin.Z = point.Z;
new_container1.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container1);
}
new_container2.Length = old_container.Length - box.Length;
new_container2.Width = old_container.Width;
new_container2.Height = old_container.Height;
if (new_container2.Length > 0 && new_container2.Width > 0 && new_container2.Height > 0)
{
new_container2.Origin.X = point.X + box.Length;
new_container2.Origin.Y = point.Y;
new_container2.Origin.Z = point.Z;
new_container2.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container2);
}
new_container3.Length = old_container.Length;
new_container3.Width = old_container.Width;
new_container3.Height = old_container.Height - box.Height;
if (new_container3.Length > 0 && new_container3.Width > 0 && new_container3.Height > 0)
{
new_container3.Origin.X = point.X;
new_container3.Origin.Y = point.Y;
new_container3.Origin.Z = point.Z + box.Height;
new_container3.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container3);
}
Split_Container_open_list[Container_Key].RemoveAt(index);
/*
foreach Free Container F in Split_Container_open_list
{
Compute F\Box and subdivide the result into at most six
new containers C1, C2, C3...C6 and add them to
Split_Container_open_list;
and Delete F from Split_Containers_open_list.
}
*/
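// NOTE: the loop below only partially implements the subdivision described above;
// it detects one overlap case and the "make new container" step is left as a stub.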
Int32 tindex = 0;
foreach (Containers F in Split_Container_open_list[Container_Key])
{
//if the free container is intersected by the box.
if (box_intersect_container(box, box.Length, box.Width, box.Height, F))
{ // new free container at the top side of the placed box.
if (box.Origin.X < F.Origin.X + F.Length && box.Origin.X + box.Length > F.Origin.X )
{
if (box.Origin.Y > F.Origin.Y && box.Origin.Y < F.Origin.Y + F.Width)
{
// make new container
}
}
}
++tindex;
}
}
//rotation along X-axis
else if (orientation_case == 1)
{
new_container1.Length = old_container.Length;
new_container1.Width = old_container.Width - box.Height;
new_container1.Height = old_container.Height;
if (new_container1.Length > 0 && new_container1.Width > 0 && new_container1.Height > 0)
{
new_container1.Origin.X = point.X;
new_container1.Origin.Y = point.Y + box.Height;
new_container1.Origin.Z = point.Z;
new_container1.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container1);
}
new_container2.Length = old_container.Length - box.Length;
new_container2.Width = old_container.Width;
new_container2.Height = old_container.Height;
if (new_container2.Length > 0 && new_container2.Width > 0 && new_container2.Height > 0)
{
new_container2.Origin.X = point.X + box.Length;
new_container2.Origin.Y = point.Y;
new_container2.Origin.Z = point.Z;
new_container2.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container2);
}
new_container3.Length = old_container.Length;
new_container3.Width = old_container.Width;
new_container3.Height = old_container.Height - box.Width;
if (new_container3.Length > 0 && new_container3.Width > 0 && new_container3.Height > 0)
{
new_container3.Origin.X = point.X;
new_container3.Origin.Y = point.Y;
new_container3.Origin.Z = point.Z + box.Width;
new_container3.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container3);
}
Split_Container_open_list[Container_Key].RemoveAt(index);
}
//rotation along Y-axis
else if (orientation_case == 2)
{
new_container1.Length = old_container.Length;
new_container1.Width = old_container.Width - box.Width;
new_container1.Height = old_container.Height;
if (new_container1.Length > 0 && new_container1.Width > 0 && new_container1.Height > 0)
{
new_container1.Origin.X = point.X;
new_container1.Origin.Y = point.Y + box.Width;
new_container1.Origin.Z = point.Z;
new_container1.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container1);
}
new_container2.Length = old_container.Length - box.Height;
new_container2.Width = old_container.Width;
new_container2.Height = old_container.Height;
if (new_container2.Length > 0 && new_container2.Width > 0 && new_container2.Height > 0)
{
new_container2.Origin.X = point.X + box.Height;
new_container2.Origin.Y = point.Y;
new_container2.Origin.Z = point.Z;
new_container2.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container2);
}
new_container3.Length = old_container.Length;
new_container3.Width = old_container.Width;
new_container3.Height = old_container.Height - box.Length;
if (new_container3.Length > 0 && new_container3.Width > 0 && new_container3.Height > 0)
{
new_container3.Origin.X = point.X;
new_container3.Origin.Y = point.Y;
new_container3.Origin.Z = point.Z + box.Length;
new_container3.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container3);
}
Split_Container_open_list[Container_Key].RemoveAt(index);
}
//rotation along z-axis
else if (orientation_case == 3)
{
new_container1.Length = old_container.Length;
new_container1.Width = old_container.Width - box.Length;
new_container1.Height = old_container.Height;
if (new_container1.Length > 0 && new_container1.Width > 0 && new_container1.Height > 0)
{
new_container1.Origin.X = point.X;
new_container1.Origin.Y = point.Y + box.Length;
new_container1.Origin.Z = point.Z;
new_container1.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container1);
}
new_container2.Length = old_container.Length - box.Width;
new_container2.Width = old_container.Width;
new_container2.Height = old_container.Height;
if (new_container2.Length > 0 && new_container2.Width > 0 && new_container2.Height > 0)
{
new_container2.Origin.X = point.X + box.Width;
new_container2.Origin.Y = point.Y;
new_container2.Origin.Z = point.Z;
new_container2.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container2);
}
new_container3.Length = old_container.Length;
new_container3.Width = old_container.Width;
new_container3.Height = old_container.Height - box.Height;
if (new_container3.Length > 0 && new_container3.Width > 0 && new_container3.Height > 0)
{
new_container3.Origin.X = point.X;
new_container3.Origin.Y = point.Y;
new_container3.Origin.Z = point.Z + box.Height;
new_container3.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container3);
}
Split_Container_open_list[Container_Key].RemoveAt(index);
}
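//combined two-axis rotation (first variant)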
else if (orientation_case == 4)
{
new_container1.Length = old_container.Length;
new_container1.Width = old_container.Width - box.Length;
new_container1.Height = old_container.Height;
if (new_container1.Length > 0 && new_container1.Width > 0 && new_container1.Height > 0)
{
new_container1.Origin.X = point.X;
new_container1.Origin.Y = point.Y + box.Length;
new_container1.Origin.Z = point.Z;
new_container1.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container1);
}
new_container2.Length = old_container.Length - box.Height;
new_container2.Width = old_container.Width;
new_container2.Height = old_container.Height;
if (new_container2.Length > 0 && new_container2.Width > 0 && new_container2.Height > 0)
{
new_container2.Origin.X = point.X + box.Height;
new_container2.Origin.Y = point.Y;
new_container2.Origin.Z = point.Z;
new_container2.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container2);
}
new_container3.Length = old_container.Length;
new_container3.Width = old_container.Width;
new_container3.Height = old_container.Height - box.Width;
if (new_container3.Length > 0 && new_container3.Width > 0 && new_container3.Height > 0)
{
new_container3.Origin.X = point.X;
new_container3.Origin.Y = point.Y;
new_container3.Origin.Z = point.Z + box.Width;
new_container3.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container3);
}
Split_Container_open_list[Container_Key].RemoveAt(index);
}
//combined two-axis rotation (second variant)
else if (orientation_case == 5)
{
new_container1.Length = old_container.Length;
new_container1.Width = old_container.Width - box.Height;
new_container1.Height = old_container.Height;
if (new_container1.Length > 0 && new_container1.Width > 0 && new_container1.Height > 0)
{
new_container1.Origin.X = point.X;
new_container1.Origin.Y = point.Y + box.Height;
new_container1.Origin.Z = point.Z;
new_container1.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container1);
}
new_container2.Length = old_container.Length - box.Width;
new_container2.Width = old_container.Width;
new_container2.Height = old_container.Height;
if (new_container2.Length > 0 && new_container2.Width > 0 && new_container2.Height > 0)
{
new_container2.Origin.X = point.X + box.Width;
new_container2.Origin.Y = point.Y;
new_container2.Origin.Z = point.Z;
new_container2.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container2);
}
new_container3.Length = old_container.Length;
new_container3.Width = old_container.Width;
new_container3.Height = old_container.Height - box.Length;
if (new_container3.Length > 0 && new_container3.Width > 0 && new_container3.Height > 0)
{
new_container3.Origin.X = point.X;
new_container3.Origin.Y = point.Y;
new_container3.Origin.Z = point.Z + box.Length;
new_container3.Still_to_Open = true;
Split_Container_open_list[Container_Key].Add(new_container3);
}
Split_Container_open_list[Container_Key].RemoveAt(index);
}
}
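// Axis-aligned overlap test: the box and the free container intersect unless they are
// separated along at least one of the x, y, or z axes.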
Boolean box_intersect_container(Box b, Int32 box_length, Int32 box_width, Int32 box_height, Containers c)
{
if (b.Origin.X >= c.Origin.X + c.Length || b.Origin.X + box_length <= c.Origin.X ||
b.Origin.Y >= c.Origin.Y + c.Width || b.Origin.Y + box_width <= c.Origin.Y ||
b.Origin.Z >= c.Origin.Z + c.Height || b.Origin.Z + box_height <= c.Origin.Z)
return false;
else
return true;
}
/// <summary>this function checks if the container has enough weight limit remaining</summary>
/// <param name="Container_key">It is the key of the Container in which the box has to be placed,
/// I used this key for accessing the container from dictionary data-structure.</param>
/// <param name="box">Box which is to be contained in the given contianer.</param>
/// <returns>Returns true if weight of box is less than weight of container else false.</returns>
Boolean has_free_weight(String Container_key, Box box)
{
List<Box> temp_list = Container_Containing_Boxes[Container_key];
Double weight = box.Weight;
foreach (Box ibox in temp_list) weight += ibox.Weight;
if (weight >= ContainerList[Container_key].MaxWeight) return false;
else return true;
}
/// <summary>
/// This Function checks if the given box can be contained in the container or not.
/// Flow:
/// It first checks each split sub-container of the given container individually to see whether
/// it can accommodate the box.
/// For each sub-container, it checks all allowed orientations (X, Y, Z) of the box.
/// </summary>
///
/// <param name="Container_Key">It is the key of the Container in which the box has to be placed,I used this key for accessing the container from dictionary data-structure.</param>
/// <param name="box">Box which is to be contained in the given contianer.</param>
/// <param name="Orientation_case">This parametre will help in splitting the Containers, There are total of 6 cases.</param>
/// <param name="container_index">Exact index at which the box will be placed in the given container.</param>
/// <param name="RotationX">Out pramatre showing if the box was rotated along X co-ordinate</param>
/// <param name="RotationY">Out pramatre showing if the box was rotated along Y co-ordinate</param>
/// <param name="RotationZ">Out pramatre showing if the box was rotated along Z co-ordinate</param>
/// <returns>It returns the Origin of the container in which the box has to be placed. (If available)If not available, returns a point (-infinity, -infinity, -infinity)</returns>
Point3D has_free_space(String Container_Key, Box box, out Int32? Orientation_case, out Int32? container_index, out Boolean? RotationX, out Boolean? RotationY, out Boolean? RotationZ)
{
//can only be placed touching the bottom of the container
if (box.BottomOnly)
{
Int32 index = 0;
foreach (Containers container in Split_Container_open_list[Container_Key])
{
//as the box is bottom-only, it must be placed on the xy-plane (z = 0)
if (container.Origin.Z == 0.0F && container.volume() >= box.Volume()) //z-axis value should be zero.
{
//no rotation
if (container.Height >= box.Height && container.Length >= box.Length && container.Width >= box.Width)
{
RotationX = false;
RotationY = false;
RotationZ = false;
container_index = index;
Orientation_case = 0;
return container.Origin;
}
//if X rotation is allowed.
else if (box.AllowedRotationsX && container.Width >= box.Height && container.Height >= box.Width && container.Length >= box.Length)
{
RotationX = true;
RotationY = false;
RotationZ = false;
container_index = index;
Orientation_case = 1;
return container.Origin;
}
// if Y rotation is allowed
else if (box.AllowedRotationsY && container.Width >= box.Width && container.Length >= box.Height && container.Height >= box.Length)
{
RotationX = false;
RotationY = true;
RotationZ = false;
container_index = index;
Orientation_case = 2;
return container.Origin;
}
//if Z rotation is allowed
else if (box.AllowedRotationsZ && container.Height >= box.Height && container.Width >= box.Length && container.Length >= box.Width)
{
RotationX = false;
RotationY = false;
RotationZ = true;
container_index = index;
Orientation_case = 3;
return container.Origin;
}
//along yx-rotation
else if (box.AllowedRotationsY && box.AllowedRotationsX)
{
if (container.Width >= box.Height && container.Length >= box.Width && container.Height >= box.Length)
{
RotationX = true;
RotationY = true;
RotationZ = false;
container_index = index;
Orientation_case = 4;
return container.Origin;
}
else if (container.Width >= box.Length && container.Length >= box.Height && container.Height >= box.Width)
{
RotationX = true;
RotationY = true;
RotationZ = false;
container_index = index;
Orientation_case = 5;
return container.Origin;
}
}
//along xz-rotation
else if (box.AllowedRotationsX && box.AllowedRotationsZ )
{
if (container.Width >= box.Height && container.Length >= box.Width && container.Height >= box.Length)
{
RotationX = true;
RotationY = false;
RotationZ = true;
container_index = index;
Orientation_case = 4;
return container.Origin;
}
else if (container.Width >= box.Length && container.Length >= box.Height && container.Height >= box.Width)
{
RotationX = true;
RotationY = false;
RotationZ = true;
container_index = index;
Orientation_case = 5;
return container.Origin;
}
}
//along zy-rotation
else if (box.AllowedRotationsZ && box.AllowedRotationsY)
{
if (container.Width >= box.Height && container.Length >= box.Width && container.Height >= box.Length)
{
RotationX = false;
RotationY = true;
RotationZ = true;
container_index = index;
Orientation_case = 4;
return container.Origin;
}
else if (container.Width >= box.Length && container.Length >= box.Height && container.Height >= box.Width)
{
RotationX = false;
RotationY = true;
RotationZ = true;
container_index = index;
Orientation_case = 5;
return container.Origin;
}
}
}
++index;
}
}
else if (box.TopOnly)
{
Int32 index = 0;
foreach (Containers container in Split_Container_open_list[Container_Key])
{
//as the box is top-only, it must be placed against the top face of the container
if (container.Origin.Z + container.Height == ContainerList[Container_Key].Height && container.volume() >= box.Volume())
{
//no rotation
if (container.Height >= box.Height && container.Length >= box.Length && container.Width >= box.Width)
{
RotationX = false;
RotationY = false;
RotationZ = false;
container_index = index;
Orientation_case = 0;
return container.Origin;
}
//if X rotation is allowed.
else if (box.AllowedRotationsX && container.Width >= box.Height && container.Height >= box.Width && container.Length >= box.Length)
{
RotationX = true;
RotationY = false;
RotationZ = false;
container_index = index;
Orientation_case = 1;
return container.Origin;
}
// if Y rotation is allowed
else if (box.AllowedRotationsY && container.Width >= box.Width && container.Length >= box.Height && container.Height >= box.Length)
{
RotationX = false;
RotationY = true;
RotationZ = false;
container_index = index;
Orientation_case = 2;
return container.Origin;
}
//if Z rotation is allowed
else if (box.AllowedRotationsZ && container.Height >= box.Height && container.Width >= box.Length && container.Length >= box.Width)
{
RotationX = false;
RotationY = false;
RotationZ = true;
container_index = index;
Orientation_case = 3;
return container.Origin;
}
//along yx-rotation
else if (box.AllowedRotationsY && box.AllowedRotationsX)
{
if (container.Width >= box.Height && container.Length >= box.Width && container.Height >= box.Length)
{
RotationX = true;
RotationY = true;
RotationZ = false;
container_index = index;
Orientation_case = 4;
return container.Origin;
}
else if (container.Width >= box.Length && container.Length >= box.Height && container.Height >= box.Width)
{
RotationX = true;
RotationY = true;
RotationZ = false;
container_index = index;
Orientation_case = 5;
return container.Origin;
}
}
//along xz-rotation
else if (box.AllowedRotationsX && box.AllowedRotationsZ)
{
if (container.Width >= box.Height && container.Length >= box.Width && container.Height >= box.Length)
{
RotationX = true;
RotationY = false;
RotationZ = true;
container_index = index;
Orientation_case = 4;
return container.Origin;
}
else if (container.Width >= box.Length && container.Length >= box.Height && container.Height >= box.Width)
{
RotationX = true;
RotationY = false;
RotationZ = true;
container_index = index;
Orientation_case = 5;
return container.Origin;
}
}
//along zy-rotation
else if (box.AllowedRotationsZ && box.AllowedRotationsY)
{
if (container.Width >= box.Height && container.Length >= box.Width && container.Height >= box.Length)
{
RotationX = false;
RotationY = true;
RotationZ = true;
container_index = index;
Orientation_case = 4;
return container.Origin;
}
else if (container.Width >= box.Length && container.Length >= box.Height && container.Height >= box.Width)
{
RotationX = false;
RotationY = true;
RotationZ = true;
container_index = index;
Orientation_case = 5;
return container.Origin;
}
}
}
++index;
}
}
//place the box wherever it is feasible
else
{
Int32 index = 0;
foreach (Containers container in Split_Container_open_list[Container_Key])
{
if (container.volume() >= box.Volume())
{
//no rotation
if (container.Height >= box.Height && container.Length >= box.Length && container.Width >= box.Width)
{
RotationX = false;
RotationY = false;
RotationZ = false;
container_index = index;
Orientation_case = 0;
return container.Origin;
}
//if X rotation is allowed.
else if (box.AllowedRotationsX && container.Width >= box.Height && container.Height >= box.Width && container.Length >= box.Length)
{
RotationX = true;
RotationY = false;
RotationZ = false;
container_index = index;
Orientation_case = 1;
return container.Origin;
}
// if Y rotation is allowed
else if (box.AllowedRotationsY && container.Width >= box.Width && container.Length >= box.Height && container.Height >= box.Length)
{
RotationX = false;
RotationY = true;
RotationZ = false;
container_index = index;
Orientation_case = 2;
return container.Origin;
}
//if Z rotation is allowed
else if (box.AllowedRotationsZ && container.Height >= box.Height && container.Width >= box.Length && container.Length >= box.Width)
{
RotationX = false;
RotationY = false;
RotationZ = true;
container_index = index;
Orientation_case = 3;
return container.Origin;
}
//along yx-rotation
else if (box.AllowedRotationsY && box.AllowedRotationsX)
{
if (container.Width >= box.Height && container.Length >= box.Width && container.Height >= box.Length)
{
RotationX = true;
RotationY = true;
RotationZ = false;
container_index = index;
Orientation_case = 4;
return container.Origin;
}
else if (container.Width >= box.Length && container.Length >= box.Height && container.Height >= box.Width)
{
RotationX = true;
RotationY = true;
RotationZ = false;
container_index = index;
Orientation_case = 5;
return container.Origin;
}
}
//along xz-rotation
else if (box.AllowedRotationsX && box.AllowedRotationsZ)
{
if (container.Width >= box.Height && container.Length >= box.Width && container.Height >= box.Length)
{
RotationX = true;
RotationY = false;
RotationZ = true;
container_index = index;
Orientation_case = 4;
return container.Origin;
}
else if (container.Width >= box.Length && container.Length >= box.Height && container.Height >= box.Width)
{
RotationX = true;
RotationY = false;
RotationZ = true;
container_index = index;
Orientation_case = 5;
return container.Origin;
}
}
//along zy-rotation
else if (box.AllowedRotationsZ && box.AllowedRotationsY)
{
if (container.Width >= box.Height && container.Length >= box.Width && container.Height >= box.Length)
{
RotationX = false;
RotationY = true;
RotationZ = true;
container_index = index;
Orientation_case = 4;
return container.Origin;
}
else if (container.Width >= box.Length && container.Length >= box.Height && container.Height >= box.Width)
{
RotationX = false;
RotationY = true;
RotationZ = true;
container_index = index;
Orientation_case = 5;
return container.Origin;
}
}
}
++index;
}
}
// if still not available.
Point3D invalid_point = new Point3D();
invalid_point.X = float.NegativeInfinity;
invalid_point.Y = float.NegativeInfinity;
invalid_point.Z = float.NegativeInfinity;
RotationX = null;
RotationY = null;
RotationZ = null;
container_index = null;
Orientation_case = null;
return invalid_point;
}
/// <summary>This function rearranges the boxes in descending order of their volumes.</summary>
void re_arranging_boxes()
{
sorted_box_List = BoxList.ToList();
sorted_box_List.Sort((firstPair, nextPair) => ( firstPair.Value.Width * firstPair.Value.Height * firstPair.Value.Length).CompareTo (nextPair.Value.Length * nextPair.Value.Width * nextPair.Value.Height));
sorted_box_List.Reverse();
}
}
}
<|start_filename|>3D-Bin-Packing/Form1.cs<|end_filename|>
using System;
using System.Drawing;
using System.Windows.Forms;
using System.Xml;
namespace _3D_Bin_Packing
{
public partial class Form1 : Form
{
private string filePath; // a variable which will save the xml file path.
public Form1()
{
InitializeComponent();
// loading the 3d_icon.png as bitmap image to be displayed as form icon
Bitmap bmp = Properties.Resources._3d_icon;
this.Icon = Icon.FromHandle(bmp.GetHicon());
}
private void addxmlbutton_Click(object sender, EventArgs e)
{
DialogResult result = XML_openFileDialog.ShowDialog();
if (result == DialogResult.OK)
{
try
{
filePath = XML_openFileDialog.FileName;
xmlFileLabel.Text = filePath.ToString(); //adding the selected path to the label.
}
catch (Exception ex)
{
MessageBox.Show("Error: Could not read xml file from disk.\nError Message: " + ex.Message);
}
}
}
private void generateButton_Click(object sender, EventArgs e)
{
Load_Data();
} //ending the event handler for Generate button
private void Load_Data()
{
// Created an Optimization object for storing the OptimizationID
// and OptimizationType of given xml file.
Optimization optimization_object = new Optimization();
// Created a Containers object for storing the ContainerID,
// Length, MinLength.... of the given Container
Containers container_object = new Containers();
// Created an Box object for storing the BoxID, Quantity,
// Length.... of the given Box
Box box_object = new Box();
// Create a new xml document
XmlDocument document = new XmlDocument();
// load the xml document from the selected file path
document.Load(xmlFileLabel.Text);
// Created a list of all the child nodes of <xml> tag
// which include OptimizationID, OptimizationType,
// Containers, and Boxes.
XmlNodeList xnList = document.SelectNodes("/xml[@*]");
foreach (XmlNode xn in xnList)
{
// Selecting the given child nodes of <xml> tag to
// XmlNode class objects.
XmlNode OptimizationID = xn.SelectSingleNode("OptimizationID");
XmlNode OptimizationType = xn.SelectSingleNode("OptimizationType");
XmlNode Containers = xn.SelectSingleNode("Containers");
XmlNode Boxes = xn.SelectSingleNode("Boxes");
// assigning the text of OptimizationID to Optimization class object
if (OptimizationID != null)
{
optimization_object.OptimizationID = OptimizationID.InnerText;
}
// assigning the text of OptimizationType to Optimization class object
if (OptimizationType != null)
{
optimization_object.OptimizationType = OptimizationType.InnerText;
}
if (Containers != null)
{
XmlNodeList innercontainers = Containers.SelectNodes("Containers/Container");
foreach (XmlNode node in innercontainers)
{
if (node != null)
{
// create a fresh Containers object for each node so the dictionary entries are independent
container_object = new Containers();
Point3D point = new Point3D();
container_object.ContainerID = node["ContainerID"].InnerText;
container_object.Length = Int32.Parse(node["Length"].InnerText);
container_object.MinLength = Int32.Parse(node["MinLength"].InnerText);
container_object.MaxLength = Int32.Parse(node["MaxLength"].InnerText);
container_object.StepLenght = Int32.Parse(node["StepLength"].InnerText);
container_object.Width = Int32.Parse(node["Width"].InnerText);
container_object.MinWidth = Int32.Parse(node["MinWidth"].InnerText);
container_object.MaxWidth = Int32.Parse(node["MaxWidth"].InnerText);
container_object.StepWidth = Int32.Parse(node["StepWidth"].InnerText);
container_object.Height = Int32.Parse(node["Height"].InnerText);
container_object.MinHeight = Int32.Parse(node["MinHeight"].InnerText);
container_object.MaxHeight = Int32.Parse(node["MaxHeight"].InnerText);
container_object.StepHeight = Int32.Parse(node["StepHeight"].InnerText);
container_object.MaxWeight = Double.Parse(node["MaxWeight"].InnerText);
container_object.MaxCount = Int32.Parse(node["MaxCount"].InnerText);
container_object.Still_to_Open = true;
container_object.Closed = false;
point.X = 0.0F;
point.Y = 0.0F;
point.Z = 0.0F;
container_object.Origin = point;
Guillotine3D.ContainerList.Add(container_object.ContainerID, container_object);
} // ending if (node != null)
} // ending foreach loop
} // ending if (Containers != null)
if (Boxes != null)
{
XmlNodeList boxlist = Boxes.SelectNodes("Boxes/Box");
foreach (XmlNode box in boxlist)
{
box_object.BoxID = box["BoxID"].InnerText;
box_object.Quantity = Int32.Parse(box["Quantity"].InnerText);
box_object.Length = Int32.Parse(box["Length"].InnerText);
box_object.Width = Int32.Parse(box["Width"].InnerText);
box_object.Height = Int32.Parse(box["Height"].InnerText);
box_object.Weight = Double.Parse(box["Weight"].InnerText);
if (box["AllowedRotations"].InnerText.Contains("X"))
box_object.AllowedRotationsX = true;
else
box_object.AllowedRotationsX = false;
if (box["AllowedRotations"].InnerText.Contains("Y"))
box_object.AllowedRotationsY = true;
else
box_object.AllowedRotationsY = false;
if (box["AllowedRotations"].InnerText.Contains("Z"))
box_object.AllowedRotationsZ = true;
else
box_object.AllowedRotationsZ = false;
box_object.IsPlaced = false;
if (box["TopOnly"].InnerText.ToUpper() == "FALSE")
box_object.TopOnly = false;
else
box_object.TopOnly = true;
if (box["BottomOnly"].InnerText.ToUpper() == "FALSE")
box_object.BottomOnly = false;
else
box_object.BottomOnly = true;
Guillotine3D.BoxList.Add(box_object.BoxID, box_object);
} // ending foreach (XmlNode box in boxlist)
} // ending If (Boxes != null)
} // ending foreach (XmlNode xn in xnList)
} //ending the Load_Data() function
} // ending Form1 class
} // ending namespace
<|start_filename|>3D-Bin-Packing/Point3D.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace _3D_Bin_Packing
{
class Point3D
{
private float x;
private float y;
private float z;
public float X
{
get { return this.x; }
set { this.x = value; }
}
public float Y
{
get { return this.y; }
set { this.y = value; }
}
public float Z
{
get { return this.z; }
set { this.z = value; }
}
}
}
| SahibYar/3D-Bin-Packing |
<|start_filename|>Makefile<|end_filename|>
# Sources
LIB_SRC = src/ncurses.rs
LIB_DEPS = $(shell head -n1 target/.ncurses.deps 2> /dev/null)
EXAMPLES_SRC = $(wildcard examples/*.rs)
# Objects
LIB = target/$(shell rustc --print file-names ${LIB_SRC})
EXAMPLES_BIN = $(EXAMPLES_SRC:examples/%.rs=bin/%)
# CFG Directive Options
CFG_OPT ?= -O
SED_OPT=-i
ifeq ($(shell uname),Darwin)
SED_OPT=-i ''
endif
all: ${LIB} ${EXAMPLES_BIN}
lib: ${LIB}
link-ncursesw: CFG_OPT = --cfg ncursesw
link-ncursesw: all
${LIB}: ${LIB_DEPS}
@mkdir -p target
rustc ${CFG_OPT} --out-dir target ${LIB_SRC}
@rustc --emit dep-info target/.ncurses.deps ${LIB_SRC}
@sed ${SED_OPT} 's/.*: //' target/.ncurses.deps
${EXAMPLES_BIN}: bin/%: examples/%.rs ${LIB}
@mkdir -p bin
rustc --out-dir bin -L target $<
clean:
rm -rf target bin
.PHONY: all clean link-ncursesw
| Water-bamboo/ncurses-rs |
<|start_filename|>WebViewBraintree.js<|end_filename|>
import React from './react.production.min.js';
import { View, ActivityIndicator, StyleSheet } from 'react-native';
import { WebView } from 'react-native-webview-messaging/WebView';
import PropTypes from 'prop-types';
import renderIf from 'render-if';
export default class BraintreePaymentWebview extends React.Component {
constructor() {
super();
this.state = {
paymentAPIResponse: null,
showGetNonceActivityIndicator: false,
showSubmitPaymentActivityIndicator: false
};
}
componentDidMount() {
// register listeners to listen for events from the html
// we'll receive a nonce once the requestPaymentMethodComplete is completed
this.registerMessageListeners();
console.log('wbvw braintree mounted');
}
registerMessageListeners = () => {
const { messagesChannel } = this.webview;
messagesChannel.on('RETRIEVE_NONCE_PENDING', event => {
this.setState({ showGetNonceActivityIndicator: true });
console.log('RETRIEVE_NONCE_PENDING');
});
messagesChannel.on('RETRIEVE_NONCE_FULFILLED', event => {
console.log('RETRIEVE_NONCE_FULFILLED');
this.setState({ showGetNonceActivityIndicator: false });
this.setState({ showSubmitPaymentActivityIndicator: true });
this.props.nonceObtainedCallback(event.payload.response.nonce);
});
messagesChannel.on('RETRIEVE_NONCE_REJECTED', event => {
console.log('RETRIEVE_NONCE_REJECTED');
this.setState({ showGetNonceActivityIndicator: false });
});
messagesChannel.on('GO_BACK', () => {
this.props.navigationBackCallback();
});
};
// send the client token to HTML file to begin the braintree flow
// called when the HTML in the webview is loaded
sendClientTokenToHTML = () => {
this.webview.emit('TOKEN_RECEIVED', {
payload: {
clientToken: this.props.clientToken,
options: this.props.options
}
});
};
// handle purchase responses that parent component sends after making purchase API call
handlePurchaseResponse = response => {
console.log('handlePurchaseResponse');
if (response === 'PAYMENT_SUCCESS') {
console.log('emitting purchaseSuccess');
this.setState({ showSubmitPaymentActivityIndicator: false });
this.webview.emit('PURCHASE_FULFILLED');
} else {
this.setState({ showSubmitPaymentActivityIndicator: false });
this.webview.emit('PURCHASE_REJECTED');
}
};
componentWillReceiveProps = nextProps => {
console.log({ nextProps });
if (nextProps.paymentAPIResponse !== this.state.paymentAPIResponse) {
console.log(nextProps.paymentAPIResponse);
this.setState({ paymentAPIResponse: nextProps.paymentAPIResponse });
this.handlePurchaseResponse(nextProps.paymentAPIResponse);
}
};
render() {
return (
<View
style={{
flex: 1,
backgroundColor: 'green'
}}
>
<View
style={{
flex: 1,
backgroundColor: 'blue',
overflow: 'hidden'
}}
>
<WebView
onLoad={this.sendClientTokenToHTML}
source={require('./dist/index.html')}
style={{ flex: 1 }}
ref={component => (this.webview = component)}
scalesPageToFit ={false}
/>
</View>
{renderIf(this.state.showGetNonceActivityIndicator)(
<View style={styles.activityOverlayStyle}>
<View style={styles.activityIndicatorContainer}>
<ActivityIndicator
size="large"
animating={this.state.showGetNonceActivityIndicator}
color="blue"
/>
</View>
</View>
)}
{renderIf(this.state.showSubmitPaymentActivityIndicator)(
<View style={styles.activityOverlayStyle}>
<View style={styles.activityIndicatorContainer}>
<ActivityIndicator
size="large"
animating={this.state.showSubmitPaymentActivityIndicator}
color="green"
/>
</View>
</View>
)}
</View>
);
}
}
BraintreePaymentWebview.propTypes = {
options: PropTypes.object,
clientToken: PropTypes.string.isRequired,
paymentAPIResponse: PropTypes.string.isRequired,
nonceObtainedCallback: PropTypes.func.isRequired,
navigationBackCallback: PropTypes.func
};
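/*
  Usage sketch. The handler names and surrounding screen below are assumptions for
  illustration, not part of this package; the props mirror the propTypes above and
  the 'PAYMENT_SUCCESS' string checked in handlePurchaseResponse.

  <BraintreePaymentWebview
    clientToken={clientToken}                  // token generated by your server
    options={{}}                               // optional drop-in options
    nonceObtainedCallback={nonce => sendNonceToServer(nonce)}
    paymentAPIResponse={paymentAPIResponse}    // set to 'PAYMENT_SUCCESS' once your API call succeeds
    navigationBackCallback={() => goBack()}
  />
*/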
const styles = StyleSheet.create({
activityOverlayStyle: {
...StyleSheet.absoluteFillObject,
backgroundColor: 'rgba(150, 150, 150, .55)',
marginHorizontal: 20,
marginVertical: 60,
display: 'flex',
justifyContent: 'center',
alignContent: 'center',
borderRadius: 5
},
activityIndicatorContainer: {
backgroundColor: 'white',
padding: 10,
borderRadius: 50,
alignSelf: 'center',
shadowColor: '#000000',
shadowOffset: {
width: 0,
height: 3
},
shadowRadius: 5,
shadowOpacity: 1.0
}
});
<|start_filename|>package.json<|end_filename|>
{
"name": "react-native-webview-braintree",
"version": "0.0.34",
"devDependencies": {
"babel-jest": "^21.2.0",
"babel-loader": "^7.1.2",
"babel-plugin-transform-es2015-arrow-functions": "^6.22.0",
"babel-preset-env": "^1.6.0",
"babel-preset-es2015": "^6.24.1",
"babel-preset-react": "^6.24.1",
"babel-preset-stage-0": "^6.24.1",
"enzyme": "^3.1.0",
"enzyme-adapter-react-16": "^1.0.1",
"enzyme-to-json": "^3.1.2",
"html-webpack-plugin": "^2.30.1",
"jest": "^21.2.1",
"jest-glamor-react": "^3.1.2",
"react-native-scripts": "1.5.0",
"react-test-renderer": "16",
"webpack": "^3.6.0"
},
"files": [
"dist",
"WebViewBraintree.js",
"react.production.min.js",
"react-dom.production.min"
],
"main": "./WebViewBraintree.js",
"scripts": {
"test": "jest --watch",
"build": "webpack --progress"
},
"jest": {
"setupFiles": [
"<rootDir>/config/enzymeAdapter.js"
]
},
"peerDependencies": {
"react": "*",
"react-native": "*"
},
"dependencies": {
"braintree-web-drop-in": "^1.8.0",
"glamor": "^2.20.40",
"glamorous": "^4.9.7",
"prop-types": "^15.6.0",
"react-dom": "^16.0.0",
"react-native": "^0.49.3",
"react-native-webview-messaging": "1.2.1",
"render-if": "^0.1.1",
"util": "^0.10.3"
},
"description": "A React Native component for making payments using Braintree that uses no native code, only JavaScript.",
"repository": {
"type": "git",
"url": "git+https://github.com/reggie3/react-native-webview-braintree.git"
},
"keywords": [
"react-native",
"braintree",
"expo",
"webview"
],
"author": "<NAME>",
"license": "MIT",
"bugs": {
"url": "https://github.com/reggie3/react-native-webview-braintree/issues"
},
"homepage": "https://github.com/reggie3/react-native-webview-braintree#readme"
}
| reggie3/react-native-webview-braintree |
<|start_filename|>assets/style.css<|end_filename|>
#coffee a {
color:#fff;
}
#coffee p a {
text-decoration:underline;
}
/* http://www.colourlovers.com/palette/45488/french_vanilla */
body {
background: #5f3711;
color:#eee;
}
.fill-coffee {
background: #5f3711;
}
.coffee-select {
border: 0;
font-size:35px;
display:inline-block;
vertical-align: middle;
width:40%;
background:transparent;
color:#ddd;
border-radius:0;
-webkit-appearance:none;
line-height:35px;
box-sizing:border-box;
height:55px;
padding:5px 5px 5px 20%;
margin:0;
}
input[type=number].coffee-input {
border: 0;
font-size:65px;
line-height:60px;
height:85px;
display:inline-block;
color:#fff;
background:transparent;
width:60%;
vertical-align: middle;
padding:5px;
text-align:left;
}
.limit-mobile {
width:300px;
margin:0 auto;
}
.price-tag {
display:inline-block;
width:50px;
text-align:center;
margin-right:10px;
}
.italic {
font-style:italic;
}
<|start_filename|>docs/index.html<|end_filename|>
<!DOCTYPE html>
<html>
<head>
<title>index.js</title>
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, target-densitydpi=160dpi, initial-scale=1.0; maximum-scale=1.0; user-scalable=0;">
<link rel="stylesheet" media="all" href="docco.css" />
</head>
<body>
<div id="container">
<div id="background"></div>
<ul class="sections">
<li id="title">
<div class="annotation">
<h1>index.js</h1>
</div>
</li>
<li id="section-1">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-1">¶</a>
</div>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> React = <span class="hljs-built_in">require</span>(<span class="hljs-string">'react/addons'</span>),
Reflux = <span class="hljs-built_in">require</span>(<span class="hljs-string">'reflux'</span>),
Router = <span class="hljs-built_in">require</span>(<span class="hljs-string">'react-router'</span>),
{ NotFoundRoute, Navigation, State, Link, Route, RouteHandler, DefaultRoute } = Router,
osmAuth = <span class="hljs-built_in">require</span>(<span class="hljs-string">'osm-auth'</span>),
haversine = <span class="hljs-built_in">require</span>(<span class="hljs-string">'haversine'</span>),
xhr = <span class="hljs-built_in">require</span>(<span class="hljs-string">'xhr'</span>),
currency = <span class="hljs-built_in">require</span>(<span class="hljs-string">'./currency_symbols.json'</span>),
qs = <span class="hljs-built_in">require</span>(<span class="hljs-string">'querystring'</span>);
<span class="hljs-built_in">window</span>.React = React;</pre></div></div>
</li>
<li id="section-2">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-2">¶</a>
</div>
<p>Constants for API endpoints</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">const</span> API06 = <span class="hljs-string">'http://api.openstreetmap.org/api/0.6/'</span>,
OVERPASS = <span class="hljs-string">'http://overpass-api.de/api/interpreter'</span>;</pre></div></div>
</li>
<li id="section-3">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-3">¶</a>
</div>
<p>Constants for our OAuth connection to OpenStreetMap.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">const</span> OAUTH_CONSUMER_KEY = <span class="hljs-string">'<KEY>'</span>,
OAUTH_SECRET = <span class="hljs-string">'<KEY>'</span>;</pre></div></div>
</li>
<li id="section-4">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-4">¶</a>
</div>
<h1 id="configuration">Configuration</h1>
<p>This is used to show certain nodes in the list: otherwise the ones
we’re looking for would be crowded out by telephone poles etc.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">const</span> KEYPAIR = { k: <span class="hljs-string">'amenity'</span>, v: <span class="hljs-string">'cafe'</span> },
TAG = <span class="hljs-string">'cost:coffee'</span>,</pre></div></div>
</li>
<li id="section-5">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-5">¶</a>
</div>
<p>The version string is added to changesets to let OSM know which
editor software is responsible for which changes.</p>
</div>
<div class="content"><div class='highlight'><pre> VERSION = <span class="hljs-string">'COFFEEDEX 2002'</span>,
MBX = <span class="hljs-string">'<KEY>'</span>,
MAP = <span class="hljs-string">'tmcw.kbh273ee'</span>,
PIN = <span class="hljs-string">'pin-l-cafe'</span>,
LOC = <span class="hljs-string">'pin-s'</span>;
L.mapbox.accessToken = MBX;</pre></div></div>
</li>
<li id="section-6">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-6">¶</a>
</div>
<h1 id="parsing-producing-xml">Parsing & Producing XML</h1>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> a = (nl) => <span class="hljs-built_in">Array</span>.prototype.slice.call(nl),
attr = (n, k) => n.getAttribute(k),
serializer = <span class="hljs-keyword">new</span> XMLSerializer();</pre></div></div>
</li>
<li id="section-7">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-7">¶</a>
</div>
<p>Given an XML DOM in OSM format and an object of the form</p>
<pre><code>{ k, v }
</code></pre><p>Find all nodes with that key combination and return them
in the form</p>
<pre><code>{ xml: Node, tags: {}, id: <span class="hljs-string">'osm-id'</span> }
</code></pre>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> parser = (xml, kv) =>
a(xml.getElementsByTagName(<span class="hljs-string">'node'</span>)).map(node =>
a(node.getElementsByTagName(<span class="hljs-string">'tag'</span>)).reduce((memo, tag) => {
memo.tags[attr(tag, <span class="hljs-string">'k'</span>)] = attr(tag, <span class="hljs-string">'v'</span>); <span class="hljs-keyword">return</span> memo;
}, {
xml: node, tags: {}, id: attr(node, <span class="hljs-string">'id'</span>),
location: {
latitude: <span class="hljs-built_in">parseFloat</span>(attr(node, <span class="hljs-string">'lat'</span>)),
longitude: <span class="hljs-built_in">parseFloat</span>(attr(node, <span class="hljs-string">'lon'</span>))
}
}))
.filter(node => node.tags[kv.k] === kv.v);
<span class="hljs-keyword">var</span> serialize = (xml) => serializer.serializeToString(xml)
.replace(<span class="hljs-string">'xmlns="http://www.w3.org/1999/xhtml"'</span>, <span class="hljs-string">''</span>);</pre></div></div>
</li>
<li id="section-8">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-8">¶</a>
</div>
<p>Since we’re building XML the hacky way by formatting strings,
we’ll need to escape strings so that places like “Charlie’s Shop”
don’t make invalid XML.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> <span class="hljs-built_in">escape</span> = _ => _.replace(<span class="hljs-regexp">/&/g</span>, <span class="hljs-string">'&amp;'</span>)
.replace(<span class="hljs-regexp">/</g</span>, <span class="hljs-string">'&lt;'</span>).replace(<span class="hljs-regexp">/>/g</span>, <span class="hljs-string">'&gt;'</span>).replace(<span class="hljs-regexp">/"/g</span>, <span class="hljs-string">'&quot;'</span>);</pre></div></div>
</li>
<li id="section-9">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-9">¶</a>
</div>
<p>Generate the XML payload necessary to open a new changeset in OSM</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> changesetCreate = (comment) => `<osm><span class="xml"><span class="hljs-tag"><<span class="hljs-title">changeset</span>></span>
<span class="hljs-tag"><<span class="hljs-title">tag</span> <span class="hljs-attribute">k</span>=<span class="hljs-value">"created_by"</span> <span class="hljs-attribute">v</span>=<span class="hljs-value">"${VERSION}"</span> /></span>
<span class="hljs-tag"><<span class="hljs-title">tag</span> <span class="hljs-attribute">k</span>=<span class="hljs-value">"comment"</span> <span class="hljs-attribute">v</span>=<span class="hljs-value">"${escape(comment)}"</span> /></span>
<span class="hljs-tag"></<span class="hljs-title">changeset</span>></span><span class="hljs-tag"></<span class="hljs-title">osm</span>></span>`;
</span></pre></div></div>
</li>
<li id="section-10">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-10">¶</a>
</div>
<p>After the OSM changeset is opened, we need to send the changes:
this generates the necessary XML to add or update a specific
tag on a single node.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> changesetChange = (node, tag, id) => {
a(node.getElementsByTagName(<span class="hljs-string">'tag'</span>))
.filter(tagElem => tagElem.getAttribute(<span class="hljs-string">'k'</span>) === tag.k)
.forEach(tagElem => node.removeChild(tagElem));
node.setAttribute(<span class="hljs-string">'changeset'</span>, id);
<span class="hljs-keyword">var</span> newTag = node.appendChild(<span class="hljs-built_in">document</span>.createElement(<span class="hljs-string">'tag'</span>));
newTag.setAttribute(<span class="hljs-string">'k'</span>, tag.k); newTag.setAttribute(<span class="hljs-string">'v'</span>, tag.v);
<span class="hljs-keyword">return</span> `<osmChange version=<span class="hljs-string">"0.3"</span> generator=<span class="hljs-string">"${VERSION}"</span>>
<span class="xml"><span class="hljs-tag"><<span class="hljs-title">modify</span>></span>${serialize(node)}<span class="hljs-tag"></<span class="hljs-title">modify</span>></span>
<span class="hljs-tag"></<span class="hljs-title">osmChange</span>></span>`;
};
var sortDistance = (location) =>
(a, b) => haversine(location, a.location) - haversine(location, b.location);
var queryOverpass = (center, kv, callback) => {
const RADIUS = 0.1;
var bbox = [
center.latitude - RADIUS, center.longitude - RADIUS,
center.latitude + RADIUS, center.longitude + RADIUS
].join(',');
var query = `[out:xml][timeout:25];
(node["${kv.k}"="${kv.v}"](${bbox});); out body; >;</span> out skel qt;`;
xhr({ uri: OVERPASS, method: <span class="hljs-string">'POST'</span>, body: query }, callback);
};
<span class="hljs-keyword">var</span> queryOverpassAll = (callback) => {
<span class="hljs-keyword">var</span> query = `[out:json][timeout:<span class="hljs-number">1000</span>];(node[<span class="hljs-string">"cost:coffee"</span>];);out body; >; out skel qt;`;
xhr({ uri: OVERPASS, method: <span class="hljs-string">'POST'</span>, body: query }, callback);
};</pre></div></div>
</li>
<li id="section-11">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-11">¶</a>
</div>
<h1 id="stores">Stores</h1>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> locationStore = Reflux.createStore({
location: { latitude: <span class="hljs-number">0</span>, longitude: <span class="hljs-number">0</span> },
getInitialState() { <span class="hljs-keyword">return</span> <span class="hljs-keyword">this</span>.location; },
init() {
<span class="hljs-keyword">this</span>.watcher = navigator.geolocation.watchPosition(res => {
<span class="hljs-keyword">if</span> (haversine(<span class="hljs-keyword">this</span>.location, res.coords) > <span class="hljs-number">10</span>) {
<span class="hljs-keyword">this</span>.trigger(res.coords);
}
<span class="hljs-keyword">this</span>.location = res.coords;
});
}
});</pre></div></div>
</li>
<li id="section-12">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-12">¶</a>
</div>
<p>The worldNode store stores only data for the WorldMap component:
instead of loading a list with <a href="http://wiki.openstreetmap.org/wiki/Overpass_API">Overpass API</a>
and detail with API 0.6, this simply hits Overpass, and uses the easy-to-parse JSON output
instead of XML.</p>
<p>We then transform Overpass’s JSON encoding into <a href="http://geojson.org/">GeoJSON</a>
so Mapbox.js can display the points.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> worldNodeLoad = Reflux.createAction();
<span class="hljs-keyword">var</span> worldNodeStore = Reflux.createStore({
nodes: <span class="hljs-literal">null</span>,
getInitialState() { <span class="hljs-keyword">return</span> <span class="hljs-keyword">this</span>.nodes; },
init() { <span class="hljs-keyword">this</span>.listenTo(worldNodeLoad, <span class="hljs-keyword">this</span>.load); },
load() {
queryOverpassAll((err, resp, json) => {
<span class="hljs-keyword">if</span> (err) <span class="hljs-keyword">return</span> <span class="hljs-built_in">console</span>.error(err);
<span class="hljs-keyword">this</span>.nodes = {
type: <span class="hljs-string">'FeatureCollection'</span>,
features: <span class="hljs-built_in">JSON</span>.parse(json).elements.map((elem) => {
elem.tags.title = elem.tags.name || <span class="hljs-string">''</span>;
elem.tags.description = elem.tags[TAG];
elem.tags[<span class="hljs-string">'marker-symbol'</span>] = <span class="hljs-string">'cafe'</span>;
elem.tags[<span class="hljs-string">'marker-color'</span>] = <span class="hljs-string">'#5a3410'</span>;
<span class="hljs-keyword">return</span> {
type: <span class="hljs-string">'Feature'</span>,
properties: elem.tags,
geometry: { type: <span class="hljs-string">'Point'</span>, coordinates: [elem.lon, elem.lat] }
};
})
};
<span class="hljs-keyword">this</span>.trigger(<span class="hljs-keyword">this</span>.nodes);
});
}
});</pre></div></div>
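<p>To make the transform above concrete, here is one hypothetical Overpass element and the GeoJSON Feature it becomes; the name, id and coordinates are invented:</p>
<div class='highlight'><pre>// One element from Overpass's JSON output:
var overpassElement = {
  type: 'node', id: 123, lat: 38.9, lon: -77.03,
  tags: { name: 'Example Cafe', 'cost:coffee': '$2.50' }
};
// After the map() above, the Feature handed to Mapbox.js:
var feature = {
  type: 'Feature',
  properties: {
    name: 'Example Cafe', 'cost:coffee': '$2.50',
    title: 'Example Cafe', description: '$2.50',
    'marker-symbol': 'cafe', 'marker-color': '#5a3410'
  },
  geometry: { type: 'Point', coordinates: [-77.03, 38.9] }
};</pre></div>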
</li>
<li id="section-13">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-13">¶</a>
</div>
<p>Here’s where we store fully-formed OSM Nodes that correspond to matches.
These are listed with Overpass and then loaded in full with the OSM API.
This two-step process works around the OSM API’s lack of tag filtering - without
it, we’d have to make some very slow calls to the <code>/map/</code> endpoint, instead of
fast calls to the <code>/nodes</code> endpoint.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> nodeLoad = Reflux.createAction();
<span class="hljs-keyword">var</span> nodeSave = Reflux.createAction();
<span class="hljs-keyword">var</span> nodeStore = Reflux.createStore({
nodes: {},
getInitialState() { <span class="hljs-keyword">return</span> <span class="hljs-keyword">this</span>.nodes; },
init() {
<span class="hljs-keyword">this</span>.listenTo(nodeLoad, <span class="hljs-keyword">this</span>.load);
<span class="hljs-keyword">this</span>.listenTo(locationStore, <span class="hljs-keyword">this</span>.load);
<span class="hljs-keyword">this</span>.listenTo(nodeSave, <span class="hljs-keyword">this</span>.save);
},
load(center) {
queryOverpass(center, KEYPAIR, (err, resp, map) => {
<span class="hljs-keyword">if</span> (err) <span class="hljs-keyword">return</span> <span class="hljs-built_in">console</span>.error(err);
parser(resp.responseXML, KEYPAIR)
.sort(sortDistance(center))
.slice(<span class="hljs-number">0</span>, <span class="hljs-number">50</span>)
.map(node => node.id).forEach(id => <span class="hljs-keyword">this</span>.loadNodes([id]));
});
},
loadNodes(ids) {
ids = ids.filter(id => !<span class="hljs-keyword">this</span>.nodes[id]);
<span class="hljs-keyword">if</span> (!ids.length) <span class="hljs-keyword">return</span> <span class="hljs-keyword">this</span>.trigger(<span class="hljs-keyword">this</span>.nodes);
xhr({ uri: `${API06}nodes/?nodes=${ids.join(<span class="hljs-string">','</span>)}`, method: <span class="hljs-string">'GET'</span> }, (err, resp, body) => {
parser(resp.responseXML, KEYPAIR).forEach(node => {
<span class="hljs-keyword">if</span> (!<span class="hljs-keyword">this</span>.nodes[node.id]) <span class="hljs-keyword">this</span>.nodes[node.id] = node;
});
<span class="hljs-keyword">this</span>.trigger(<span class="hljs-keyword">this</span>.nodes);
});
},
save(res, price, currency) {
<span class="hljs-keyword">const</span> XMLHEADER = { header: { <span class="hljs-string">'Content-Type'</span>: <span class="hljs-string">'text/xml'</span> } };
<span class="hljs-keyword">var</span> xml = res.xml;
<span class="hljs-keyword">var</span> tag = { k: TAG, v: currency + price };
<span class="hljs-keyword">var</span> comment = `Updating coffee price to ${currency} ${price} <span class="hljs-keyword">for</span> ${res.tags.name}`;
auth.xhr({ method: <span class="hljs-string">'PUT'</span>, prefix: <span class="hljs-literal">false</span>, options: XMLHEADER,
content: changesetCreate(comment),
path: `${API06}changeset/create`
}, (err, id) => {
<span class="hljs-keyword">if</span> (err) <span class="hljs-keyword">return</span> <span class="hljs-built_in">console</span>.error(err);
auth.xhr({ method: <span class="hljs-string">'POST'</span>, prefix: <span class="hljs-literal">false</span>, options: XMLHEADER,
content: changesetChange(xml, tag, id),
path: `${API06}changeset/${id}/upload`,
}, (err, res) => {
auth.xhr({ method: <span class="hljs-string">'PUT'</span>, prefix: <span class="hljs-literal">false</span>,
path: `${API06}changeset/${id}/close`
}, (err, id) => {
<span class="hljs-keyword">if</span> (err) <span class="hljs-built_in">console</span>.error(err);
router.transitionTo(<span class="hljs-string">'/success'</span>);
});
});
});
}
});</pre></div></div>
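<p>A minimal sketch of the same two requests outside the store, reusing the KEYPAIR, parser, xhr and API06 definitions from earlier in this file; the coordinates and ids are hypothetical:</p>
<div class='highlight'><pre>queryOverpass({ latitude: 38.9, longitude: -77.03 }, KEYPAIR, (err, resp) => {
  if (err) return console.error(err);
  // Step 1: Overpass only tells us which nearby nodes carry the tag.
  var ids = parser(resp.responseXML, KEYPAIR).map(node => node.id); // e.g. [101, 102]
  // Step 2: one fast bulk request to the editable OSM API for just those nodes,
  // instead of a slow bounding-box /map/ call.
  xhr({ uri: `${API06}nodes/?nodes=${ids.join(',')}`, method: 'GET' }, (err, resp, body) => {
    if (err) return console.error(err);
    console.log(parser(resp.responseXML, KEYPAIR));
  });
});</pre></div>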
</li>
<li id="section-14">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-14">¶</a>
</div>
<p>osm-auth does the hard work of managing user authentication with
OpenStreetMap via the OAuth protocol.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> auth = osmAuth({
oauth_consumer_key: OAUTH_CONSUMER_KEY,
oauth_secret: OAUTH_SECRET,
auto: <span class="hljs-literal">false</span>,
landing: <span class="hljs-string">'index.html'</span>,
singlepage: <span class="hljs-literal">true</span>
});</pre></div></div>
</li>
<li id="section-15">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-15">¶</a>
</div>
<p>Here we store the user’s logged-in / logged-out status so we can show
the authentication view instead of a list as an initial pageview.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> userLogin = Reflux.createAction();
<span class="hljs-keyword">var</span> userStore = Reflux.createStore({
user: <span class="hljs-literal">null</span>,
init() {
<span class="hljs-keyword">this</span>.user = auth.authenticated();
<span class="hljs-keyword">this</span>.listenTo(userLogin, <span class="hljs-keyword">this</span>.login);
},
getInitialState() {
<span class="hljs-keyword">return</span> <span class="hljs-keyword">this</span>.user;
},
login() {
auth.authenticate((err, details) => {
<span class="hljs-keyword">this</span>.user = auth.authenticated();
<span class="hljs-keyword">this</span>.trigger(<span class="hljs-keyword">this</span>.user);
});
}
});</pre></div></div>
</li>
<li id="section-16">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-16">¶</a>
</div>
<h1 id="components">Components</h1>
</div>
</li>
<li id="section-17">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-17">¶</a>
</div>
<p>A simple shout-out and log-in button that shoots a user into the OSM
oauth flow.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> LogIn = React.createClass({
render() {
<span class="hljs-comment">/* jshint ignore:start */</span>
<span class="hljs-keyword">return</span> (<span class="xml"><span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pad2'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pad1 space-bottom1'</span>></span>
COFFEEDEX is built on OpenStreetMap and requires an OpenStreetMap account.
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">button</span>
<span class="hljs-attribute">onClick</span>=<span class="hljs-value">{userLogin}</span>
<span class="hljs-attribute">className</span>=<span class="hljs-value">'button col12 fill-green icon account'</span>></span>Log in to OpenStreetMap<span class="hljs-tag"></<span class="hljs-title">button</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
);
/* jshint ignore:end */
}
});
</span></pre></div></div>
</li>
<li id="section-18">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-18">¶</a>
</div>
<p>A simple wrapper for a call to the <a href="https://www.mapbox.com/developers/api/static/">Mapbox Static Map API</a>
that we use for editing pages: this gives a basic idea of where the coffee
shop is as well as a marker for your location. Helpful when there’s
a Starbucks on every corner of an intersection.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> StaticMap = React.createClass({
render() {
<span class="hljs-keyword">return</span> (
<span class="hljs-comment">/* jshint ignore:start */</span>
<img src={`https:<span class="hljs-comment">//api.tiles.mapbox.com/v4/${MAP}/` +</span>
`${PIN}(${<span class="hljs-keyword">this</span>.props.location.longitude},${<span class="hljs-keyword">this</span>.props.location.latitude}),` +
(<span class="hljs-keyword">this</span>.props.self ? `${LOC}(${<span class="hljs-keyword">this</span>.props.self.longitude},${<span class="hljs-keyword">this</span>.props.self.latitude})` : <span class="hljs-string">''</span>) +
`/${<span class="hljs-keyword">this</span>.props.location.longitude},${<span class="hljs-keyword">this</span>.props.location.latitude}` +
`,<span class="hljs-number">14</span>/<span class="hljs-number">300</span>x200@<span class="hljs-number">2</span>x.png?access_token=${MBX}`} />
<span class="hljs-comment">/* jshint ignore:end */</span>
);
}
});
<span class="hljs-keyword">var</span> Page = React.createClass({
render() {
<span class="hljs-keyword">return</span> (
<span class="hljs-comment">/* jshint ignore:start */</span>
<div className=<span class="hljs-string">'margin3 col6'</span>>
<span class="xml"><span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'col12'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">RouteHandler</span>/></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
/* jshint ignore:end */
);
}
});
var values = obj => Object.keys(obj).map(key => obj[key]);
</span></pre></div></div>
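<p>The render above only assembles one image URL. With MAP, PIN, LOC and MBX standing in for the map id, the two marker overlays and the access token defined earlier, a request for a shop at (-77.03, 38.9) with the user at (-77.04, 38.91) would look roughly like:</p>
<div class='highlight'><pre>// https://api.tiles.mapbox.com/v4/<MAP>/<PIN>(-77.03,38.9),<LOC>(-77.04,38.91)
//   /-77.03,38.9,14/300x200@2x.png?access_token=<MBX></pre></div>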
</li>
<li id="section-19">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-19">¶</a>
</div>
<p>A list of potential nodes for viewing and editing.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> List = React.createClass({</pre></div></div>
</li>
<li id="section-20">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-20">¶</a>
</div>
<p>We use Reflux’s <code>.connect</code> method to listen for changes in stores
and automatically call setState to use their data here.</p>
</div>
<div class="content"><div class='highlight'><pre> mixins: [
Reflux.connect(nodeStore, <span class="hljs-string">'nodes'</span>),
Reflux.connect(locationStore, <span class="hljs-string">'location'</span>),
Reflux.connect(userStore, <span class="hljs-string">'user'</span>)],
<span class="hljs-comment">/* jshint ignore:start */</span>
render() {
<span class="hljs-keyword">return</span> (
<span class="xml"><span class="hljs-tag"><<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'clearfix col12'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pad2 fill-darken0 clearfix'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'col4'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">img</span> <span class="hljs-attribute">width</span>=<span class="hljs-value">{300</span>/<span class="hljs-attribute">2</span>} <span class="hljs-attribute">height</span>=<span class="hljs-value">{230</span>/<span class="hljs-attribute">2</span>}
<span class="hljs-attribute">className</span>=<span class="hljs-value">'inline'</span> <span class="hljs-attribute">src</span>=<span class="hljs-value">'assets/logo_inverted.png'</span> /></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'col8 pad2y pad1x'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">h3</span>></span>COFFEEDEX<span class="hljs-tag"></<span class="hljs-title">h3</span>></span>
<span class="hljs-tag"><<span class="hljs-title">p</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'italic'</span>></span>how much does a cup of coffee for here cost, everywhere?<span class="hljs-tag"></<span class="hljs-title">p</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
{this.state.user ?
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pad2'</span>></span>
{!values(this.state.nodes).length && <span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pad4 center'</span>></span>
Loading...
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>}
<span class="hljs-tag"><<span class="hljs-title">React.addons.CSSTransitionGroup</span> <span class="hljs-attribute">transitionName</span>=<span class="hljs-value">"t-fade"</span>></span>
{values(this.state.nodes)
.sort(sortDistance(this.state.location))
.map(res => <span class="hljs-tag"><<span class="hljs-title">Result</span> <span class="hljs-attribute">key</span>=<span class="hljs-value">{res.id}</span> <span class="hljs-attribute">res</span>=<span class="hljs-value">{res}</span> /></span>)}
<span class="hljs-tag"></<span class="hljs-title">React.addons.CSSTransitionGroup</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span> :
<span class="hljs-tag"><<span class="hljs-title">LogIn</span> /></span>}
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'center dark space-bottom1'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pill space-top1'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">Link</span>
<span class="hljs-attribute">className</span>=<span class="hljs-value">'button stroke quiet icon globe'</span>
<span class="hljs-attribute">to</span>=<span class="hljs-value">'world_map'</span>></span>World Map<span class="hljs-tag"></<span class="hljs-title">Link</span>></span>
<span class="hljs-tag"><<span class="hljs-title">Link</span>
<span class="hljs-attribute">className</span>=<span class="hljs-value">'button stroke quiet'</span>
<span class="hljs-attribute">to</span>=<span class="hljs-value">'help'</span>></span>Help<span class="hljs-tag"></<span class="hljs-title">Link</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>);
}
/* jshint ignore:end */
});
</span></pre></div></div>
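<p>Reflux.connect(store, 'key') is roughly shorthand for subscribing by hand and mirroring the store into component state, as in the sketch below (an illustration, not the library's actual implementation):</p>
<div class='highlight'><pre>var connectByHand = {
  getInitialState() { return { nodes: nodeStore.getInitialState() }; },
  componentDidMount() {
    this.unsubscribe = nodeStore.listen(nodes => this.setState({ nodes: nodes }));
  },
  componentWillUnmount() { this.unsubscribe(); }
};</pre></div>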
</li>
<li id="section-21">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-21">¶</a>
</div>
<p>A single list item</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> Result = React.createClass({
render() {
<span class="hljs-comment">/* jshint ignore:start */</span>
<span class="hljs-keyword">return</span> <span class="xml"><span class="hljs-tag"><<span class="hljs-title">Link</span> <span class="hljs-attribute">to</span>=<span class="hljs-value">'editor'</span>
<span class="hljs-attribute">params</span>=<span class="hljs-value">{{</span> <span class="hljs-attribute">osmId:</span> <span class="hljs-attribute">this.props.res.id</span> }}
<span class="hljs-attribute">className</span>=<span class="hljs-value">'pad1 col12 clearfix fill-coffee space-bottom1'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'price-tag round'</span>></span>
{this.props.res.tags[TAG] ?
this.props.res.tags[TAG] : <span class="hljs-tag"><<span class="hljs-title">span</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'icon pencil'</span>></span><span class="hljs-tag"></<span class="hljs-title">span</span>></span>}
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">strong</span>></span>{this.props.res.tags.name}<span class="hljs-tag"></<span class="hljs-title">strong</span>></span>
<span class="hljs-tag"></<span class="hljs-title">Link</span>></span>;</span>
<span class="hljs-comment">/* jshint ignore:end */</span>
}
});
<span class="hljs-keyword">var</span> parseCurrency = str => {
<span class="hljs-keyword">var</span> number = str.match(<span class="hljs-regexp">/[\d\.]+/</span>), currency = str.match(<span class="hljs-regexp">/[^\d\.]+/</span>);
<span class="hljs-keyword">return</span> {
    currency: (currency && currency[<span class="hljs-number">0</span>]) || <span class="hljs-string">'$'</span>,
price: <span class="hljs-built_in">parseFloat</span>((number && number[<span class="hljs-number">0</span>]) || <span class="hljs-number">0</span>)
};
};</pre></div></div>
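<p>A few illustrative calls showing how the two regular expressions split a tag value into a currency symbol and a numeric price:</p>
<div class='highlight'><pre>parseCurrency('$2.50');  // currency '$',  price 2.5
parseCurrency('€3');     // currency '€',  price 3
parseCurrency('2');      // currency '$' (default), price 2</pre></div>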
</li>
<li id="section-22">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-22">¶</a>
</div>
<p>This view is shown briefly after a user completes an edit. The user
can either click/tap to go back to the list, or it’ll do that automatically
in 1 second.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> Success = React.createClass({
mixins: [Navigation],
componentDidMount() {
setTimeout(() => {
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">this</span>.isMounted()) {
<span class="hljs-keyword">this</span>.transitionTo(<span class="hljs-string">'list'</span>);
}
}, <span class="hljs-number">1000</span>);
},
<span class="hljs-comment">/* jshint ignore:start */</span>
render() {
<span class="hljs-keyword">return</span> <span class="xml"><span class="hljs-tag"><<span class="hljs-title">Link</span> <span class="hljs-attribute">to</span>=<span class="hljs-value">'list'</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'col12 center pad4'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">h2</span>></span><span class="hljs-tag"><<span class="hljs-title">span</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'big icon check'</span>></span><span class="hljs-tag"></<span class="hljs-title">span</span>></span> Saved!<span class="hljs-tag"></<span class="hljs-title">h2</span>></span>
<span class="hljs-tag"></<span class="hljs-title">Link</span>></span>;</span>
}
<span class="hljs-comment">/* jshint ignore:end */</span>
});</pre></div></div>
</li>
<li id="section-23">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-23">¶</a>
</div>
<p>The help page. Doesn’t have any JavaScript functionality of its own -
this is static content.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> Help = React.createClass({
<span class="hljs-comment">/* jshint ignore:start */</span>
render() {
<span class="hljs-keyword">return</span> <span class="xml"><span class="hljs-tag"><<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">Link</span>
<span class="hljs-attribute">to</span>=<span class="hljs-value">'list'</span>
<span class="hljs-attribute">className</span>=<span class="hljs-value">'home icon button fill-darken2 col12'</span>></span>home<span class="hljs-tag"></<span class="hljs-title">Link</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pad1y'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'round fill-lighten0 pad2 dark'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">p</span>></span><span class="hljs-tag"><<span class="hljs-title">strong</span>></span>COFFEEDEX<span class="hljs-tag"></<span class="hljs-title">strong</span>></span> is a community project that aims to track the price of house coffee everywhere.<span class="hljs-tag"></<span class="hljs-title">p</span>></span>
<span class="hljs-tag"><<span class="hljs-title">p</span>></span>The data is stored in <span class="hljs-tag"><<span class="hljs-title">a</span> <span class="hljs-attribute">href</span>=<span class="hljs-value">'http://osm.org/'</span>></span>OpenStreetMap<span class="hljs-tag"></<span class="hljs-title">a</span>></span>, a free and open source map of the world, as tags on existing coffeehops. There are 150,000+.<span class="hljs-tag"></<span class="hljs-title">p</span>></span>
<span class="hljs-tag"><<span class="hljs-title">p</span>></span>Maps in this application are &copy; <span class="hljs-tag"><<span class="hljs-title">a</span> <span class="hljs-attribute">href</span>=<span class="hljs-value">'http://mapbox.com/'</span>></span>Mapbox<span class="hljs-tag"></<span class="hljs-title">a</span>></span>.<span class="hljs-tag"></<span class="hljs-title">p</span>></span>
<span class="hljs-tag"><<span class="hljs-title">p</span>></span>COFFEEDEX data stored in OpenStreetMap is <span class="hljs-tag"><<span class="hljs-title">a</span> <span class="hljs-attribute">href</span>=<span class="hljs-value">'http://www.openstreetmap.org/copyright'</span>></span>available under the ODbL license.<span class="hljs-tag"></<span class="hljs-title">a</span>></span><span class="hljs-tag"></<span class="hljs-title">p</span>></span>
<span class="hljs-tag"><<span class="hljs-title">p</span>></span>This is also an open source project. You can view the source code, clone it, fork it, and make new things with it as inspiration or raw parts.<span class="hljs-tag"></<span class="hljs-title">p</span>></span>
<span class="hljs-tag"><<span class="hljs-title">a</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'button stroke icon github col12 space-bottom1'</span> <span class="hljs-attribute">href</span>=<span class="hljs-value">'http://github.com/tmcw/coffeedex'</span>></span>COFFEEDEX on GitHub<span class="hljs-tag"></<span class="hljs-title">a</span>></span>
<span class="hljs-tag"><<span class="hljs-title">p</span>></span><span class="hljs-tag"><<span class="hljs-title">span</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'icon mobile'</span>></span><span class="hljs-tag"></<span class="hljs-title">span</span>></span> COFFEEDEX also works great on phones! Try it on your phone and add it to your iPhone home screen - it'll look even prettier.<span class="hljs-tag"></<span class="hljs-title">p</span>></span>
<span class="hljs-tag"><<span class="hljs-title">h2</span>></span>FAQ<span class="hljs-tag"></<span class="hljs-title">h2</span>></span>
<span class="hljs-tag"><<span class="hljs-title">ul</span>></span>
<span class="hljs-tag"><<span class="hljs-title">li</span>></span><span class="hljs-tag"><<span class="hljs-title">strong</span>></span>Which coffee?<span class="hljs-tag"></<span class="hljs-title">strong</span>></span> This site tracks the price of <span class="hljs-tag"><<span class="hljs-title">em</span>></span>house coffee<span class="hljs-tag"></<span class="hljs-title">em</span>></span> for here. In many cases, that means a 12oz drip, but if all coffees are pour-overs or your country uses different standard size, the overriding rule is cheapest-here.<span class="hljs-tag"></<span class="hljs-title">li</span>></span>
<span class="hljs-tag"></<span class="hljs-title">ul</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>;</span>
}
<span class="hljs-comment">/* jshint ignore:end */</span>
});</pre></div></div>
</li>
<li id="section-24">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-24">¶</a>
</div>
<p>The WorldMap page. This uses the worldNodeLoad to show all tagged
nodes worldwide on an interactive Mapbox map.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> WorldMap = React.createClass({
mixins: [Navigation, Reflux.connect(worldNodeStore, <span class="hljs-string">'nodes'</span>)],
statics: {
willTransitionTo(transition, params) {
worldNodeLoad();
}
},
<span class="hljs-comment">/* jshint ignore:start */</span>
componentDidMount() {
<span class="hljs-keyword">this</span>.map = L.mapbox.map(<span class="hljs-keyword">this</span>.refs.map.getDOMNode(), MAP, {
zoomControl: <span class="hljs-literal">false</span>
});
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">this</span>.state.nodes) <span class="hljs-keyword">this</span>.map.featureLayer.setGeoJSON(<span class="hljs-keyword">this</span>.state.nodes);
},
componentDidUpdate() {
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">this</span>.state.nodes) <span class="hljs-keyword">this</span>.map.featureLayer.setGeoJSON(<span class="hljs-keyword">this</span>.state.nodes);
},
render() {
<span class="hljs-keyword">return</span> <span class="xml"><span class="hljs-tag"><<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">ref</span>=<span class="hljs-value">'map'</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pin-top pin-bottom'</span> <span class="hljs-attribute">id</span>=<span class="hljs-value">'map'</span>></span><span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">Link</span>
<span class="hljs-attribute">to</span>=<span class="hljs-value">'list'</span>
<span class="hljs-attribute">className</span>=<span class="hljs-value">'home icon button fill-navy dark pin-top unround col12'</span>></span>home<span class="hljs-tag"></<span class="hljs-title">Link</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>;</span>
}
<span class="hljs-comment">/* jshint ignore:end */</span>
});</pre></div></div>
</li>
<li id="section-25">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-25">¶</a>
</div>
<p>The editor. This allows users to view and edit tags on single result items.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> Editor = React.createClass({
mixins: [
Reflux.listenTo(nodeStore, <span class="hljs-string">'onNodeLoad'</span>, <span class="hljs-string">'onNodeLoad'</span>),
Reflux.connect(locationStore, <span class="hljs-string">'location'</span>),
State, React.addons.LinkedStateMixin],
onNodeLoad(nodes) {
<span class="hljs-keyword">var</span> node = nodes[<span class="hljs-keyword">this</span>.getParams().osmId];
<span class="hljs-keyword">if</span> (node) {
<span class="hljs-keyword">if</span> (node.tags[TAG]) {
<span class="hljs-keyword">var</span> currency = parseCurrency(node.tags[TAG]);
<span class="hljs-keyword">this</span>.setState({
currency: currency.currency,
price: currency.price,
node: node
});
} <span class="hljs-keyword">else</span> {
<span class="hljs-keyword">this</span>.setState({ node: node });
}
}
},
getInitialState() {
<span class="hljs-keyword">return</span> {
currency: <span class="hljs-string">'$'</span>,
price: <span class="hljs-number">0</span>
};
},</pre></div></div>
</li>
<li id="section-26">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-26">¶</a>
</div>
<p>Before this view is displayed, we make sure that the node it’ll
show will be loaded soon.</p>
</div>
<div class="content"><div class='highlight'><pre> statics: {
willTransitionTo(transition, params) {
nodeStore.loadNodes([params.osmId]);
},
},
save(e) {
e.preventDefault();
<span class="hljs-keyword">var</span> node = <span class="hljs-keyword">this</span>.state.node;
nodeSave(node, <span class="hljs-keyword">this</span>.state.price, <span class="hljs-keyword">this</span>.state.currency);
},
render() {
<span class="hljs-keyword">var</span> node = <span class="hljs-keyword">this</span>.state.node;
<span class="hljs-comment">/* jshint ignore:start */</span>
<span class="hljs-keyword">if</span> (!node) <span class="hljs-keyword">return</span> <span class="xml"><span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pad4 center'</span>></span>
Loading...
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>;</span>
<span class="hljs-keyword">return</span> <span class="xml"><span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'col12'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">Link</span>
<span class="hljs-attribute">to</span>=<span class="hljs-value">'list'</span>
<span class="hljs-attribute">className</span>=<span class="hljs-value">'home icon button fill-darken0 unround col12'</span>></span>home<span class="hljs-tag"></<span class="hljs-title">Link</span>></span>
<span class="hljs-tag"><<span class="hljs-title">StaticMap</span> <span class="hljs-attribute">location</span>=<span class="hljs-value">{node.location}</span> <span class="hljs-attribute">self</span>=<span class="hljs-value">{this.state.location}</span> /></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'pad1 col12 clearfix'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'col12'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'center'</span>></span>
how much for a cup of joe at
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">h1</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'center'</span>></span>
{node.tags.name}
<span class="hljs-tag"></<span class="hljs-title">h1</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'limit-mobile'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">div</span> <span class="hljs-attribute">className</span>=<span class="hljs-value">'col12 clearfix space-bottom1'</span>></span>
<span class="hljs-tag"><<span class="hljs-title">select</span>
<span class="hljs-attribute">valueLink</span>=<span class="hljs-value">{this.linkState('currency')}</span>
<span class="hljs-attribute">className</span>=<span class="hljs-value">'coffee-select'</span>></span>
{currency.map(c => <span class="hljs-tag"><<span class="hljs-title">option</span> <span class="hljs-attribute">key</span>=<span class="hljs-value">{c[0]}</span> <span class="hljs-attribute">value</span>=<span class="hljs-value">{c[0]}</span>></span>{c[1]}<span class="hljs-tag"></<span class="hljs-title">option</span>></span>)}
<span class="hljs-tag"></<span class="hljs-title">select</span>></span>
<span class="hljs-tag"><<span class="hljs-title">input</span> <span class="hljs-attribute">valueLink</span>=<span class="hljs-value">{this.linkState('price')}</span>
<span class="hljs-attribute">className</span>=<span class="hljs-value">'coffee-input'</span> <span class="hljs-attribute">type</span>=<span class="hljs-value">'number'</span> /></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"><<span class="hljs-title">a</span> <span class="hljs-attribute">href</span>=<span class="hljs-value">'#'</span>
<span class="hljs-attribute">onClick</span>=<span class="hljs-value">{this.save}</span>
<span class="hljs-attribute">className</span>=<span class="hljs-value">'fill-darken1 button col12 icon plus pad1 unround'</span>></span>Save<span class="hljs-tag"></<span class="hljs-title">a</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>
<span class="hljs-tag"></<span class="hljs-title">div</span>></span>;</span>
<span class="hljs-comment">/* jshint ignore:end */</span>
}
});</pre></div></div>
</li>
<li id="section-27">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-27">¶</a>
</div>
<p>Our router. This manages what URLs mean and where Links can go.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">var</span> routes = (
<span class="hljs-comment">/* jshint ignore:start */</span>
<Route handler={Page} path=<span class="hljs-string">'/'</span>>
<span class="xml"><span class="hljs-tag"><<span class="hljs-title">DefaultRoute</span> <span class="hljs-attribute">name</span>=<span class="hljs-value">'list'</span> <span class="hljs-attribute">handler</span>=<span class="hljs-value">{List}</span> /></span>
<span class="hljs-tag"><<span class="hljs-title">Route</span> <span class="hljs-attribute">name</span>=<span class="hljs-value">'world_map'</span> <span class="hljs-attribute">path</span>=<span class="hljs-value">'/world_map'</span> <span class="hljs-attribute">handler</span>=<span class="hljs-value">{WorldMap}</span> /></span>
<span class="hljs-tag"><<span class="hljs-title">Route</span> <span class="hljs-attribute">name</span>=<span class="hljs-value">'success'</span> <span class="hljs-attribute">path</span>=<span class="hljs-value">'/success'</span> <span class="hljs-attribute">handler</span>=<span class="hljs-value">{Success}</span> /></span>
<span class="hljs-tag"><<span class="hljs-title">Route</span> <span class="hljs-attribute">name</span>=<span class="hljs-value">'help'</span> <span class="hljs-attribute">path</span>=<span class="hljs-value">'/help'</span> <span class="hljs-attribute">handler</span>=<span class="hljs-value">{Help}</span> /></span>
<span class="hljs-tag"><<span class="hljs-title">Route</span> <span class="hljs-attribute">name</span>=<span class="hljs-value">'editor'</span> <span class="hljs-attribute">path</span>=<span class="hljs-value">'/edit/:osmId'</span> <span class="hljs-attribute">handler</span>=<span class="hljs-value">{Editor}</span> /></span>
<span class="hljs-tag"></<span class="hljs-title">Route</span>></span>
/* jshint ignore:end */
);
var router = Router.create({ routes });
</span></pre></div></div>
</li>
<li id="section-28">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-28">¶</a>
</div>
<p>This is a little dirty: the router will rewrite paths it doesn’t know,
including the path we desperately need to complete the OAuth dance.
So before booting it up, we notice if we need to bootstrap an oauth_token,
and if so, we do that before starting the application.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-keyword">if</span> (location.search && !auth.authenticated()) {
<span class="hljs-keyword">var</span> oauth_token = qs.parse(location.search.replace(<span class="hljs-string">'?'</span>, <span class="hljs-string">''</span>)).oauth_token;
auth.bootstrapToken(oauth_token, (err, res) => {
userStore.user = <span class="hljs-literal">true</span>;
userStore.trigger(userStore.user);
router.run(Handler => {
<span class="hljs-comment">/* jshint ignore:start */</span>
React.render(<span class="xml"><span class="hljs-tag"><<span class="hljs-title">Handler</span>/></span>, document.body);
/* jshint ignore:end */
});
});
} else {
router.run(Handler => {
/* jshint ignore:start */
React.render(<span class="hljs-tag"><<span class="hljs-title">Handler</span>/></span>, document.body);
/* jshint ignore:end */
});
}
</span></pre></div></div>
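<p>Concretely, after the user grants access on openstreetmap.org the browser comes back to the landing page with the request token in the query string (the token value here is invented), which is what the qs.parse call above picks apart before auth.bootstrapToken finishes the handshake:</p>
<div class='highlight'><pre>// e.g. https://example.com/index.html?oauth_token=AbC123XyZ
qs.parse('oauth_token=AbC123XyZ');  // => { oauth_token: 'AbC123XyZ' }</pre></div>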
</li>
</ul>
</div>
</body>
</html>
| iandees/poism |
<|start_filename|>app/src/main/java/com/hadiidbouk/sample/MainActivity.java<|end_filename|>
package com.hadiidbouk.sample;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.webkit.WebView;
import android.widget.FrameLayout;
import android.widget.Toast;
import com.hadiidbouk.appauthwebview.AppAuthWebView;
import com.hadiidbouk.appauthwebview.AppAuthWebViewData;
import com.hadiidbouk.appauthwebview.IAppAuthWebViewListener;
import net.openid.appauth.AuthState;
public class MainActivity extends AppCompatActivity implements IAppAuthWebViewListener {
private AppAuthWebViewData mData;
private FrameLayout mErrorLayout;
private FrameLayout mLoadingLayout;
private WebView mWebView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
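// Configure the OAuth2 / OpenID Connect endpoints AppAuthWebView will use.
// These values point at the public IdentityServer demo instance; a real app
// would substitute its own provider's discovery, authorize and token URLs.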
mData = new AppAuthWebViewData();
mData.setClientId("native.code");
mData.setDiscoveryUri("https://demo.identityserver.io/.well-known/openid-configuration");
mData.setScope("openid profile email api offline_access");
mData.setAuthorizationEndpointUri("https://demo.identityserver.io/connect/authorize");
mData.setRedirectLoginUri("hadiidbouk-appAuthWebView://callback");
mData.setTokenEndpointUri("https://demo.identityserver.io/connect/token");
mData.setResponseType("code");
mData.setGenerateCodeVerifier(true);
// TODO: delete after refactoring the code
mData.setRegistrationEndpointUri("");
mData.setRedirectLogoutUri("");
mData.setClientSecret("");
mErrorLayout = findViewById(R.id.ErrorLayout);
mLoadingLayout = findViewById(R.id.LoadingLayout);
mWebView = findViewById(R.id.WebView);
AppAuthWebView appAuthWebView = new AppAuthWebView
.Builder()
.webView(mWebView)
.authData(mData)
.listener(this)
.build();
appAuthWebView.performLoginRequest();
}
@Override public void onUserAuthorize(AuthState authState) {
Toast.makeText(this, "Success!\n\nToken : " + authState.getIdToken(), Toast.LENGTH_SHORT).show();
finish();
}
@Override public void showConnectionErrorLayout() {
mErrorLayout.setVisibility(View.VISIBLE);
}
@Override public void hideConnectionErrorLayout() {
mErrorLayout.setVisibility(View.INVISIBLE);
}
@Override public void showLoadingLayout() {
mLoadingLayout.setVisibility(View.VISIBLE);
}
@Override public void hideLoadingLayout() {
mLoadingLayout.setVisibility(View.INVISIBLE);
}
@Override
public void onLogoutFinish() {
}
}
<|start_filename|>library/src/main/java/com/hadiidbouk/appauthwebview/IAppAuthWebViewListener.java<|end_filename|>
package com.hadiidbouk.appauthwebview;
import net.openid.appauth.AuthState;
public interface IAppAuthWebViewListener {
void onUserAuthorize(AuthState authState);
void showConnectionErrorLayout();
void hideConnectionErrorLayout();
void showLoadingLayout();
void hideLoadingLayout();
void onLogoutFinish();
}
| PriestVallon/AppAuthWebView-Android |
<|start_filename|>src/main/java/com/example/MySmartHomeApp.java<|end_filename|>
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.actions.api.smarthome.*;
import com.google.cloud.firestore.QueryDocumentSnapshot;
import com.google.gson.Gson;
import com.google.home.graph.v1.DeviceProto;
import com.google.protobuf.Struct;
import com.google.protobuf.util.JsonFormat;
public class MySmartHomeApp extends SmartHomeApp {
private static final Logger LOGGER = LoggerFactory.getLogger(MySmartHomeApp.class);
private static MyDataStore database = MyDataStore.getInstance();
@NotNull
@Override
public SyncResponse onSync(SyncRequest syncRequest, Map<?, ?> headers) {
SyncResponse res = new SyncResponse();
res.setRequestId(syncRequest.requestId);
res.setPayload(new SyncResponse.Payload());
String token = (String) headers.get("authorization");
String userId = "";
try {
userId = database.getUserId(token);
} catch (Exception e) {
// TODO(proppy): add errorCode when
// https://github.com/actions-on-google/actions-on-google-java/issues/44 is fixed.
LOGGER.error("failed to get user id for token: %d", token);
return res;
}
res.payload.agentUserId = userId;
database.setHomegraph(userId, true);
List<QueryDocumentSnapshot> devices = new ArrayList<>();
try {
devices = database.getDevices(userId);
} catch (ExecutionException | InterruptedException e) {
LOGGER.error("failed to get devices", e);
return res;
}
int numOfDevices = devices.size();
res.payload.devices = new SyncResponse.Payload.Device[numOfDevices];
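// Build one SYNC payload entry per device document found in Firestore,
// copying over its type, traits, display names, room hint and hardware info.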
for (int i = 0; i < numOfDevices; i++) {
QueryDocumentSnapshot device = devices.get(i);
SyncResponse.Payload.Device.Builder deviceBuilder =
new SyncResponse.Payload.Device.Builder()
.setId(device.getId())
.setType((String) device.get("type"))
.setTraits((List<String>) device.get("traits"))
.setName(
DeviceProto.DeviceNames.newBuilder()
.addAllDefaultNames((List<String>) device.get("defaultNames"))
.setName((String) device.get("name"))
.addAllNicknames((List<String>) device.get("nicknames"))
.build())
.setWillReportState((Boolean) device.get("willReportState"))
.setRoomHint((String) device.get("roomHint"))
.setDeviceInfo(
DeviceProto.DeviceInfo.newBuilder()
.setManufacturer((String) device.get("manufacturer"))
.setModel((String) device.get("model"))
.setHwVersion((String) device.get("hwVersion"))
.setSwVersion((String) device.get("swVersion"))
.build());
if (device.contains("attributes")) {
Map<String, Object> attributes = new HashMap<>();
attributes.putAll((Map<String, Object>) device.get("attributes"));
String attributesJson = new Gson().toJson(attributes);
Struct.Builder attributeBuilder = Struct.newBuilder();
try {
JsonFormat.parser().ignoringUnknownFields().merge(attributesJson, attributeBuilder);
} catch (Exception e) {
LOGGER.error("FAILED TO BUILD");
}
deviceBuilder.setAttributes(attributeBuilder.build());
}
if (device.contains("customData")) {
Map<String, Object> customData = new HashMap<>();
customData.putAll((Map<String, Object>) device.get("customData"));
// TODO(proppy): remove once
// https://github.com/actions-on-google/actions-on-google-java/issues/43 is fixed.
String customDataJson = new Gson().toJson(customData);
deviceBuilder.setCustomData(customDataJson);
}
if (device.contains("otherDeviceIds")) {
deviceBuilder.setOtherDeviceIds((List) device.get("otherDeviceIds"));
}
res.payload.devices[i] = deviceBuilder.build();
}
return res;
}
@NotNull
@Override
public QueryResponse onQuery(QueryRequest queryRequest, Map<?, ?> headers) {
QueryRequest.Inputs.Payload.Device[] devices =
((QueryRequest.Inputs) queryRequest.getInputs()[0]).payload.devices;
QueryResponse res = new QueryResponse();
res.setRequestId(queryRequest.requestId);
res.setPayload(new QueryResponse.Payload());
String token = (String) headers.get("authorization");
String userId = "";
try {
userId = database.getUserId(token);
} catch (Exception e) {
LOGGER.error("failed to get user id for token: %d", headers.get("authorization"));
res.payload.setErrorCode("authFailure");
return res;
}
Map<String, Map<String, Object>> deviceStates = new HashMap<>();
for (QueryRequest.Inputs.Payload.Device device : devices) {
try {
Map<String, Object> deviceState = database.getState(userId, device.id);
deviceState.put("status", "SUCCESS");
//deviceState.put("online", true); //TODO: Not sure about this line solution
deviceStates.put(device.id, deviceState);
//ReportState.makeRequest(this, userId, device.id, deviceState);
} catch (Exception e) {
LOGGER.error("QUERY FAILED: {}", e);
Map<String, Object> failedDevice = new HashMap<>();
failedDevice.put("status", "ERROR");
failedDevice.put("errorCode", "deviceOffline");
deviceStates.put(device.id, failedDevice);
}
}
res.payload.setDevices(deviceStates);
return res;
}
@NotNull
@Override
public ExecuteResponse onExecute(ExecuteRequest executeRequest, Map<?, ?> headers) {
ExecuteResponse res = new ExecuteResponse();
String token = (String) headers.get("authorization");
String userId = "";
try {
userId = database.getUserId(token);
} catch (Exception e) {
LOGGER.error("failed to get user id for token: %d", headers.get("authorization"));
res.setPayload(new ExecuteResponse.Payload());
res.payload.setErrorCode("authFailure");
return res;
}
List<ExecuteResponse.Payload.Commands> commandsResponse = new ArrayList<>();
List<String> successfulDevices = new ArrayList<>();
Map<String, Object> states = new HashMap<>();
ExecuteRequest.Inputs.Payload.Commands[] commands =
((ExecuteRequest.Inputs) executeRequest.inputs[0]).payload.commands;
for (ExecuteRequest.Inputs.Payload.Commands command : commands) {
for (ExecuteRequest.Inputs.Payload.Commands.Devices device : command.devices) {
try {
states = database.execute(userId, device.id, command.execution[0]);
successfulDevices.add(device.id);
ReportState.makeRequest(this, userId, device.id, states);
} catch (Exception e) {
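// The datastore signals secure-execution cases through the exception message:
// PENDING and the challenge types (pinNeeded, challengeFailedPinNeeded,
// ackNeeded) are each mapped to a matching entry in the execute response.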
if (e.getMessage().equals("PENDING")) {
ExecuteResponse.Payload.Commands pendingDevice = new ExecuteResponse.Payload.Commands();
pendingDevice.ids = new String[] {device.id};
pendingDevice.status = "PENDING";
commandsResponse.add(pendingDevice);
continue;
}
if (e.getMessage().equals("pinNeeded")) {
ExecuteResponse.Payload.Commands failedDevice = new ExecuteResponse.Payload.Commands();
failedDevice.ids = new String[] {device.id};
failedDevice.status = "ERROR";
failedDevice.setErrorCode("challengeNeeded");
failedDevice.setChallengeNeeded(
new HashMap<String, String>() {
{
put("type", "pinNeeded");
}
});
failedDevice.setErrorCode(e.getMessage());
commandsResponse.add(failedDevice);
continue;
}
if (e.getMessage().equals("challengeFailedPinNeeded")) {
ExecuteResponse.Payload.Commands failedDevice = new ExecuteResponse.Payload.Commands();
failedDevice.ids = new String[] {device.id};
failedDevice.status = "ERROR";
failedDevice.setErrorCode("challengeNeeded");
failedDevice.setChallengeNeeded(
new HashMap<String, String>() {
{
put("type", "challengeFailedPinNeeded");
}
});
failedDevice.setErrorCode(e.getMessage());
commandsResponse.add(failedDevice);
continue;
}
if (e.getMessage().equals("ackNeeded")) {
ExecuteResponse.Payload.Commands failedDevice = new ExecuteResponse.Payload.Commands();
failedDevice.ids = new String[] {device.id};
failedDevice.status = "ERROR";
failedDevice.setErrorCode("challengeNeeded");
failedDevice.setChallengeNeeded(
new HashMap<String, String>() {
{
put("type", "ackNeeded");
}
});
failedDevice.setErrorCode(e.getMessage());
commandsResponse.add(failedDevice);
continue;
}
ExecuteResponse.Payload.Commands failedDevice = new ExecuteResponse.Payload.Commands();
failedDevice.ids = new String[] {device.id};
failedDevice.status = "ERROR";
failedDevice.setErrorCode(e.getMessage());
commandsResponse.add(failedDevice);
}
}
}
ExecuteResponse.Payload.Commands successfulCommands = new ExecuteResponse.Payload.Commands();
successfulCommands.status = "SUCCESS";
successfulCommands.setStates(states);
successfulCommands.ids = successfulDevices.toArray(new String[] {});
commandsResponse.add(successfulCommands);
res.requestId = executeRequest.requestId;
ExecuteResponse.Payload payload =
new ExecuteResponse.Payload(
commandsResponse.toArray(new ExecuteResponse.Payload.Commands[] {}));
res.setPayload(payload);
return res;
}
@NotNull
@Override
public void onDisconnect(DisconnectRequest disconnectRequest, Map<?, ?> headers) {
String token = (String) headers.get("authorization");
try {
String userId = database.getUserId(token);
database.setHomegraph(userId, false);
} catch (Exception e) {
LOGGER.error("failed to get user id for token: %d", token);
}
}
}
| electrofun-smart/my-smart-home-java-mqtt |
<|start_filename|>src/KdtreeISO/include/Mesh.h<|end_filename|>
//
// Created by Danielhu on 2017/12/25.
//
#ifndef VOXELWORLD_MESH_H
#define VOXELWORLD_MESH_H
#include <vector>
#include <glm/glm.hpp>
#include "Topology.h"
#include "Vertex.h"
struct Mesh {
std::vector<unsigned int> indices;
std::vector<glm::fvec3> positions;
std::vector<glm::fvec3> normals;
void addTriangle(Vertex **vertices, ScalarField *g);
void addVertex(Vertex *v, ScalarField *g);
void drawAABBDebug(glm::fvec3 min, glm::fvec3 max);
void generateFlatNormals();
};
#endif //VOXELWORLD_MESH_H
<|start_filename|>src/KdtreeISO/lib/ScalarField.cpp<|end_filename|>
//
// Created by Danielhu on 2018/5/30.
//
#include <glm/glm.hpp>
#include "RectilinearGrid.h"
#include "ScalarField.h"
fvec3 ScalarField::normal_f1(const fvec3 &p) {
float nx = value(p + fvec3(gradientOffset(), 0.f, 0.f)) - value(p - fvec3(gradientOffset(), 0.f, 0.f));
float ny = value(p + fvec3(0.f, gradientOffset(), 0.f)) - value(p - fvec3(0.f, gradientOffset(), 0.f));
float nz = value(p + fvec3(0.f, 0.f, gradientOffset())) - value(p - fvec3(0.f, 0.f, gradientOffset()));
// auto g = fvec3(nx, ny, nz) / gradientOffset() / 2.f;
if (nx == 0.f && ny == 0.f && nz == 0.f) {
return glm::normalize(fvec3(1));
}
return glm::normalize(fvec3(nx, ny, nz));
}
void ScalarField::normal(const fvec3 &p, fvec3 &out) {
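  // Central-difference estimate of the gradient with step gradientOffset(),
  // normalized; falls back to a fixed direction when the gradient vanishes.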
float nx = value(p + fvec3(gradientOffset(), 0.f, 0.f)) - value(p - fvec3(gradientOffset(), 0.f, 0.f));
float ny = value(p + fvec3(0.f, gradientOffset(), 0.f)) - value(p - fvec3(0.f, gradientOffset(), 0.f));
float nz = value(p + fvec3(0.f, 0.f, gradientOffset())) - value(p - fvec3(0.f, 0.f, gradientOffset()));
// auto g = fvec3(nx, ny, nz) / gradientOffset() / 2.f;
if (nx == 0.f && ny == 0.f && nz == 0.f) {
out = glm::normalize(fvec3(1));
return;
}
out = glm::normalize(fvec3(nx, ny, nz));
// constexpr int filterSize = 5;
// float l = gradientOffset();
// for (int x = 0; x < filterSize; ++x) {
// for (int y = 0; y < filterSize; ++y) {
// for (int z = 0; z < filterSize; ++z) {
// out += p + fvec3((x - filterSize / 2) * l, (y - filterSize / 2) * l, (z - filterSize / 2) * l);
// }
// }
// }
// if (out.x == 0.f && out.y == 0.f && out.z == 0.f) {
// out = glm::normalize(fvec3(1));
// }
// out = glm::normalize(out);
assert(!isnan(out.x));
}
glm::fvec3 ScalarField::gradient(const glm::fvec3 &p) {
float nx = value(p + fvec3(gradientOffset(), 0.f, 0.f)) - value(p - fvec3(gradientOffset(), 0.f, 0.f));
float ny = value(p + fvec3(0.f, gradientOffset(), 0.f)) - value(p - fvec3(0.f, gradientOffset(), 0.f));
float nz = value(p + fvec3(0.f, 0.f, gradientOffset())) - value(p - fvec3(0.f, 0.f, gradientOffset()));
// auto g = fvec3(nx, ny, nz) / gradientOffset() / 2.f;
return fvec3(nx, ny, nz) / gradientOffset() / 2.f;
}
<|start_filename|>src/KdtreeISO/include/ScalarField.h<|end_filename|>
//
// Created by Danielhu on 2018/5/29.
//
#ifndef VOXELWORLD_SCALARFIELD_H
#define VOXELWORLD_SCALARFIELD_H
#include <cstdint>
#include <glm/glm.hpp>
#include "Utils.h"
class ScalarField {
protected:
uint8_t materialID;
public:
virtual float value(const glm::fvec3 &p) = 0;
virtual float index(const PositionCode &code) = 0;
virtual bool solve(const glm::fvec3 &p1, const glm::fvec3 &p2, glm::fvec3 &out) = 0;
virtual float gradientOffset() = 0;
virtual void normal(const glm::fvec3 &p, glm::fvec3 &out);
virtual fvec3 normal_f1(const glm::fvec3 &p);
virtual glm::fvec3 gradient(const glm::fvec3 &p);
uint8_t getMaterialID() { return materialID; }
ScalarField() : materialID(1) {}
virtual ~ScalarField() {}
};
#endif //VOXELWORLD_SCALARFIELD_H
<|start_filename|>src/KdtreeISO/include/RectilinearGrid.h<|end_filename|>
//
// Created by Danielhu on 2018/5/9.
//
#ifndef VOXELWORLD_RECTILINEARGRID_H
#define VOXELWORLD_RECTILINEARGRID_H
#include <vector>
#include <map>
#include <set>
#include "Qef.h"
#include "Mesh.h"
#include "Topology.h"
#include "Utils.h"
#include "Vertex.h"
#include "Indicators.h"
#include "AxisAlignedLine.h"
struct RectilinearGrid {
PositionCode minCode;
PositionCode maxCode;
QefSolver allQef;
std::vector<QefSolver> components;
std::vector<Vertex> vertices;
Vertex approximate;
uint8_t cornerSigns[8]{0};
int8_t componentIndices[8]{0};
bool isSigned = false;
std::map<RectilinearGrid *, Vertex *> faceVertices;
explicit RectilinearGrid(PositionCode minCode = PositionCode(0, 0, 0),
PositionCode maxCode = PositionCode(0, 0, 0),
QefSolver sum = QefSolver())
: minCode(minCode), maxCode(maxCode), allQef(sum) {
solve(allQef, approximate);
}
void solveComponent(int i);
void solve(QefSolver &qef, Vertex &v);
void assignSign(ScalarField *t);
void calCornerComponents();
bool sampleQef(ScalarField *t, bool all);
void draw(Mesh *mesh);
inline glm::fvec3 cornerPos(int i) {
return min_offset_subdivision(i) * codeToPos(maxCode - minCode, RectilinearGrid::getUnitSize()) + codeToPos(minCode, RectilinearGrid::getUnitSize());
}
inline int edgeComponentIndex(int corner1, int corner2) {
// assert(cornerSigns[corner1] != cornerSigns[corner2]);
if (cornerSigns[corner1] != 0) {
return componentIndices[corner1];
}
return componentIndices[corner2];
}
inline int faceComponentIndex(int faceDir, int edgeDir, int faceSide, int edgeSide) {
int component = -1;
int dir = 3 - faceDir - edgeDir;
for (int i = 0; i < 2; ++i) {
ivec3 code;
code[faceDir] = faceSide;
code[edgeDir] = edgeSide;
code[dir] = i;
int corner = encodeCell(code);
if (cornerSigns[corner] > 0) {
component = componentIndices[corner];
}
}
if (component != -1) {
return component;
}
for (int i = 0; i < 2; ++i) {
ivec3 code;
code[faceDir] = faceSide;
code[edgeDir] = 1 - edgeSide;
code[dir] = i;
int corner = encodeCell(code);
if (cornerSigns[corner] > 0) {
component = componentIndices[corner];
}
}
return component;
}
static void setUnitSize(float size);
static float getUnitSize();
static bool calClusterability(RectilinearGrid *left,
RectilinearGrid *right,
int dir,
const PositionCode &minCode,
const PositionCode &maxCode,
ScalarField *s);
static void combineAAGrid(RectilinearGrid *left,
RectilinearGrid *right,
int dir,
RectilinearGrid *out);
static bool isInterFreeCondition2Faild(const std::vector<Vertex *> &polygons,
const glm::fvec3 &p1,
const glm::fvec3 &p2);
template <class GridHolder>
static bool checkSign(const std::array<GridHolder *, 4> &nodes,
int quadDir1,
int quadDir2,
ScalarField *s,
int &side,
PositionCode &minEnd,
PositionCode &maxEnd);
template <class GridHolder>
static void generateQuad(const std::array<GridHolder, 4> &nodes,
int quadDir1,
int quadDir2,
Mesh *mesh,
ScalarField *t,
float threshold);
private:
static float unitSize;
};
template <class GridHolder>
bool RectilinearGrid::checkSign(const std::array<GridHolder *, 4> &nodes,
int quadDir1,
int quadDir2,
ScalarField *s,
int &side,
PositionCode &minEnd,
PositionCode &maxEnd) {
int dir = 3 - quadDir1 - quadDir2;
if (nodes[0] != nodes[1]) {
maxEnd = minEnd = nodes[0]->grid.maxCode;
}
else {
maxEnd = minEnd = nodes[3]->grid.minCode;
}
maxEnd[dir] = std::min(
std::min(nodes[0]->grid.maxCode[dir], nodes[1]->grid.maxCode[dir]),
std::min(nodes[2]->grid.maxCode[dir], nodes[3]->grid.maxCode[dir]));
minEnd[dir] = std::max(
std::max(nodes[0]->grid.minCode[dir], nodes[1]->grid.minCode[dir]),
std::max(nodes[2]->grid.minCode[dir], nodes[3]->grid.minCode[dir]));
if (minEnd[dir] >= maxEnd[dir]) {
return false;
}
float v1 = s->index(minEnd);
float v2 = s->index(maxEnd);
if ((v1 >= 0 && v2 >= 0) || (v1 < 0 && v2 < 0)) {
return false;
}
if (v2 >= 0 && v1 <= 0) {
side = 0;
}
else {
side = 1;
}
// for (int i = 0; i < 4; ++i) {
// if (nodes[i] != nodes[oppositeQuadIndex(i)]) {
// minEnd[dir] = nodes[i]->grid.minEnd[dir];
// maxEnd[dir] = nodes[i]->grid.maxEnd[dir];
// v1 = s->index(minEnd);
// v2 = s->index(maxEnd);
// if ((v1 > 0 && v2 > 0) || (v1 < 0 && v2 < 0)) {
// return false;
// }
// }
// }
return true;
}
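// generateQuad emits the dual-contouring polygon dual to the minimal edge shared by
// the four grids: each grid contributes the vertex of the component touching that
// edge. With INTERSECTION_FREE defined, the polygon is re-triangulated through face
// or edge vertices whenever the intersection-free conditions (1 or 2) fail.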
template <class GridHolder>
void RectilinearGrid::generateQuad(const std::array<GridHolder, 4> &nodes,
int quadDir1,
int quadDir2,
Mesh *mesh,
ScalarField *t,
float) {
int edgeSide;
PositionCode minEnd, maxEnd;
if (!RectilinearGrid::checkSign(nodes, quadDir1, quadDir2, t, edgeSide, minEnd, maxEnd)) {
return;
}
std::vector<Vertex *> polygons;
int lineDir = 3 - quadDir1 - quadDir2;
int componentIndices[4];
for (int i = 0; i < 4; ++i) {
if (nodes[i] != nodes[oppositeQuadIndex(i)]) {
int c1, c2;
quadIndex(quadDir1, quadDir2, symmetryQuadIndex(i), c1, c2);
componentIndices[i] = nodes[i]->grid.edgeComponentIndex(c1, c2);
}
else {
componentIndices[i] = nodes[i]->grid.faceComponentIndex(quadDir2, lineDir, 1 - i / 2, edgeSide);
}
if (componentIndices[i] == -1) {
return;
}
}
polygons.push_back(&nodes[0]->grid.vertices.at(componentIndices[0]));
if (nodes[0] != nodes[1]) {
polygons.push_back(&nodes[1]->grid.vertices.at(componentIndices[1]));
}
polygons.push_back(&nodes[3]->grid.vertices.at(componentIndices[3]));
if (nodes[2] != nodes[3]) {
polygons.push_back(&nodes[2]->grid.vertices.at(componentIndices[2]));
}
std::set<Vertex *> identicals;
for (auto v : polygons) {
identicals.insert(v);
}
if (identicals.size() < 3) {
return;
}
bool condition1Failed = false;
int firstConcaveFaceVertex = 0;
if (false) {
int sameCellIndex[2] = {2, 3};
for (int i = 0; i < 4; ++i) {
int testDir = (lineDir + i / 2 + 1) % 3;
int edgeAdjacentCellIndexA = edgeTestNodeOrder[i][0];
int edgeAdjacentCellIndexB = edgeTestNodeOrder[i][1];
RectilinearGrid *a = &nodes[edgeAdjacentCellIndexA]->grid;
RectilinearGrid *b = &nodes[edgeAdjacentCellIndexB]->grid;
if (a != b) {
if (a->faceVertices.find(b) != a->faceVertices.end()) {
firstConcaveFaceVertex = i;
condition1Failed = true;
continue;
}
fvec3 faceMinA = fvec3(std::numeric_limits<float>::max());
fvec3 faceMinB = faceMinA;
fvec3 faceMaxA = -fvec3(std::numeric_limits<float>::max());
fvec3 faceMaxB = faceMaxA;
for (int j = 0; j < 4; ++j) {
int subIndexA = faceProcFaceMask[testDir][j][0];
int subIndexB = faceProcFaceMask[testDir][j][1];
fvec3 cornerA = a->cornerPos(subIndexA);
fvec3 cornerB = b->cornerPos(subIndexB);
faceMinA = glm::min(cornerA, faceMinA);
faceMinB = glm::min(cornerB, faceMinB);
faceMaxA = glm::max(cornerA, faceMaxA);
faceMaxB = glm::max(cornerB, faceMaxB);
}
fvec3 faceMin = glm::max(faceMinA, faceMinB);
fvec3 faceMax = glm::min(faceMaxA, faceMaxB);
if (!segmentFaceIntersection(a->vertices[componentIndices[edgeAdjacentCellIndexA]].hermiteP,
b->vertices[componentIndices[edgeAdjacentCellIndexB]].hermiteP,
faceMin,
faceMax,
testDir)) {
fvec3 minEndDir = faceMin + directionMap(lineDir) * (faceMax - faceMin);
fvec3 maxEndDir = faceMax - directionMap(lineDir) * (faceMax - faceMin);
glm::fvec3 points[4] = {faceMin, minEndDir, faceMax, maxEndDir};
fvec3 massPointSum(0.f);
int pointCount = 0;
for (int k = 0; k < 4; ++k) {
float v1 = t->value(points[k]);
float v2 = t->value(points[(k + 1) % 4]);
if ((v1 >= 0 && v2 < 0) || (v1 < 0 && v2 >= 0)) {
fvec3 x;
t->solve(points[k], points[(k + 1) % 4], x);
massPointSum += x;
pointCount++;
}
}
if (pointCount > 0) {
firstConcaveFaceVertex = i;
auto faceV = new Vertex(massPointSum / (float)pointCount);
mesh->addVertex(faceV, t);
a->faceVertices[b] = faceV;
b->faceVertices[a] = faceV;
condition1Failed = true;
}
}
}
else {
sameCellIndex[0] = edgeAdjacentCellIndexA;
sameCellIndex[1] = edgeAdjacentCellIndexB;
}
}
int minCellIndex = 0;
for (int i = 0; i < 4; ++i) {
int edgeAdjacentCellIndexA = edgeTestNodeOrder[i][0];
int edgeAdjacentCellIndexB = edgeTestNodeOrder[i][1];
if (edgeAdjacentCellIndexA != sameCellIndex[0] && edgeAdjacentCellIndexA != sameCellIndex[1] && edgeAdjacentCellIndexB != sameCellIndex[0] && edgeAdjacentCellIndexB != sameCellIndex[1]) {
minCellIndex = edgeAdjacentCellIndexA;
}
}
}
fvec3 p1 = codeToPos(minEnd, RectilinearGrid::getUnitSize());
fvec3 p2 = codeToPos(maxEnd, RectilinearGrid::getUnitSize());
bool condition2Failed = isInterFreeCondition2Faild(polygons, p1, p2);
if (polygons.size() > 3) {
std::vector<Vertex *> reversePolygons = {polygons[1], polygons[2], polygons[3], polygons[0]};
bool reverseCondition2Failed = isInterFreeCondition2Faild(reversePolygons, p1, p2);
if (!reverseCondition2Failed) {
/// NOTE: the swap here happens whether intersection-free or not
polygons.swap(reversePolygons);
}
condition2Failed = condition2Failed && reverseCondition2Failed;
}
#ifdef INTERSECTION_FREE
if (condition1Failed || condition2Failed) {
GridHolder circle[4] = {nodes[0], nodes[1], nodes[3], nodes[2]};
polygons.clear();
if (!condition2Failed) {
std::vector<int> concaveFlags;
std::vector<Vertex *> convexPart;
int concaveCount = 0;
for (int i = 0; i < 4; ++i) {
int index = (i + firstConcaveFaceVertex) % 4;
auto faceIter = circle[index]->grid.faceVertices.find(&circle[(index + 1) % 4]->grid);
auto cellVertex = &(circle[(index + 1) % 4]->grid.vertices[componentIndices[(index + 1) % 4]]);
if (faceIter != circle[index]->grid.faceVertices.end()) {
polygons.push_back(faceIter->second);
concaveFlags.push_back(1);
convexPart.push_back(faceIter->second);
concaveCount++;
}
polygons.push_back(cellVertex);
concaveFlags.push_back(0);
}
for (int i = 0; i < polygons.size() - 2; ++i) {
Vertex *triangle[3] = {
polygons[0], polygons[i + 1], polygons[i + 2]};
mesh->addTriangle(triangle, t);
}
}
else {
Vertex edgeVertex;
t->solve(p1, p2, edgeVertex.hermiteP);
mesh->addVertex(&edgeVertex, t);
for (int i = 0; i < 4; ++i) {
RectilinearGrid *a = &circle[i]->grid;
RectilinearGrid *b = &circle[(i + 1) % 4]->grid;
if (a != b) {
polygons.push_back(&a->vertices[componentIndices[i]]);
auto faceVIter = a->faceVertices.find(b);
if (faceVIter != a->faceVertices.end()) {
polygons.push_back(faceVIter->second);
polygons.push_back(faceVIter->second);
}
polygons.push_back(&b->vertices[componentIndices[(i + 1) % 4]]);
}
}
for (int i = 0; i < polygons.size() / 2; ++i) {
Vertex *triangle[3] = {
&edgeVertex, polygons[i * 2], polygons[i * 2 + 1]};
mesh->addTriangle(triangle, t);
}
}
}
else {
#endif
for (int i = 2; i < polygons.size(); ++i) {
Vertex *triangle[3] = {
polygons[0], polygons[i - 1], polygons[i]};
mesh->addTriangle(triangle, t);
}
#ifdef INTERSECTION_FREE
}
#endif
}
#endif //VOXELWORLD_RECTILINEARGRID_H
<|start_filename|>src/KdtreeISO/include/Topology.h<|end_filename|>
//
// Created by Danielhu on 2018/1/13.
//
#ifndef VOXELWORLD_GENERATORS_H
#define VOXELWORLD_GENERATORS_H
#include <iostream>
#include <algorithm>
#include <vector>
#include <glm/glm.hpp>
#include <limits>
#include "ScalarField.h"
class Topology : public ScalarField {
public:
bool solve(const glm::fvec3 &p1, const glm::fvec3 &p2, glm::fvec3 &out) override;
virtual float index(const PositionCode &code) override;
float laplaceOperator(const glm::fvec3 &p);
float gradientOffset() override { return 0.01f; }
virtual ~Topology() = default;
};
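// The classes below compose scalar fields as CSG primitives on signed-distance-like
// values: Union takes the min of two fields, Intersection the max, Difference
// max(l, -r), and ExpUnion an exponential smooth blend of the two.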
class Union : public Topology {
Topology *l;
Topology *r;
public:
Union(Topology *l, Topology *r) : l(l), r(r) {}
float value(const glm::fvec3 &p) override { return std::min(l->value(p), r->value(p)); }
~Union() override {
delete l;
delete r;
}
};
class UnionList : public Topology {
std::vector<Topology *> _list;
public:
UnionList(std::vector<Topology *> list) : _list(std::move(list)) {}
float value(const glm::fvec3 &p) override {
float d = std::numeric_limits<float>::max();
for (auto t : _list) {
d = std::min(t->value(p), d);
}
return d;
}
~UnionList() override {
for (auto p : _list) {
delete p;
}
}
};
class ExpUnion : public Topology {
Topology *l;
Topology *r;
float k;
public:
ExpUnion(Topology *l, Topology *r, float k = 32.f) : l(l), r(r), k(k) {}
float value(const glm::fvec3 &p) override {
float res = exp(-k * l->value(p)) + exp(-k * r->value(p));
return -log(std::max(0.0001f, res)) / k;
}
~ExpUnion() override {
delete l;
delete r;
}
};
class Difference : public Topology {
Topology *l;
Topology *r;
public:
Difference(Topology *l, Topology *r) : l(l), r(r) {}
float value(const glm::fvec3 &p) override { return std::max(l->value(p), -r->value(p)); }
~Difference() override {
delete l;
delete r;
}
};
class Intersection : public Topology {
Topology *l;
Topology *r;
public:
Intersection(Topology *l, Topology *r) : l(l), r(r) {}
float value(const glm::fvec3 &p) override { return std::max(l->value(p), r->value(p)); }
~Intersection() override {
delete l;
delete r;
}
};
class Transform : public Topology {
glm::mat4 trans_;
Topology *inner_;
public:
Transform(const glm::mat4 &trans, Topology *inner) : trans_(trans), inner_(inner) {}
~Transform() override { delete inner_; }
float value(const glm::fvec3 &root) override;
};
class Sphere : public Topology {
float radius;
fvec3 center;
public:
explicit Sphere(float radius, glm::fvec3 center = glm::fvec3(0)) : radius(radius), center(center) {}
~Sphere() override {}
float value(const glm::fvec3 &p) override;
// bool solve(const glm::fvec3 &p1, const glm::fvec3 &p2, glm::fvec3 &out) override;
};
class AABB : public Topology {
glm::fvec3 min_;
glm::fvec3 max_;
public:
float value(const glm::fvec3 &p) override;
AABB(glm::fvec3 min_, glm::fvec3 max_) : min_(min_), max_(max_){};
~AABB() override {}
};
class Torus : public Topology {
float r1;
float r2;
public:
Torus(float r1, float r2) : r1(r1), r2(r2) {}
float value(const glm::fvec3 &p) override {
glm::vec2 q = glm::vec2(glm::length(glm::vec2(p.x, p.z)) - r1, p.y);
return length(q) - r2;
}
};
class Cylinder : public Topology {
glm::fvec3 c;
public:
explicit Cylinder(const glm::fvec3 &c) : c(c) {}
float value(const glm::fvec3 &p) override { return glm::length(glm::vec2(p.x, p.z) - glm::vec2(c.x, c.y)) - c.z; }
};
class Capsule : public Topology {
glm::fvec3 a;
glm::fvec3 b;
float r;
public:
Capsule(const glm::fvec3 &a, const glm::fvec3 &b, float r) : a(a), b(b), r(r) {}
float value(const glm::fvec3 &p) override {
glm::fvec3 pa = p - a, ba = b - a;
float h = glm::clamp(dot(pa, ba) / dot(ba, ba), 0.f, 1.f);
return length(pa - ba * h) - r;
}
};
class Heart : public Topology {
float scale;
glm::fvec3 center;
public:
explicit Heart(float scale, glm::fvec3 center = glm::fvec3(0)) : scale(scale), center(center) {}
~Heart() override {}
float value(const glm::fvec3 &p) override;
};
#endif //VOXELWORLD_GENERATORS_H
<|start_filename|>src/KdtreeISO/lib/RectilinearGrid.cpp<|end_filename|>
//
// Created by Danielhu on 2018/5/10.
//
#define GLM_ENABLE_EXPERIMENTAL
#define GLM_FORCE_CTOR_INIT
#define GLM_FORCE_EXPLICIT_CTOR
#include <set>
#include <Mesh.h>
#include <map>
#include <glm/ext.hpp>
#include <glm/gtx/intersect.hpp>
#include "Indicators.h"
#include "RectilinearGrid.h"
float RectilinearGrid::unitSize = 0.5f;
void RectilinearGrid::setUnitSize(float size) {
unitSize = size;
}
float RectilinearGrid::getUnitSize() {
return unitSize;
}
void RectilinearGrid::solveComponent(int i) {
solve(components[i], vertices[i]);
}
void RectilinearGrid::solve(QefSolver &qef, Vertex &v) {
auto &p = v.hermiteP;
qef.solve(p, v.error);
auto extends = codeToPos(maxCode - minCode, RectilinearGrid::getUnitSize()) * 0.5f;
const auto min = codeToPos(minCode, RectilinearGrid::getUnitSize()) - extends;
const auto max = codeToPos(maxCode, RectilinearGrid::getUnitSize()) + extends;
if (p.x < min.x || p.x > max.x ||
p.y < min.y || p.y > max.y ||
p.z < min.z || p.z > max.z) {
p = qef.massPointSum / (float)qef.pointCount;
}
}
void RectilinearGrid::assignSign(ScalarField *t) {
auto sizeCode = PositionCode(
maxCode.x - minCode.x,
maxCode.y - minCode.y,
maxCode.z - minCode.z);
int8_t mtlID = t->getMaterialID();
for (int i = 0; i < 8; ++i) {
PositionCode code = decodeCell(i);
float val = t->index(minCode + sizeCode * code);
cornerSigns[i] = (uint8_t)(val >= 0. ? 0 : mtlID);
}
isSigned = !((cornerSigns[0] == cornerSigns[1]) &&
(cornerSigns[1] == cornerSigns[2]) &&
(cornerSigns[2] == cornerSigns[3]) &&
(cornerSigns[3] == cornerSigns[4]) &&
(cornerSigns[4] == cornerSigns[5]) &&
(cornerSigns[5] == cornerSigns[6]) &&
(cornerSigns[6] == cornerSigns[7]));
}
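// Group inside corners that share the same sign and are connected along cell edges
// into components (a small merge pass over the 8 corners), then compact the
// component indices and allocate one vertex and one QEF per component.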
void RectilinearGrid::calCornerComponents() {
assert(components.empty());
std::set<int> clusters[8];
for (int i = 0; i < 8; ++i) {
if (cornerSigns[i] != 0) {
clusters[i].insert({i});
componentIndices[i] = static_cast<uint8_t>(i);
}
}
for (int i = 0; i < 12; ++i) {
int c1 = cellProcFaceMask[i][0];
int c2 = cellProcFaceMask[i][1];
if (cornerSigns[c1] == cornerSigns[c2] && cornerSigns[c2] != 0) {
int co1 = componentIndices[c1];
int co2 = componentIndices[c2];
auto &c2Components = clusters[co2];
for (auto comp : c2Components) {
clusters[co1].insert(comp);
}
for (auto comp : clusters[co1]) {
componentIndices[comp] = static_cast<uint8_t>(co1);
}
}
}
int reorderMap[8]{0};
for (int i = 0; i < 8; ++i) {
reorderMap[i] = -1;
}
int new_order = 0;
for (int i = 0; i < 8; ++i) {
if (reorderMap[componentIndices[i]] == -1 && cornerSigns[i] != 0) {
reorderMap[componentIndices[i]] = new_order++;
}
}
for (int i = 0; i < 8; ++i) {
componentIndices[i] = static_cast<uint8_t>(reorderMap[componentIndices[i]]);
}
vertices.resize(static_cast<unsigned long>(new_order));
components.resize(static_cast<unsigned long>(new_order));
}
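// Sample hermite data on every sign-changing cube edge: find the crossing point,
// query the field normal there, and accumulate it into the per-component QEF (and
// into allQef when 'all' is set). Returns false when any component ends up with no
// samples or with 12 or more.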
bool RectilinearGrid::sampleQef(ScalarField *t, bool all) {
calCornerComponents();
const auto min = codeToPos(minCode, RectilinearGrid::getUnitSize());
// auto minX = codeToPos(minCode, RectilinearGrid::getUnitSize()).x;
// assert(!isinf(minX));
auto isize = maxCode - minCode;
auto size = codeToPos(isize, RectilinearGrid::getUnitSize());
assert(!isnan(size.x));
// size = codeToPos(isize, RectilinearGrid::getUnitSize());
fvec3 cornerPositions[8];
for (int i = 0; i < 8; ++i) {
cornerPositions[i] = min + size * min_offset_subdivision(i);
}
for (int i = 0; i < 12; ++i) {
fvec3 p1 = cornerPositions[edge_map[i][0]];
fvec3 p2 = cornerPositions[edge_map[i][1]];
if (cornerSigns[edge_map[i][0]] != cornerSigns[edge_map[i][1]]) {
fvec3 p, n;
if (t->solve(p1, p2, p)) {
t->normal(p, n);
int qefIndex = edgeComponentIndex(edge_map[i][0], edge_map[i][1]);
components.at(static_cast<unsigned long>(qefIndex)).add(p, n);
if (all) {
allQef.add(p, n);
}
}
}
}
for (int i = 0; i < components.size(); ++i) {
if (components[i].pointCount == 0 || components[i].pointCount >= 12) {
return false;
}
t->normal(vertices[i].hermiteP, vertices[i].hermiteN);
}
return allQef.pointCount > 0;
}
void RectilinearGrid::draw(Mesh *mesh) {
fvec3 size = codeToPos(maxCode - minCode, RectilinearGrid::getUnitSize());
fvec3 min = codeToPos(minCode, RectilinearGrid::getUnitSize());
for (int i = 0; i < 12; ++i) {
auto a = min_offset_subdivision(cellProcFaceMask[i][0]) * size + min;
auto b = min_offset_subdivision(cellProcFaceMask[i][1]) * size + min;
auto na = normalize(min_offset_subdivision(cellProcFaceMask[i][0]) - fvec3(0.5f));
auto nb = normalize(min_offset_subdivision(cellProcFaceMask[i][1]) - fvec3(0.5f));
mesh->positions.push_back(a);
mesh->positions.push_back(a);
mesh->positions.push_back(b);
mesh->normals.push_back(na);
mesh->normals.push_back(na);
mesh->normals.push_back(nb);
mesh->indices.push_back(static_cast<unsigned int &&>(mesh->indices.size()));
mesh->indices.push_back(static_cast<unsigned int &&>(mesh->indices.size()));
mesh->indices.push_back(static_cast<unsigned int &&>(mesh->indices.size()));
}
}
bool RectilinearGrid::isInterFreeCondition2Faild(const std::vector<Vertex *> &polygons,
const glm::fvec3 &p1,
const glm::fvec3 &p2) {
int anotherV = 3;
bool interSupportingEdge = false;
for (int i = 2; i < polygons.size(); ++i) {
fvec2 baryPos;
float distance;
bool isInter = glm::intersectRayTriangle(p1,
p2 - p1,
polygons[0]->hermiteP,
polygons[i - 1]->hermiteP,
polygons[i]->hermiteP,
baryPos,
distance);
isInter = isInter && (distance > 0.f && distance < 1.f);
if (isInter) {
interSupportingEdge = true;
anotherV = i % 3 + 1;
}
}
if (polygons.size() == 3) {
return !interSupportingEdge;
}
else {
fvec2 baryPos;
float distance;
bool interTetrahedron = glm::intersectRayTriangle(polygons[0]->hermiteP,
polygons[2]->hermiteP - polygons[0]->hermiteP,
p1,
p2,
polygons[anotherV]->hermiteP,
baryPos,
distance);
interTetrahedron = interTetrahedron && (distance > 0.f && distance < 1.f);
return !(interTetrahedron && interSupportingEdge);
}
}
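// Two neighbouring grids may be clustered only if the merged cell is not
// sign-homogeneous and no edge of their shared face sees a sign change in both
// children (which would collapse separate surface components).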
bool RectilinearGrid::calClusterability(RectilinearGrid *left,
RectilinearGrid *right,
int dir,
const PositionCode &minCode,
const PositionCode &maxCode,
ScalarField *s) {
if (!left && !right) {
return true;
}
int clusterCornerSigns[8];
for (int i = 0; i < 8; ++i) {
clusterCornerSigns[i] = s->index(minCode + (maxCode - minCode) * decodeCell(i)) >= 0 ? 0 : s->getMaterialID();
}
bool homogeneous = true;
for (int i = 1; i < 8; ++i) {
if (clusterCornerSigns[i] != clusterCornerSigns[0]) {
homogeneous = false;
}
}
if (homogeneous) {
return false;
}
if (!(left && right)) {
return true;
}
RectilinearGrid *params[2] = {left, right};
for (int i = 0; i < 4; ++i) {
int edgeMinIndex = cellProcFaceMask[dir * 4 + i][0];
int edgeMaxIndex = cellProcFaceMask[dir * 4 + i][1];
int signChanges = 0;
for (int j = 0; j < 2; ++j) {
if (params[j]->cornerSigns[edgeMinIndex] != params[j]->cornerSigns[edgeMaxIndex]) {
signChanges++;
}
}
if (signChanges > 1) {
return false;
}
}
return true;
}
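// Merge two axis-aligned neighbouring grids into 'out': for every component of 'out'
// touching the shared face, fold the matching child component's QEF into it and
// re-parent the child's vertex to the coarser vertex.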
void RectilinearGrid::combineAAGrid(RectilinearGrid *left,
RectilinearGrid *right,
int dir,
RectilinearGrid *out) {
out->calCornerComponents();
if (!left && !right) {
return;
}
std::map<int, int> combineMaps[2];
RectilinearGrid *grids[2] = {left, right};
for (int i = 0; i < 4; ++i) {
int c = -1;
for (int j = 0; j < 2; ++j) {
if (out->cornerSigns[cellProcFaceMask[dir * 4 + i][j]] != 0) {
c = out->componentIndices[cellProcFaceMask[dir * 4 + i][j]];
break;
}
}
if (c == -1) {
continue;
}
for (int j = 0; j < 2; ++j) {
auto child = grids[j];
if (child) {
for (int k = 0; k < 2; ++k) {
if (child->cornerSigns[cellProcFaceMask[dir * 4 + i][k]] != 0) {
int childC = child->componentIndices[cellProcFaceMask[dir * 4 + i][k]];
assert(child->components[childC].pointCount > 0);
combineMaps[j][c] = childC;
break;
}
}
}
}
}
for (int i = 0; i < 2; ++i) {
for (auto p : combineMaps[i]) {
out->components.at(p.first).combine(grids[i]->components.at(p.second));
grids[i]->vertices.at(p.second).parent = &out->vertices.at(p.first);
}
}
int count = 0;
for (auto c : out->components) {
count += c.pointCount;
}
}
<|start_filename|>src/examples/opengl/main.cpp<|end_filename|>
//
// Created by Danielhu on 2018/1/16.
//
#define GLM_FORCE_CTOR_INIT
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <cmath>
#include <ctime>
#include <glm/glm.hpp>
#include <fstream>
#include <iostream>
#include <sstream>
#include <unordered_set>
#include "Mesh.h"
#include "Octree.h"
#include "Topology.h"
#include "Utils.h"
#include "VolumeData.h"
#include "cxxopts.hpp"
#include "program.h"
#include "svpng.inc"
using glm::fvec3;
using glm::mat4;
using glm::normalize;
using glm::radians;
using namespace std;
const char *vert = "#version 330 core\n"
"layout(location = 0) in vec3 position;\n"
"layout(location = 1) in vec3 normal;\n"
"\n"
"uniform mat4 mvp;\n"
"uniform mat4 m;\n"
"smooth out vec3 fragNormal;\n"
"\n"
"void main() {\n"
" fragNormal = (m * vec4(normal, 1.0)).xyz;\n"
" gl_Position = mvp * vec4(position, 1.0);\n"
"}";
const char *frag = "#version 330 core\n"
"out vec3 color;\n"
"\n"
"smooth in vec3 fragNormal;\n"
"\n"
"uniform float flag;"
"uniform vec3 lightDir;\n"
"uniform vec3 albedo;\n"
"uniform float specular;\n"
"\n"
"void main() {\n"
" vec3 normal = normalize(fragNormal);"
" vec3 h = normalize(lightDir + vec3(0, 0, 1));"
" if(flag > 0.5) {\n"
" color = albedo * mix(max(abs(dot(normal, "
"lightDir)), 0.0f) , pow(dot(normal, h), 64.f), specular);\n"
" // color = albedo;\n"
" } else {\n"
" color = vec3(0.0);\n"
" }\n"
" // color = (1.0 - albedo) * (step(dot(normal,vec3(0, "
"0, -1)), 0.0) * 0.8 + 0.2);\n"
"}";
static float cameraOffset = 15.f;
static double previousCursorX = 0.f;
static double previousCursorY = 0.f;
constexpr unsigned width = 512;
constexpr unsigned height = 512;
static float rotateX = 0.f;
static float rotateY = 0.f;
static bool pressing = false;
static bool inited = false;
struct MeshBuffer {
GLuint positions;
GLuint normals;
GLuint indices;
MeshBuffer() {
glGenBuffers(1, &positions);
glGenBuffers(1, &normals);
glGenBuffers(1, &indices);
}
~MeshBuffer() {
glDeleteBuffers(1, &positions);
glDeleteBuffers(1, &normals);
glDeleteBuffers(1, &indices);
}
};
void addMesh(Mesh *mesh, const MeshBuffer &buffer) {
glBindBuffer(GL_ARRAY_BUFFER, buffer.positions);
glBufferData(GL_ARRAY_BUFFER, mesh->positions.size() * sizeof(fvec3),
mesh->positions.data(), GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, buffer.normals);
glBufferData(GL_ARRAY_BUFFER, mesh->normals.size() * sizeof(fvec3),
mesh->normals.data(), GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, buffer.indices);
glBufferData(GL_ELEMENT_ARRAY_BUFFER,
mesh->indices.size() * sizeof(unsigned int), mesh->indices.data(),
GL_STATIC_DRAW);
}
void drawMesh(Mesh *mesh, const MeshBuffer &buffer, Program &p, bool shaded,
bool wireframe) {
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, buffer.positions);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, buffer.normals);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_TRUE, 0, nullptr);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, buffer.indices);
if (wireframe) {
p.setVec3("albedo", fvec3(0, 0, 0));
p.setFloat("flag", 0);
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glDrawElements(GL_TRIANGLES, (GLsizei)mesh->indices.size(), GL_UNSIGNED_INT,
nullptr);
}
if (shaded) {
p.setVec3("albedo", fvec3(1.0, 0.4, 0));
p.setFloat("flag", 1);
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
glDrawElements(GL_TRIANGLES, (GLsizei)mesh->indices.size(), GL_UNSIGNED_INT,
nullptr);
}
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
}
void setUniforms(Program &program) {
mat4 p = glm::perspective(radians(70.f), float(width) / float(height), 0.1f,
1000.f);
mat4 m = glm::rotate(mat4(), rotateX, fvec3(0, 1, 0));
m = glm::rotate(m, rotateY, fvec3(1, 0, 0));
mat4 v =
glm::lookAt(fvec3(0, 0, cameraOffset), fvec3(0, 0, -1), fvec3(0, 1, 0));
program.setMat4("m", m);
program.setMat4("mvp", p * v * m);
program.setVec3("albedo", fvec3(1.f, 1.f, 1.f));
program.setFloat("specular", 0.f);
program.setVec3("lightDir", normalize(fvec3(0.f, 0.f, 1.f)));
}
void error(int error, const char *description) {
cerr << error << ": " << description << endl;
}
void mouseInput(GLFWwindow *, double x, double y) {
if (pressing) {
rotateX += 0.01f * static_cast<float>(x - previousCursorX);
rotateY += 0.01f * static_cast<float>(y - previousCursorY);
previousCursorX = x;
previousCursorY = y;
}
}
void scroll(GLFWwindow *, double, double dy) {
cameraOffset -= 0.01f * cameraOffset * (float)dy;
pressing = true;
}
void press(GLFWwindow *window, int button, int action, int) {
if (button == GLFW_MOUSE_BUTTON_LEFT && action == GLFW_PRESS) {
pressing = true;
glfwGetCursorPos(window, &previousCursorX, &previousCursorY);
}
if (button == GLFW_MOUSE_BUTTON_LEFT && action == GLFW_RELEASE) {
pressing = false;
}
}
void dumpObj(Mesh *mesh, string name);
int main(int argc, char *argv[]) {
cxxopts::Options options("opengl_viewer",
"An opengl viewer for paper: Discrete k-d Tree "
"Hierarchy for Isosurface Extraction");
options.add_options() //
("e,error", "Error threshold.", cxxopts::value<float>()->default_value("1e-7")) //
("s,structure", "oct/kd, extracting iso-surface using oct/kd tree.", cxxopts::value<std::string>()->default_value("oct")) //
("rotateX", "Camera eular angle x.", cxxopts::value<float>()->default_value("0")) //
("rotateY", "Camera eular angle y.", cxxopts::value<float>()->default_value("0")) //
("v,volume", "Volume source (tiff file)", cxxopts::value<std::string>()->default_value("")) //
("c, capture", "Capture first frame to a file.", cxxopts::value<std::string>()->default_value("null")) //
("o, output", "Output obj file", cxxopts::value<std::string>()->default_value("./result.obj")) //
("h,help", "Print help and exit.");
auto parameters = options.parse(argc, argv);
rotateX = parameters["rotateX"].as<float>();
rotateY = parameters["rotateY"].as<float>();
stringstream errorss;
errorss.setf(ios::scientific);
errorss << parameters["e"].as<float>();
string windowName = parameters["s"].as<std::string>() + " " + errorss.str();
if (parameters.count("help")) {
cout << options.help() << std::endl;
return 0;
}
if (parameters["s"].as<std::string>() != "oct" &&
parameters["s"].as<std::string>() != "kd") {
cout << options.help() << std::endl;
return 0;
}
if (!glfwInit()) {
return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
GLFWwindow *window =
glfwCreateWindow(width, height, windowName.c_str(), nullptr, nullptr);
if (!window) {
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Initialize GLEW
glewExperimental = GL_TRUE; // Needed for core profile
if (glewInit() != GLEW_OK) {
fprintf(stderr, "Failed to initialize GLEW\n");
getchar();
glfwTerminate();
return -1;
}
cout << "OpenGL Version: " + string((const char *)glGetString(GL_VERSION))
<< endl;
glfwSetErrorCallback(error);
glfwSetCursorPosCallback(window, mouseInput);
glfwSetScrollCallback(window, scroll);
glfwSetMouseButtonCallback(window, press);
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
MeshBuffer meshBuffers[4];
std::unique_ptr<ScalarField> scalarField = std::make_unique<Transform>(
mat4(1.0),
// new Difference(
// new AABB(fvec3(-4), fvec3(4)),
// new Sphere(5.5, fvec3(0)))
// new Difference(
// new AABB(fvec3(-4), fvec3(4)),
// new Union(
// new Union(
// new Cylinder(fvec3(0.f, 0.f, 4.1f))
// new Transform(glm::rotate(mat4(), glm::radians(90.f),
// fvec3(1, 0, 0)),
// new Cylinder(fvec3(0.f, 0.f, 3.7f)))
// )
// new Transform(glm::rotate(mat4(), glm::radians(90.f), fvec3(1, 0, 0)),
new Intersection(
new Difference(
new Cylinder(fvec3(0.f, 0.f, 4.f)),
new Cylinder(fvec3(0.f, 0.f, 3.2f))),
new AABB(fvec3(-4), fvec3(4)))
// )
// new Difference(
// new Union(
// new AABB(fvec3(-4, -4, -0.4f), fvec3(4, 4, -0.2f)),
// new AABB(fvec3(-4, -4, 0.2f), fvec3(4, 4, 0.4f))
// )
// new Intersection(
// new Sphere(5)
// new AABB(fvec3(-5), fvec3(5, 5, 0))
// )
// new Sphere(5.2f)
// )
// new Difference(
// new Heart(5),
// new Difference(
// new Cylinder(fvec3(0.f, 0.f, 3.7f)),
// new Cylinder(fvec3(0.f, 0.f, 3.f))
// )
// )
// new Intersection(
// new AABB(fvec3(-4), fvec3(4))
// new ExpUnion(
// new Sphere(6, fvec3(4.5, 4.5, 0)),
// new Sphere(6, fvec3(-4.5, -4.5, 0)),
// 1
// )
// )
// new Intersection(
// new AABB(fvec3(-4.3), fvec3(4.3)),
// new Difference(
// new Transform(glm::rotate(mat4(), glm::radians(90.f), fvec3(1,
// 0, 0)),
// new Cylinder(fvec3(0.f, 0.f, 4.3f))),
// new Transform(glm::rotate(mat4(), glm::radians(90.f), fvec3(1,
// 0, 0)),
// new Cylinder(fvec3(0.f, 0.f, 3.8f)))
// )
// )
// new Union(
// new Intersection(
// new Cylinder(fvec3(0, 0, 0.5)),
// new AABB(fvec3(-6), fvec3(6.5))
// ),
// new Intersection(
// new Cylinder(fvec3(0, 0, 5)),
// new UnionList({
// new AABB(fvec3(-5, -5, -5), fvec3(5,
// -4.5, 5)), new AABB(fvec3(-5, -2.5, -5),
// fvec3(5, -2, 5)), new AABB(fvec3(-5, 0,
// -5), fvec3(5, 0.5, 5)), new
// AABB(fvec3(-5, 2.5, -5), fvec3(5, 3,
// 5)), new AABB(fvec3(-5, 5, -5),
// fvec3(5, 5.5, 5))
// })
// )
// )
// new AABB(fvec3(-5), fvec3(5))
// new Union(
// new Union(
// new Intersection(
// new AABB(fvec3(-5, -5.5, -5), fvec3(5, -5, 5)),
// new Cylinder(fvec3(0, 0, 5))
// ),
// static_cast<Topology *>(new Sphere(3.0, fvec3(0)))
// )
// new AABB(fvec3(-4, -4, 1), fvec3(4, 4, 4))
// new Union(
// new Union(
// new Sphere(1),
// new Sphere(1, fvec3(4, 4, 0))
// ),
// new Union(
// new Sphere(1, fvec3(4, 0, 4)),
// new Sphere(1, fvec3(0, 4, 4))
// )
// )
);
constexpr int octDepth = 8;
constexpr int octSize = 16;
cameraOffset = octSize * 2;
RectilinearGrid::setUnitSize((float)(octSize / std::pow(octDepth, 2)));
PositionCode sizeCode = PositionCode(1 << (octDepth - 1));
float threshold = parameters["e"].as<float>();
if (parameters.count("volume")) {
auto volumeData = new VolumeData(parameters["volume"].as<std::string>(), 256,
-sizeCode / 2, PositionCode(2));
volumeData->readTIFF();
scalarField.reset(volumeData);
cout << "vulome read" << endl;
}
clock_t begin = clock();
Octree *octree =
Octree::buildWithScalarField(-sizeCode / 2, octDepth, scalarField.get(),
parameters["s"].as<std::string>() == "kd");
if (!octree) {
std::cout << "no sign change found!, program exited." << std::endl;
exit(0);
}
clock_t oct_build = clock();
cout << "oct build time:" << (double)(oct_build - begin) / CLOCKS_PER_SEC
<< endl;
Kdtree *kdtree = nullptr;
auto *kdtreeVisual = new Mesh();
int intersectionPreservingVerticesCount = 0;
bool intersectionFree = false;
Mesh *mesh(nullptr);
if ((parameters["s"].as<std::string>()) == "kd") {
cout << "extract using kdtree" << endl;
kdtree = Kdtree::buildFromOctree(octree, -sizeCode / 2, sizeCode / 2,
scalarField.get(), 0);
clock_t kdbuild = clock();
Kdtree::drawKdtree(kdtree, kdtreeVisual, threshold);
kdbuild = clock();
cout << "kd build time:" << (double)(kdbuild - oct_build) / CLOCKS_PER_SEC
<< endl;
mesh = Kdtree::extractMesh(kdtree, scalarField.get(), threshold);
clock_t kdextract = clock();
cout << "kd extract time:" << (double)(kdextract - kdbuild) / CLOCKS_PER_SEC
<< endl;
}
else if ((parameters["s"].as<std::string>()) == "oct") {
cout << "extract using octree" << endl;
Octree::simplify(octree, threshold);
mesh = Octree::extractMesh(octree, scalarField.get(),
intersectionPreservingVerticesCount,
intersectionFree);
clock_t octextract = clock();
cout << "oct extract time:"
<< (double)(octextract - oct_build) / CLOCKS_PER_SEC << endl;
}
auto *octreeVisual = new Mesh();
Octree::drawOctrees(octree, octreeVisual);
cout << "intersectionFree: " << (intersectionFree ? "true" : "false") << endl;
cout << "triangle count: " << mesh->indices.size() / 3 << endl;
// cout << "vertex count: " << mesh->positions.size() << endl;
cout << "intersection contours: " << intersectionPreservingVerticesCount
<< endl;
mesh->generateFlatNormals();
// dump mesh
if (parameters.count("output")) {
dumpObj(mesh, parameters["output"].as<std::string>());
}
//
Program program;
if (!program.init(vert, frag)) {
cerr << "glsl error" << endl;
return -1;
}
addMesh(mesh, meshBuffers[0]);
addMesh(octreeVisual, meshBuffers[1]);
addMesh(kdtreeVisual, meshBuffers[2]);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
while (!glfwWindowShouldClose(window)) {
glfwPollEvents();
if (pressing || !inited) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
program.use();
setUniforms(program);
drawMesh(mesh, meshBuffers[0], program, true, false);
// drawMesh(octreeVisual, meshBuffers[1], program, false, true);
// drawMesh(kdtreeVisual, meshBuffers[2], program, false, true);
glfwSwapBuffers(window);
if (!inited) {
stringstream outss;
outss << parameters["c"].as<std::string>()
<< parameters["e"].as<float>() << "_"
<< parameters["s"].as<std::string>() << "_t"
<< mesh->indices.size() / 3;
if (parameters["c"].as<std::string>() != "null") {
unsigned char image[width * height * 3];
// unsigned char *p = image;
// glPixelStorei(GL_PACK_ALIGNMENT, 0);
// glPixelStorei(GL_PACK_ROW_LENGTH, );
// glPixelStorei(GL_PACK_SKIP_PIXELS, 0);
// glPixelStorei(GL_PACK_SKIP_ROWS, 0);
glReadPixels(width * 1 / 2, height * 1 / 2, width, height, GL_RGB,
GL_UNSIGNED_BYTE, image);
FILE *fp = fopen((outss.str() + ".png").c_str(), "wb");
svpng(fp, width, height, (unsigned char *)image, 0);
fclose(fp);
break;
}
}
inited = true;
}
}
delete mesh;
delete octreeVisual;
glfwTerminate();
return 0;
}
void dumpObj(Mesh *mesh, string path) {
  bool vtEnable = false;
  string spaceString = " ";
  ofstream writeStream(path);
  if (!writeStream) {
    cout << "open file failed";
    return;
  }
  auto vertexCount = mesh->positions.size();
  auto vertexNormalCount = mesh->normals.size();
  auto triangleCount = mesh->indices.size() / 3;
  assert(vertexCount == vertexNormalCount);
  writeStream << "# vertex count " << vertexCount << endl;
  writeStream << "# triangle count " << triangleCount << endl;
  writeStream << "# vertex - v" << endl;
  for (auto i = 0; i < vertexCount; i++) {
    writeStream << "v" << spaceString << mesh->positions[i].x << spaceString << mesh->positions[i].y << spaceString << mesh->positions[i].z << endl;
  }
  if (vtEnable) {
    writeStream << endl
                << endl
                << endl
                << "# texture - vt" << endl;
    writeStream << "vt" << spaceString << 0 << spaceString << 0 << endl;
  }
  writeStream << endl
              << endl
              << endl
              << "# normal - vn" << endl;
  for (auto i = 0; i < vertexNormalCount; i++) {
    writeStream << "vn" << spaceString << mesh->normals[i].x << spaceString << mesh->normals[i].y << spaceString << mesh->normals[i].z << endl;
  }
  writeStream << endl
              << endl
              << endl
              << "# face (triangle) - f" << endl;
  for (auto i = 0; i < triangleCount; i++) {
    int offset = 1;
    int index = 3 * i + 0;
    if (vtEnable) {
      writeStream << "f" << spaceString
                  << mesh->indices[index + 0] + offset << "/" << 1 << "/" << mesh->indices[index + 0] + offset << spaceString
                  << mesh->indices[index + 1] + offset << "/" << 1 << "/" << mesh->indices[index + 1] + offset << spaceString
                  << mesh->indices[index + 2] + offset << "/" << 1 << "/" << mesh->indices[index + 2] + offset << endl;
    }
    else {
      writeStream << "f" << spaceString
                  << mesh->indices[index + 0] + offset << "//" << mesh->indices[index + 0] + offset << spaceString
                  << mesh->indices[index + 1] + offset << "//" << mesh->indices[index + 1] + offset << spaceString
                  << mesh->indices[index + 2] + offset << "//" << mesh->indices[index + 2] + offset << endl;
    }
  }
  std::cout << "dump obj to " << path << std::endl;
  writeStream.close();
}
<|start_filename|>src/KdtreeISO/include/VolumeData.h<|end_filename|>
//
// Created by Danielhu on 2018/5/29.
//
#ifndef VOXELWORLD_VOLUME_DATA_H
#define VOXELWORLD_VOLUME_DATA_H
extern "C" {
#include "tiffio.h"
}
#include <cstdint>
#include <cstdlib>
#include <string>
#include <sstream>
#include <iomanip>
#include <utility>
#include "ScalarField.h"
#include "RectilinearGrid.h"
#include "Utils.h"
class VolumeData : public ScalarField {
float isovalue = 31.5f;
std::string pathToTiffs;
int levels;
int width = 0;
int height = 0;
std::vector<uint8_t> data;
PositionCode minCode;
PositionCode scale;
float index(const PositionCode &code) override;
public:
VolumeData(const std::string &pathToTiffs, int levels, const PositionCode &minCode, const PositionCode &scale)
: pathToTiffs(pathToTiffs),
levels(levels),
minCode(minCode),
scale(scale) {}
inline int codeToOffset(const PositionCode &code) {
return code.z * width * height + code.y * width + code.x;
}
float value(const glm::fvec3 &p) override;
float gradientOffset() override { return RectilinearGrid::getUnitSize(); }
bool solve(const glm::fvec3 &p1, const glm::fvec3 &p2, glm::fvec3 &out) override;
void readTIFF();
};
#endif //VOXELWORLD_VOLUME_DATA_H
<|start_filename|>src/KdtreeISO/lib/Octree.cpp<|end_filename|>
//
// Created by Danielhu on 2018/1/20.
//
#define GLM_ENABLE_EXPERIMENTAL
#define GLM_FORCE_CTOR_INIT
#include <unordered_map>
#include <unordered_set>
#include <numeric>
#include <set>
#include <glm/glm.hpp>
#include <glm/gtx/intersect.hpp>
#include <glm/gtx/fast_square_root.hpp>
#include "Octree.h"
#include "Mesh.h"
#include "Utils.h"
#include "Indicators.h"
Octree *Octree::buildWithScalarField(const PositionCode &minCode, int depth, ScalarField *scalarField, bool as_mipmap) {
PositionCode sizeCode = PositionCode(1 << (depth - 1));
auto root = new Octree(minCode, minCode + sizeCode, depth);
assert(depth > 0);
root->grid.assignSign(scalarField);
bool noChildren = true;
if (depth == 1) {
if (!root->grid.isSigned) {
delete root;
return nullptr;
}
root->grid.sampleQef(scalarField, true);
root->isLeaf = true;
}
else {
PositionCode subSizeCode = PositionCode(static_cast<uint16_t>(1 << (depth - 2)));
for (int i = 0; i < 8; ++i) {
PositionCode subMinCode = minCode + subSizeCode * decodeCell(i);
root->children[i] =
buildWithScalarField(subMinCode, depth - 1, scalarField, as_mipmap);
if (root->children[i]) {
noChildren = false;
root->children[i]->childIndex = static_cast<int8_t>(i);
root->grid.allQef.combine(root->children[i]->grid.allQef);
}
}
if (noChildren) {
delete root;
return nullptr;
}
calClusterbility(root, scalarField);
if (root->clusterable && !as_mipmap) {
root->combineComponents(scalarField);
}
root->isLeaf = false;
}
assert(root->grid.allQef.pointCount);
assert(!isnan(root->grid.allQef.btb));
if (!as_mipmap) {
for (int i = 0; i < root->grid.components.size(); ++i) {
root->grid.solveComponent(i);
}
}
root->grid.solve(root->grid.allQef, root->grid.approximate);
// assert(root->grid.error >= -0.001f);
return root;
}
void Octree::getSum(Octree *root, const PositionCode &minPos, const PositionCode &maxPos, QefSolver &out) {
if (!root) {
return;
}
if (glm::any(glm::greaterThanEqual(minPos, maxPos))) {
return;
}
if (glm::any(glm::greaterThanEqual(minPos, root->grid.maxCode)) || glm::any(glm::lessThanEqual(maxPos, root->grid.minCode))) {
return;
}
auto minPosBound = glm::max(root->grid.minCode, minPos);
auto maxPosBound = glm::min(root->grid.maxCode, maxPos);
if (minPosBound == root->grid.minCode && maxPosBound == root->grid.maxCode) {
out.combine(root->grid.allQef);
return;
}
for (int i = 0; i < 8; ++i) {
getSum(root->children[i], minPosBound, maxPosBound, out);
}
}
void Octree::simplify(Octree *root, float threshold) {
if (!root) {
return;
}
if (root->isLeaf) {
return;
}
for (int i = 0; i < 8; ++i) {
simplify(root->children[i], threshold);
}
if (root->clusterable && root->grid.approximate.error < threshold) {
for (auto &child : root->children) {
child = nullptr;
}
root->isLeaf = true;
}
}
Mesh *Octree::extractMesh(Octree *root,
ScalarField *geometry,
int &intersectionPreservingVerticesCount,
bool intersectionFree) {
assert(root);
auto *mesh = new Mesh();
std::unordered_set<Vertex *> indexed;
EdgePolygonSet edgePolygonSet;
generateVertexIndices(root, mesh, geometry, indexed);
contourCell(root, mesh, geometry, intersectionPreservingVerticesCount, edgePolygonSet, intersectionFree, 0);
return mesh;
}
void Octree::generateVertexIndices(Octree *node,
Mesh *mesh,
ScalarField *geometry,
std::unordered_set<Vertex *> &indexed) {
if (!node) {
return;
}
for (int i = 0; i < 8; ++i) {
generateVertexIndices(node->children[i], mesh, geometry, indexed);
}
if (node->isLeaf) {
for (auto &v : node->grid.vertices) {
mesh->addVertex(&v, geometry);
}
}
}
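// Standard dual-contouring recursion: contourCell visits the 8 child cells, then the
// 12 interior faces and 6 interior edges; contourFace and contourEdge keep
// subdividing until all four nodes around an edge are leaves, where generateQuad
// emits the polygon dual to that edge.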
void Octree::contourCell(Octree *root,
Mesh *mesh,
ScalarField *geometry,
int &intersectionPreservingVerticesCount,
EdgePolygonSet &edgePolygonSet,
bool intersectionFree,
float threshold) {
if (!root || root->isLeaf) {
return;
}
for (int i = 0; i < 8; ++i) {
contourCell(root->children[i],
mesh,
geometry,
intersectionPreservingVerticesCount,
edgePolygonSet,
intersectionFree,
threshold);
}
for (int i = 0; i < 12; ++i) {
Octree *nodes[2] = {
root->children[cellProcFaceMask[i][0]],
root->children[cellProcFaceMask[i][1]],
};
contourFace(nodes,
cellProcFaceMask[i][2],
mesh,
geometry,
intersectionPreservingVerticesCount,
edgePolygonSet,
intersectionFree,
threshold);
}
for (int i = 0; i < 6; ++i) {
Octree *nodes[4];
for (int j = 0; j < 4; ++j) {
nodes[j] = root->children[cellProcEdgeMask[i][j]];
}
contourEdge(nodes,
cellProcEdgeMask[i][4],
(cellProcEdgeMask[i][4] + 2) % 3,
geometry,
intersectionPreservingVerticesCount,
edgePolygonSet,
intersectionFree,
mesh,
threshold);
}
}
void Octree::contourFace(Octree *nodes[2],
int dir,
Mesh *mesh,
ScalarField *geometry,
int &intersectionPreservingVerticesCount,
EdgePolygonSet &edgePolygonSet,
bool intersectionFree,
float threshold) {
if (!nodes[0] || !nodes[1]) {
return;
}
if (nodes[0]->isLeaf && nodes[1]->isLeaf) {
return;
}
  // the subdivision of a face results in 4 child faces
for (int i = 0; i < 4; ++i) {
Octree *subdivision_face[2] = {nodes[0], nodes[1]};
for (int j = 0; j < 2; ++j) {
if (!subdivision_face[j]->isLeaf) {
subdivision_face[j] = subdivision_face[j]->children[faceProcFaceMask[dir][i][j]];
}
}
contourFace(subdivision_face,
faceProcFaceMask[dir][i][2],
mesh,
geometry,
intersectionPreservingVerticesCount,
edgePolygonSet,
intersectionFree,
threshold);
}
for (int i = 0; i < 4; ++i) {
Octree *edge_nodes[4];
const int c[4] =
{
faceProcEdgeMask[dir][i][1],
faceProcEdgeMask[dir][i][2],
faceProcEdgeMask[dir][i][3],
faceProcEdgeMask[dir][i][4],
};
for (int j = 0; j < 4; ++j) {
const int order = faceNodeOrder[j];
if (nodes[order]->isLeaf) {
edge_nodes[j] = nodes[order];
}
else {
edge_nodes[j] = nodes[order]->children[c[j]];
}
}
if (dir == 0 && faceProcEdgeMask[dir][i][5] == 2) {
;
}
contourEdge(edge_nodes,
faceProcEdgeMask[dir][i][5],
dir,
geometry,
intersectionPreservingVerticesCount,
edgePolygonSet,
intersectionFree,
mesh,
threshold);
}
}
void Octree::contourEdge(Octree **nodes,
int dir,
int quadDir2,
ScalarField *geometry,
int &intersectionPreservingVerticesCount,
EdgePolygonSet &edgePolygonSet,
bool intersectionFree,
Mesh *mesh,
float threshold) {
if (!nodes[0] || !nodes[1] || !nodes[2] || !nodes[3]) {
return;
}
if (nodes[0]->isLeaf && nodes[1]->isLeaf && nodes[2]->isLeaf && nodes[3]->isLeaf) {
generateQuad(nodes,
dir,
quadDir2,
geometry,
intersectionPreservingVerticesCount,
edgePolygonSet,
intersectionFree,
mesh,
threshold);
return;
}
int quadDir1 = 3 - dir - quadDir2;
  // the subdivision of an edge results in 2 child edges
for (int i = 0; i < 2; ++i) {
Octree *subdivision_edge[4];
for (int j = 0; j < 4; ++j) {
if (!nodes[j]->isLeaf) {
PositionCode code;
code[dir] = i;
code[quadDir1] = (3 - j) % 2;
code[quadDir2] = (3 - j) / 2;
subdivision_edge[j] = nodes[j]->children[encodeCell(code)];
}
else {
subdivision_edge[j] = nodes[j];
}
}
contourEdge(subdivision_edge,
dir,
quadDir2,
geometry,
intersectionPreservingVerticesCount,
edgePolygonSet,
intersectionFree,
mesh,
threshold);
}
}
void Octree::generateQuad(Octree **nodes,
int dir,
int quadDir2,
ScalarField *t,
int &,
EdgePolygonSet &,
bool,
Mesh *mesh,
float threshold) {
std::array<Octree *, 4> array = {{nodes[0], nodes[1], nodes[2], nodes[3]}};
RectilinearGrid::generateQuad(array, 3 - quadDir2 - dir, quadDir2, mesh, t, threshold);
}
void Octree::drawOctrees(Octree *root, Mesh *mesh) {
if (!root) {
return;
}
if (root->isLeaf) {
root->grid.draw(mesh);
return;
}
for (int i = 0; i < 8; ++i) {
drawOctrees(root->children[i], mesh);
}
}
void Octree::calClusterbility(Octree *root, ScalarField *s) {
if (!root || root->isLeaf) {
return;
}
for (int i = 0; i < 8; ++i) {
if (root->children[i] && !root->children[i]->clusterable) {
root->clusterable = false;
return;
}
}
for (int i = 0; i < 12; ++i) {
int leftIndex = cellProcFaceMask[i][0];
int rightIndex = cellProcFaceMask[i][1];
auto left = root->children[leftIndex] ? &root->children[leftIndex]->grid : nullptr;
auto right = root->children[rightIndex] ? &root->children[rightIndex]->grid : nullptr;
auto dir = cellProcFaceMask[i][2];
auto halfSize = (root->grid.maxCode - root->grid.minCode) / 2;
auto minCode = root->grid.minCode + decodeCell(leftIndex) * halfSize;
auto maxCode = root->grid.minCode + halfSize + decodeCell(rightIndex) * halfSize;
bool clusterable = RectilinearGrid::calClusterability(left, right, dir, minCode, maxCode, s);
if (!clusterable) {
root->clusterable = false;
return;
}
}
root->clusterable = true;
}
void Octree::combineComponents(ScalarField *s) {
assert(grid.components.empty());
auto halfSize = (grid.maxCode - grid.minCode) / 2;
RectilinearGrid xgridPool[2], ygridPool[4];
RectilinearGrid *xgrids[2];
for (int x = 0; x < 2; ++x) {
auto yMinCode = PositionCode(x, 0, 0) * halfSize + grid.minCode;
auto yMaxCode = PositionCode(x, 1, 1) * halfSize + halfSize + grid.minCode;
RectilinearGrid *ygrids[2];
for (int y = 0; y < 2; ++y) {
auto zMinCode = PositionCode(x, y, 0) * halfSize + grid.minCode;
auto zMaxCode = PositionCode(x, y, 1) * halfSize + halfSize + grid.minCode;
auto l =
children[encodeCell(PositionCode(x, y, 0))] ? &children[encodeCell(PositionCode(x, y, 0))]->grid : nullptr;
auto r =
children[encodeCell(PositionCode(x, y, 1))] ? &children[encodeCell(PositionCode(x, y, 1))]->grid : nullptr;
if (!l && !r) {
ygrids[y] = nullptr;
}
else {
ygrids[y] = &ygridPool[x * 2 + y];
ygrids[y]->minCode = zMinCode;
ygrids[y]->maxCode = zMaxCode;
ygrids[y]->assignSign(s);
if (l)
ygrids[y]->allQef.combine(l->allQef);
if (r)
ygrids[y]->allQef.combine(r->allQef);
RectilinearGrid::combineAAGrid(l, r, 2, ygrids[y]);
}
}
if (!ygrids[0] && !ygrids[1]) {
xgrids[x] = nullptr;
}
else {
xgrids[x] = &xgridPool[x];
xgrids[x]->minCode = yMinCode;
xgrids[x]->maxCode = yMaxCode;
xgrids[x]->assignSign(s);
if (ygrids[0])
xgrids[x]->allQef.combine(ygrids[0]->allQef);
if (ygrids[1])
xgrids[x]->allQef.combine(ygrids[1]->allQef);
RectilinearGrid::combineAAGrid(ygrids[0], ygrids[1], 1, xgrids[x]);
}
}
RectilinearGrid::combineAAGrid(xgrids[0], xgrids[1], 0, &grid);
std::set<Vertex *> coarserVertices;
for (auto &v : grid.vertices) {
coarserVertices.insert(&v);
}
for (auto child : children) {
if (child) {
for (Vertex &v : child->grid.vertices) {
if (v.parent) {
while (v.parent->parent) {
v.parent = v.parent->parent;
}
}
if (coarserVertices.find(v.parent) == coarserVertices.end()) {
v.parent = nullptr;
}
}
}
}
// to avoid a specific MC case
int count = 0;
for (auto c : grid.components) {
count += c.pointCount;
}
if (count != grid.allQef.pointCount) {
clusterable = false;
for (auto child : children) {
if (child) {
for (Vertex &v : child->grid.vertices) {
v.parent = nullptr;
}
}
}
}
}
<|start_filename|>src/KdtreeISO/include/Vertex.h<|end_filename|>
//
// Created by Danielhu on 2018/5/1.
//
#ifndef VOXELWORLD_VERTEX_H
#define VOXELWORLD_VERTEX_H
#include <glm/glm.hpp>
struct Vertex {
Vertex *parent;
unsigned int vertexIndex;
float error;
glm::fvec3 hermiteP;
glm::fvec3 hermiteN;
explicit Vertex(glm::fvec3 hermiteP)
: parent(nullptr), vertexIndex(0), error(-1.f), hermiteP(hermiteP), hermiteN(glm::fvec3(0)) {}
Vertex() = default;
};
#endif //VOXELWORLD_VERTEX_H
<|start_filename|>src/KdtreeISO/lib/Topology.cpp<|end_filename|>
//
// Created by Danielhu on 2018/1/13.
//
#include <glm/glm.hpp>
#include <algorithm>
#include <iostream>
#include <RectilinearGrid.h>
#include "Topology.h"
#include "Utils.h"
using glm::dot;
using glm::fvec3;
using glm::fvec4;
using glm::max;
using glm::min;
float gradient_offset = 0.01f;
float divergence_offset = 0.01f;
float Topology::index(const PositionCode &code) {
return value(codeToPos(code, RectilinearGrid::getUnitSize()));
}
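// Find the zero crossing between p1 and p2 by bisection (16 fixed iterations),
// assuming the field changes sign between the two endpoints.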
bool Topology::solve(const fvec3 &p1, const fvec3 &p2, fvec3 &out) {
auto offset = p2 - p1;
float min = 0.f;
float max = 1.f;
float mid = (min + max) / 2.f;
for (int i = 0; i < 16; ++i) {
float l = value(p1 + offset * min);
mid = (min + max) / 2.f;
float midsign = value(p1 + offset * mid);
if ((l >= 0 && midsign < 0) || (l < 0 && midsign >= 0)) {
max = mid;
}
else {
min = mid;
}
}
out = p1 + offset * mid;
return true;
}
float Topology::laplaceOperator(const fvec3 &p) {
float lx = gradient(p + fvec3(divergence_offset, 0.f, 0.f)).x - gradient(p - fvec3(divergence_offset, 0.f, 0.f)).x;
float ly = gradient(p + fvec3(0.f, divergence_offset, 0.f)).y - gradient(p - fvec3(0.f, divergence_offset, 0.f)).y;
float lz = gradient(p + fvec3(0.f, 0.f, divergence_offset)).z - gradient(p - fvec3(0.f, 0.f, divergence_offset)).z;
return (lx + ly + lz) / divergence_offset;
}
float Transform::value(const fvec3 &p) {
fvec3 t = fvec4(p, 1.0) * trans_;
return inner_->value(t);
}
float Sphere::value(const fvec3 &p) {
fvec3 off = fvec3(p.x - center.x, p.y - center.y, p.z - center.z);
float l = glm::length(off);
auto d = l - radius;
return d;
}
// bool Sphere::solve(const fvec3 &p1, const fvec3 &p2, fvec3 &out) {
// fvec3 p1p2 = p1 - p2;
// float a = dot(p1p2, p1p2);
// float b = 2 * dot(p2 - center, p1 - p2);
// fvec3 p2c = p2 - center;
// float c = dot(p2c, p2c) - dot(radius, radius);
// float delta = b * b - 4.f * a * c;
// if (delta < 0) {
// return false;
// }
// float sqrt_delta = sqrt(delta);
// float t1 = (-b + sqrt_delta) / (2 * a);
// float t2 = (-b - sqrt_delta) / (2 * a);
// if (t1 >= 0.f && t1 <= 1.f) {
// out = t1 * p1p2 + p2;
// return true;
// }
// if (t2 >= 0.f && t2 <= 1.f) {
// out = t2 * p1p2 + p2;
// return true;
// }
// return false;
// }
float AABB::value(const fvec3 &p) {
fvec3 offset = glm::abs(p - (min_ + max_) / 2.f);
offset -= (max_ - min_) / 2.f;
return min(length(offset), max(offset.x, max(offset.y, offset.z)));
}
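// Heart-shaped implicit surface (x^2 + 9/4 y^2 + z^2 - 1)^3 - x^2 z^3 - 9/80 y^2 z^3,
// evaluated in scaled, recentred coordinates with the y and z axes swapped.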
float Heart::value(const fvec3 &p) {
fvec3 offset = (p - center) / scale;
float x = offset.x, y = offset.z, z = offset.y;
float a = x * x + 9.0f / 4.0f * y * y + z * z - 1;
return a * a * a - x * x * z * z * z - 9.0f / 80.0f * y * y * z * z * z;
}
<|start_filename|>src/KdtreeISO/lib/Qef.cpp<|end_filename|>
//
// Created by Danielhu on 2018/1/16.
//
#define GLM_FORCE_CTOR_INIT
#include <glm/glm.hpp>
#include "Qef.h"
#define SVD_NUM_SWEEPS 5
const float Tiny_Number = 1.e-4f;
// const float Tiny_Number_Erroring = 1.e-8;
glm::fvec3 diag_of_mul(const glm::fvec3 &v1T, const glm::fvec3 v2) {
return v1T * v2;
}
glm::fvec3 diag(const glm::mat3 &m) {
return glm::fvec3(m[0][0], m[1][1], m[2][2]);
}
glm::fvec3 svd_vmul_sym(const glm::mat3x3 &a, const glm::fvec3 &v) {
return glm::fvec3(
(a[0][0] * v.x) + (a[0][1] * v.y) + (a[0][2] * v.z),
(a[0][1] * v.x) + (a[1][1] * v.y) + (a[1][2] * v.z),
(a[0][2] * v.x) + (a[1][2] * v.y) + (a[2][2] * v.z));
}
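// QEF residual for a candidate position x: E(x) = x^T ATA x - 2 x^T ATb + btb.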
float qef_calc_error(const glm::mat3x3 &ATA, const glm::fvec3 &x, const glm::fvec3 &ATb, const float btb) {
glm::fvec3 atax = svd_vmul_sym(ATA, x);
return glm::dot(x, atax) - 2 * glm::dot(x, ATb) + btb;
}
glm::fvec3 qef_calc_co_variance(const glm::mat3x3 &ATA, const glm::fvec3 &x, const glm::fvec3 &diag_ATc, const glm::fvec3 &diag_ctc) {
return x * diag(ATA) * x - 2.f * (x * diag_ATc) + diag_ctc;
}
void svd_rotate_xy(float &x, float &y, float c, float s) {
float u = x;
float v = y;
x = c * u - s * v;
y = s * u + c * v;
}
void svd_rotateq_xy(float &x, float &y, float a, float c, float s) {
float cc = c * c;
float ss = s * s;
float mx = 2.0f * c * s * a;
float u = x;
float v = y;
x = cc * u - mx + ss * v;
y = ss * u + mx + cc * v;
}
float svd_invdet(float x, float tol) {
return (std::abs(x) < tol || std::abs(1.0f / x) < tol) ? 0.0f : 1.0f / x;
}
void svd_pseudoinverse(glm::mat3x3 &o, const glm::fvec3 &sigma, const glm::mat3x3 &v) {
float d0 = svd_invdet(sigma[0], Tiny_Number);
float d1 = svd_invdet(sigma[1], Tiny_Number);
float d2 = svd_invdet(sigma[2], Tiny_Number);
o = glm::mat3(v[0][0] * d0 * v[0][0] + v[0][1] * d1 * v[0][1] + v[0][2] * d2 * v[0][2],
v[0][0] * d0 * v[1][0] + v[0][1] * d1 * v[1][1] + v[0][2] * d2 * v[1][2],
v[0][0] * d0 * v[2][0] + v[0][1] * d1 * v[2][1] + v[0][2] * d2 * v[2][2],
v[1][0] * d0 * v[0][0] + v[1][1] * d1 * v[0][1] + v[1][2] * d2 * v[0][2],
v[1][0] * d0 * v[1][0] + v[1][1] * d1 * v[1][1] + v[1][2] * d2 * v[1][2],
v[1][0] * d0 * v[2][0] + v[1][1] * d1 * v[2][1] + v[1][2] * d2 * v[2][2],
v[2][0] * d0 * v[0][0] + v[2][1] * d1 * v[0][1] + v[2][2] * d2 * v[0][2],
v[2][0] * d0 * v[1][0] + v[2][1] * d1 * v[1][1] + v[2][2] * d2 * v[1][2],
v[2][0] * d0 * v[2][0] + v[2][1] * d1 * v[2][1] + v[2][2] * d2 * v[2][2]);
}
void givens_coeffs_sym(float a_pp, float a_pq, float a_qq, float &c, float &s) {
if (a_pq == 0.0f) {
c = 1.0f;
s = 0.0f;
return;
}
float tau = (a_qq - a_pp) / (2.0f * a_pq);
float stt = sqrt(1.0f + tau * tau);
float tan = 1.0f / (tau >= 0.0f ? tau + stt : tau - stt);
c = 1.0f / sqrt(1.0f + tan * tan);
s = tan * c;
}
void svd_rotate(glm::mat3x3 &vtav, glm::mat3x3 &v, int a, int b) {
if (vtav[a][b] == 0.0)
return;
float c = 0.f, s = 0.f;
givens_coeffs_sym(vtav[a][a], vtav[a][b], vtav[b][b], c, s);
float x, y;
x = vtav[a][a];
y = vtav[b][b];
svd_rotateq_xy(x, y, vtav[a][b], c, s);
vtav[a][a] = x;
vtav[b][b] = y;
x = vtav[0][3 - b];
y = vtav[1 - a][2];
svd_rotate_xy(x, y, c, s);
vtav[0][3 - b] = x;
vtav[1 - a][2] = y;
vtav[a][b] = 0.0f;
x = v[0][a];
y = v[0][b];
svd_rotate_xy(x, y, c, s);
v[0][a] = x;
v[0][b] = y;
x = v[1][a];
y = v[1][b];
svd_rotate_xy(x, y, c, s);
v[1][a] = x;
v[1][b] = y;
x = v[2][a];
y = v[2][b];
svd_rotate_xy(x, y, c, s);
v[2][a] = x;
v[2][b] = y;
}
void svd_solve_sym(glm::mat3x3 vtav, glm::fvec3 &sigma, glm::mat3x3 &v) {
// assuming that A is symmetric: can optimize all operations for
  // the upper-right triangular part
// assuming V is identity: you can also pass a matrix the rotations
// should be applied to
// U is not computed
for (int i = 0; i < SVD_NUM_SWEEPS; ++i) {
svd_rotate(vtav, v, 0, 1);
svd_rotate(vtav, v, 0, 2);
svd_rotate(vtav, v, 1, 2);
}
sigma = glm::fvec3(vtav[0][0], vtav[1][1], vtav[2][2]);
}
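// Solve ATA * x = ATb by diagonalizing ATA with Jacobi sweeps and applying the
// pseudoinverse, which drops near-singular directions (tolerance Tiny_Number).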
glm::fvec3 svd_solve_ATA_ATb(const glm::mat3x3 &ATA, const glm::fvec3 &ATb) {
glm::mat3x3 V;
glm::fvec3 sigma;
svd_solve_sym(ATA, sigma, V);
// A = UEV^T; U = A / (E*V^T)
glm::mat3x3 Vinv;
svd_pseudoinverse(Vinv, sigma, V);
glm::fvec3 x = Vinv * ATb;
return x;
}
void QefSolver::reset() {
ATA = glm::mat4(0.f);
ATb = glm::fvec3(0.f);
btb = 0.f;
massPointSum = glm::fvec3(0.f);
averageNormalSum = glm::fvec3(0.f);
pointCount = 0;
}
void QefSolver::set(const QefSolver &other) {
ATA[0][0] = other.ATA[0][0];
ATA[1][1] = other.ATA[1][1];
ATA[2][2] = other.ATA[2][2];
ATA[0][1] = other.ATA[0][1];
ATA[0][2] = other.ATA[0][2];
ATA[1][2] = other.ATA[1][2];
ATb = other.ATb;
btb = other.btb;
massPointSum = other.massPointSum;
averageNormalSum = other.averageNormalSum;
pointCount = other.pointCount;
calRoughness();
}
void QefSolver::combine(const QefSolver &other) {
ATA[0][0] += other.ATA[0][0];
ATA[1][1] += other.ATA[1][1];
ATA[2][2] += other.ATA[2][2];
ATA[0][1] += other.ATA[0][1];
ATA[0][2] += other.ATA[0][2];
ATA[1][2] += other.ATA[1][2];
ATb += other.ATb;
diag_ATc += other.diag_ATc;
btb += other.btb;
diag_ctc += other.diag_ctc;
massPointSum += other.massPointSum;
pointCount += other.pointCount;
averageNormalSum += other.averageNormalSum;
calRoughness();
}
void QefSolver::separate(const QefSolver &other) {
ATA[0][0] -= other.ATA[0][0];
ATA[1][1] -= other.ATA[1][1];
ATA[2][2] -= other.ATA[2][2];
ATA[0][1] -= other.ATA[0][1];
ATA[0][2] -= other.ATA[0][2];
ATA[1][2] -= other.ATA[1][2];
ATb -= other.ATb;
btb -= other.btb;
massPointSum -= other.massPointSum;
pointCount -= other.pointCount;
averageNormalSum -= other.averageNormalSum;
calRoughness();
}
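// Accumulate one hermite sample (point p, normal n) into the normal equations:
// only the upper triangle of ATA is stored, together with ATb, btb, the mass-point
// sum and the averaged-normal sum used for the roughness estimate.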
void QefSolver::add(const glm::fvec3 &p, const glm::fvec3 &n) {
ATA[0][0] += n.x * n.x;
ATA[0][1] += n.x * n.y;
ATA[0][2] += n.x * n.z;
ATA[1][1] += n.y * n.y;
ATA[1][2] += n.y * n.z;
ATA[2][2] += n.z * n.z;
float dotp = glm::dot(p, n);
glm::fvec3 c = p * n;
ATb += n * dotp;
diag_ATc += n * c;
btb += dotp * dotp;
diag_ctc += c * c;
pointCount++;
massPointSum += p;
averageNormalSum += n;
}
void QefSolver::calRoughness() {
roughness = 1.f - glm::length(averageNormalSum) / (float)pointCount;
}
float QefSolver::getError(const glm::fvec3 &p) {
return qef_calc_error(ATA, p, ATb, btb);
}
float QefSolver::getError() {
return qef_calc_error(ATA, ATb, ATb, btb);
}
glm::fvec3 QefSolver::getVariance(const glm::fvec3 &p) {
auto v = qef_calc_co_variance(ATA, p, diag_ATc, diag_ctc);
return v;
}
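// Solve the QEF relative to the mass point (which keeps the system well conditioned),
// shift the solution back, and report the residual error at the solved position.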
void QefSolver::solve(glm::fvec3 &hermiteP, float &error) {
if (pointCount > 0) {
calRoughness();
glm::fvec3 massPoint = massPointSum / (float)pointCount;
glm::fvec3 _ATb = ATb - svd_vmul_sym(ATA, massPoint);
hermiteP = svd_solve_ATA_ATb(ATA, _ATb);
hermiteP += massPoint;
error = qef_calc_error(ATA, hermiteP, ATb, btb);
assert(!isnan(hermiteP.x));
}
}
<|start_filename|>src/examples/opengl/program.cpp<|end_filename|>
#define GLM_FORCE_CTOR_INIT
#include "program.h"
#include <fstream>
#include <sstream>
#include <iostream>
// ----------------------------------------------------------------------------
Program::Program()
: id(0) {
}
// ----------------------------------------------------------------------------
Program::~Program() {
if (id > 0) {
glDeleteProgram(id);
}
}
// ----------------------------------------------------------------------------
bool Program::init(const char *vert, const char *frag) {
id = glCreateProgram();
return compile(GL_VERTEX_SHADER, vert) && compile(GL_FRAGMENT_SHADER, frag) && link();
}
void Program::use() {
glUseProgram(id);
}
// ----------------------------------------------------------------------------
void Program::printShaderInfo(GLuint shader) const {
static char buffer[2048];
int length = 0;
// request at most sizeof(buffer) bytes to avoid overflowing the static buffer
glGetShaderInfoLog(shader, sizeof(buffer), &length, buffer);
printf("%s\n", buffer);
}
// ----------------------------------------------------------------------------
void Program::printProgramInfo(GLuint program) const {
static char buffer[2048];
int length = 0;
// request at most sizeof(buffer) bytes to avoid overflowing the static buffer
glGetProgramInfoLog(program, sizeof(buffer), &length, buffer);
printf("%s\n", buffer);
}
bool Program::compile(GLenum type, const char *src) {
const GLuint shader = glCreateShader(type);
glShaderSource(shader, 1, &src, nullptr);
glCompileShader(shader);
GLint status = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
if (status == GL_FALSE) {
printShaderInfo(shader);
return false;
}
glAttachShader(id, shader);
shaders.push_back(shader);
return true;
}
// ----------------------------------------------------------------------------
bool Program::link() {
if (shaders.empty()) {
return false;
}
glLinkProgram(id);
printProgramInfo(id);
GLenum err = glGetError();
if (err != GL_NO_ERROR) {
return false;
}
GLint status = 0;
glGetProgramiv(id, GL_LINK_STATUS, &status);
if (status == GL_FALSE) {
return false;
}
for (unsigned int shader : shaders) {
glDetachShader(id, shader);
glDeleteShader(shader);
}
shaders.clear();
return true;
}
const GLint Program::getUniformLocation(const char *name) {
if (locations.find(name) == locations.end()) {
const GLint location = glGetUniformLocation(id, name);
locations.insert({std::string(name), (int)location});
}
return locations[name];
}
bool Program::setMat4(const char *name, const glm::mat4 &uniform) {
const GLint location = getUniformLocation(name);
if (location == -1) {
return false;
}
glUniformMatrix4fv(location, 1, GL_FALSE, glm::value_ptr(uniform));
return true;
}
bool Program::setVec3(const char *name, const glm::fvec3 &uniform) {
const GLint location = getUniformLocation(name);
if (location == -1) {
return false;
}
glUniform3fv(location, 1, glm::value_ptr(uniform));
return true;
}
bool Program::setFloat(const char *name, float uniform) {
const GLint location = getUniformLocation(name);
if (location == -1) {
return false;
}
glUniform1f(location, uniform);
return true;
}
<|start_filename|>src/KdtreeISO/lib/Mesh.cpp<|end_filename|>
//
// Created by Danielhu on 2018/1/22.
//
#include "Mesh.h"
#include "Indicators.h"
void Mesh::generateFlatNormals() {
normals.resize(positions.size());
std::vector<glm::fvec3> flat_positions;
std::vector<glm::fvec3> flat_normals;
std::vector<unsigned int> flat_indices;
for (unsigned int i = 0; i < indices.size() / 3; ++i) {
glm::fvec3 normal =
normals[indices[i * 3 + 0]] +
normals[indices[i * 3 + 1]] +
normals[indices[i * 3 + 2]];
glm::vec3 c1 = glm::normalize(positions[indices[i * 3 + 0]] - positions[indices[i * 3 + 1]]);
glm::vec3 c2 = glm::normalize(positions[indices[i * 3 + 0]] - positions[indices[i * 3 + 2]]);
c1 -= glm::dot(c1, c2) * c2;
c1 = glm::normalize(c1);
// float d = glm::dot(c1, c2);
normal -= glm::dot(normal, c1) * c1;
normal -= glm::dot(normal, c2) * c2;
if (normal == glm::fvec3(0.f)) {
continue;
}
normal = glm::normalize(normal);
for (unsigned int j = 0; j < 3; ++j) {
flat_normals.push_back(normal);
flat_positions.push_back(positions[indices[i * 3 + j]]);
flat_indices.push_back(3 * i + j);
}
}
positions = flat_positions;
normals = flat_normals;
indices = flat_indices;
}
void Mesh::addVertex(Vertex *v, ScalarField *g) {
g->normal(v->hermiteP, v->hermiteN);
v->vertexIndex = static_cast<unsigned int>(positions.size());
positions.push_back(v->hermiteP);
normals.push_back(v->hermiteN);
}
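// Emit one triangle. For every corner the scalar field is re-sampled slightly
// toward the opposite edge; if that normal deviates from the stored vertex
// normal by more than ~15 degrees the corner gets its own duplicated vertex
// (preserving a sharp feature), otherwise the shared vertex index is reused.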
void Mesh::addTriangle(Vertex **vertices, ScalarField *g) {
for (int j = 0; j < 3; ++j) {
auto targetVert = vertices[j];
Vertex *adjacentVerts[2] = {vertices[(j + 1) % 3], vertices[(j + 2) % 3]};
glm::fvec3 offset =
adjacentVerts[1]->hermiteP - targetVert->hermiteP +
adjacentVerts[0]->hermiteP - targetVert->hermiteP;
offset *= 0.05f;
glm::fvec3 normal;
g->normal(targetVert->hermiteP + offset, normal);
if (glm::dot(normal, targetVert->hermiteN) < std::cos(glm::radians(15.f))) {
indices.push_back(static_cast<unsigned int>(positions.size()));
positions.push_back(targetVert->hermiteP);
normals.push_back(normal);
}
else {
indices.push_back(targetVert->vertexIndex);
}
}
}
void Mesh::drawAABBDebug(glm::fvec3 min, glm::fvec3 max) {
auto offset = max - min;
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 2; ++j) {
fvec3 quad[4];
for (int k = 0; k < 4; ++k) {
quad[k] = min + offset * min_offset_subdivision(cellProcFaceMask[i * 4 + k][j]);
}
int quadIndices[] = {0, 1, 2, 1, 2, 3};
for (auto quadIndex : quadIndices) {
positions.push_back(quad[quadIndex]);
normals.push_back(glm::normalize(quad[quadIndex] - (min + max) / 2.f));
indices.push_back(static_cast<unsigned int>(indices.size()));
}
}
}
}
<|start_filename|>src/KdtreeISO/lib/VolumeData.cpp<|end_filename|>
//
// Created by Danielhu on 2018/5/29.
//
#include "VolumeData.h"
#include "Indicators.h"
#include "RectilinearGrid.h"
#include <glm/gtc/type_precision.hpp>
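// Load a stack of 8-bit single-channel TIFF slices named 001.tif, 002.tif, ...
// from pathToTiffs into a contiguous width * height * levels voxel array, one
// scanline at a time.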
void VolumeData::readTIFF() {
TIFF *firstFile = TIFFOpen((pathToTiffs + "001.tif").c_str(), "r");
if (!firstFile)
throw "no .tif file found";
TIFFGetField(firstFile, TIFFTAG_IMAGEWIDTH, &width);
TIFFGetField(firstFile, TIFFTAG_IMAGELENGTH, &height);
data.resize(width * height * levels);
uint8_t *p = data.data();
for (int i = 0; i < levels; ++i) {
std::stringstream namess;
namess << std::setfill('0') << std::setw(3) << i + 1;
auto name = (pathToTiffs + namess.str() + ".tif");
if (TIFF *file = TIFFOpen(name.c_str(), "r")) {
for (int h = 0; h < height; ++h) {
TIFFReadScanline(file, p, h);
p += width;
}
TIFFClose(file);
}
}
}
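// Sample the volume at an integer position code: average the scale-sized
// block of voxels covering that code (a simple box filter when the grid is
// coarser than the data) and return the value relative to the isovalue, so
// the extracted surface lies at zero; codes outside the loaded volume simply
// return isovalue.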
float VolumeData::index(const PositionCode &code) {
auto offset = codeToOffset((code - minCode) * scale);
if (offset >= width * height * levels || offset < 0) {
return isovalue;
}
// return ISO_VAL - (float)data[codeToOffset((code - minCode) * scale)];
float result = 0;
for (int x = 0; x < scale.x; ++x)
for (int y = 0; y < scale.y; ++y)
for (int z = 0; z < scale.z; ++z) {
result += (float)data[codeToOffset((code - minCode) * scale + PositionCode(x, y, z))];
}
return result / (scale.x * scale.y * scale.z) - isovalue;
}
float VolumeData::value(const glm::fvec3 &p) {
float l = RectilinearGrid::getUnitSize();
return index(posToCode(p, l));
/* PositionCode samples[8];
float values[8];
for (int i = 0; i < 8; ++i) {
samples[i] = posToCodeFloor(p + l * min_offset_subdivision(i), l);
values[i] = index(samples[i]);
}
fvec3 d = (p - codeToPos(samples[0], l)) / l;
d = glm::max(fvec3(0), glm::min(fvec3(1), d));
float c00 = values[0b000] * (1 - d.x) + values[0b100] * d.x;
float c01 = values[0b001] * (1 - d.x) + values[0b101] * d.x;
float c10 = values[0b010] * (1 - d.x) + values[0b110] * d.x;
float c11 = values[0b011] * (1 - d.x) + values[0b111] * d.x;
float c0 = c00 * (1 - d.y) + c10 * d.y;
float c1 = c01 * (1 - d.y) + c11 * d.y;
float c = c0 * (1 - d.z) + c1 * d.z;
return c;*/
}
bool VolumeData::solve(const glm::fvec3 &p1, const glm::fvec3 &p2,
glm::fvec3 &out) {
float v1 = value(p1);
float v2 = value(p2);
if (v2 - v1 == 0.f) {
out = (p1 + p2) / 2.f;
}
else {
out = p1 - (p2 - p1) * v1 / (v2 - v1);
}
return true;
}
<|start_filename|>src/KdtreeISO/include/Indicators.h<|end_filename|>
//
// Created by Danielhu on 2018/4/20.
//
#ifndef VOXELWORLD_INDICATORS_H
#define VOXELWORLD_INDICATORS_H
#include "Utils.h"
using glm::fvec2;
using glm::fvec3;
using glm::ivec3;
inline int encodeCell(ivec3 code) {
return code.x * 4 + code.y * 2 + code.z;
}
inline const fvec3 &min_offset_subdivision(int i) {
static const fvec3 offsets[8] = {
fvec3(0.f, 0.f, 0.f),
fvec3(0.f, 0.f, 1.f),
fvec3(0.f, 1.f, 0.f),
fvec3(0.f, 1.f, 1.f),
fvec3(1.f, 0.f, 0.f),
fvec3(1.f, 0.f, 1.f),
fvec3(1.f, 1.f, 0.f),
fvec3(1.f, 1.f, 1.f),
};
assert(i >= 0 && i < 8);
return offsets[i];
};
inline const PositionCode &decodeCell(int i) {
static const PositionCode offsets[8] = {
PositionCode(0, 0, 0),
PositionCode(0, 0, 1),
PositionCode(0, 1, 0),
PositionCode(0, 1, 1),
PositionCode(1, 0, 0),
PositionCode(1, 0, 1),
PositionCode(1, 1, 0),
PositionCode(1, 1, 1),
};
assert(i >= 0 && i < 8);
return offsets[i];
};
inline const fvec3 &directionMap(int i) {
static const fvec3 offsets[3] = {
fvec3(1.f, 0.f, 0.f),
fvec3(0.f, 1.f, 0.f),
fvec3(0.f, 0.f, 1.f),
};
assert(i >= 0 && i < 3);
return offsets[i];
}
// from original dc implementation
const int edge_map[12][2] = {
{0, 4}, {1, 5}, {2, 6}, {3, 7}, // x-axis
{0, 2},
{1, 3},
{4, 6},
{5, 7}, // y-axis
{0, 1},
{2, 3},
{4, 5},
{6, 7} // z-axis
};
const int cellProcFaceMask[12][3] =
{{0, 4, 0}, {1, 5, 0}, {2, 6, 0}, {3, 7, 0}, {0, 2, 1}, {4, 6, 1}, {1, 3, 1}, {5, 7, 1}, {0, 1, 2}, {2, 3, 2}, {4, 5, 2}, {6, 7, 2}};
const int cellProcEdgeMask[6][5] =
{{0, 2, 1, 3, 0}, {4, 6, 5, 7, 0}, {0, 1, 4, 5, 1}, {2, 3, 6, 7, 1}, {0, 4, 2, 6, 2}, {1, 5, 3, 7, 2}};
const int faceProcFaceMask[3][4][3] = {
{{4, 0, 0}, {5, 1, 0}, {6, 2, 0}, {7, 3, 0}},
{{2, 0, 1}, {6, 4, 1}, {3, 1, 1}, {7, 5, 1}},
{{1, 0, 2}, {3, 2, 2}, {5, 4, 2}, {7, 6, 2}}};
const int edgeTestNodeOrder[4][2] = {{0, 1}, {3, 2}, {1, 2}, {0, 3}};
inline const fvec3 &faceSubDivision(int dir, int i) {
static const fvec3 offsets[3][4] = {
{
fvec3(0.f, 0.f, 0.f),
fvec3(0.f, 0.f, 1.f),
fvec3(0.f, 1.f, 0.f),
fvec3(0.f, 1.f, 1.f),
},
{
fvec3(0.f, 0.f, 0.f),
fvec3(1.f, 0.f, 0.f),
fvec3(0.f, 0.f, 1.f),
fvec3(1.f, 0.f, 1.f),
},
{
fvec3(0.f, 0.f, 0.f),
fvec3(0.f, 1.f, 0.f),
fvec3(1.f, 0.f, 0.f),
fvec3(1.f, 1.f, 0.f),
},
};
assert(i >= 0 && i < 4);
return offsets[dir][i];
};
const int faceNodeOrder[4] = {0, 0, 1, 1};
const int faceProcEdgeMask[3][4][6] = {
{{1, 4, 5, 0, 1, 1}, {1, 6, 7, 2, 3, 1}, {0, 4, 6, 0, 2, 2}, {0, 5, 7, 1, 3, 2}},
{{0, 2, 3, 0, 1, 0}, {0, 6, 7, 4, 5, 0}, {1, 2, 6, 0, 4, 2}, {1, 3, 7, 1, 5, 2}},
{{1, 1, 3, 0, 2, 0}, {1, 5, 7, 4, 6, 0}, {0, 1, 5, 0, 4, 1}, {0, 3, 7, 2, 6, 1}}};
inline const fvec3 &edgeProcDir(int i, int j) {
const static fvec3 dirs[3][4] = {
{
fvec3(0.f, -1.f, -1.f),
fvec3(0.f, -1.f, 1.f),
fvec3(0.f, 1.f, 1.f),
fvec3(0.f, 1.f, -1.f),
},
{
fvec3(-1.f, 0.f, -1.f),
fvec3(-1.f, 0.f, 1.f),
fvec3(1.f, 0.f, 1.f),
fvec3(1.f, 0.f, -1.f),
},
{
fvec3(-1.f, -1.f, 0.f),
fvec3(1.f, -1.f, 0.f),
fvec3(1.f, 1.f, 0.f),
fvec3(-1.f, 1.f, 0.f),
},
};
assert(i >= 0 && i < 3 && j >= 0 && j < 4);
return dirs[i][j];
};
const int edgeProcEdgeMask[3][2][4] = {
{{3, 1, 2, 0}, {7, 5, 6, 4}},
{{5, 4, 1, 0}, {7, 6, 3, 2}},
{{6, 2, 4, 0}, {7, 3, 5, 1}},
};
const int planeSpreadingDir[3][2][4] = {
{{0, 2, 3, 1}, {4, 6, 7, 5}},
{{0, 1, 5, 4}, {2, 3, 7, 6}},
{{0, 4, 6, 2}, {1, 5, 7, 3}},
};
const int intergralOrder[8] = {
0, 1, 2, 4, 3, 5, 6, 7};
//
//const int planeSpreadingMask[8][8] = {
// {0, 1, 2, 4, 3, 5, 6, 7},
// {1, 0, 3, 5, 2, 4, 7, 6},
// {2, 0, 3, 6, 1, 4, 7, 5},
// {3, 1, 2, 7, 0, 5, 6, 4},
// {4, 0, 5, 6, 1, 2, 7, 3},
// {5, 1, 4, 7, 0, 3, 6, 2},
// {6, 2, 4, 7, 0, 3, 5, 1},
// {7, 3, 5, 6, 1, 2, 4, 0},
//};
//
//const int adjacentNodes[8][8] = {
// {0, 1, 1, 0, 1, 0, 0, 0,},
// {1, 0, 0, 1, 0, 1, 0, 0,},
// {1, 0, 0, 1, 0, 0, 1, 0,},
// {0, 1, 1, 0, 0, 0, 0, 1,},
// {1, 0, 0, 0, 0, 1, 1, 0,},
// {0, 1, 0, 0, 1, 0, 0, 1,},
// {0, 0, 1, 0, 1, 0, 0, 1,},
// {0, 0, 0, 1, 0, 1, 1, 0,},
//};
const int dirRelatedEdge[8][8][3] = {
{
{-1, -1, -1},
{-1, 2, 6},
{-1, 1, 10},
{-1, -1, 0},
{-1, 5, 9},
{-1, -1, 4},
{-1, -1, 8},
{0, 4, 8},
},
{
{-1, 3, 11},
{-1, -1, -1},
{-1, -1, 1},
{-1, 0, 10},
{-1, -1, 5},
{-1, 4, 9},
{1, 5, 8},
{-1, -1, 8},
},
{
{-1, 11, 3},
{-1, -1, 2},
{-1, -1, -1},
{-1, 0, 6},
{-1, -1, 9},
{2, 4, 9},
{-1, 5, 8},
{-1, -1, 4},
},
{
{-1, -1, 3},
{-1, 2, 11},
{-1, 1, 7},
{-1, -1, -1},
{3, 5, 9},
{-1, -1, 9},
{-1, -1, 5},
{-1, 4, 8},
},
{
{-1, 7, 11},
{-1, -1, 5},
{-1, -1, 10},
{0, 6, 10},
{-1, -1, -1},
{-1, 2, 4},
{-1, 1, 8},
{-1, -1, 0},
},
{{-1, -1, 7}, {-1, 0, 11}, {1, 7, 10}, {-1, -1, 10}, {-1, 3, 5}, {-1, -1, -1}, {-1, -1, 1}, {-1, 1, 8}},
{
{-1, -1, 11},
{2, 6, 11},
{-1, 7, 10},
{-1, -1, 6},
{-1, 3, 9},
{-1, -1, 2},
{-1, -1, -1},
{-1, 1, 4},
},
{
{3, 7, 11},
{-1, -1, 11},
{-1, -1, 7},
{-1, 6, 10},
{-1, -1, 3},
{-1, 2, 9},
{-1, 1, 5},
{-1, -1, -1},
}};
const int processEdgeMask[3][4] = {{3, 2, 1, 0}, {7, 5, 6, 4}, {11, 10, 9, 8}};
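// Quad corners are indexed 0..3 as a 2-bit code (i / 2, i % 2) over the two
// directions spanning the quad: oppositeQuadIndex flips the low bit (the
// neighbour along the first direction) and symmetryQuadIndex flips both bits
// (the diagonally opposite corner).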
constexpr int oppositeQuadIndex(int i) {
return (i / 2) * 2 + 1 - i % 2;
}
constexpr int symmetryQuadIndex(int i) {
return (1 - i / 2) * 2 + 1 - i % 2;
}
inline void quadIndex(int quadDir1, int quadDir2, int i, int &p1, int &p2) {
ivec3 code(0);
code[quadDir1] = i % 2;
code[quadDir2] = i / 2;
p1 = encodeCell(code);
code[3 - quadDir1 - quadDir2] = 1;
p2 = encodeCell(code);
}
//const int triangleIndices[6] = {0, 1, 2, 0, 2, 3};
//const int triangleIndicesFlip[6] = {0, 3, 2, 0, 2, 1};
#endif //VOXELWORLD_INDICATORS_H
<|start_filename|>src/KdtreeISO/include/Qef.h<|end_filename|>
//
// Created by Danielhu on 2018/1/16.
//
#ifndef VOXELWORLD_QEF_H
#define VOXELWORLD_QEF_H
#include <glm/glm.hpp>
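// Quadric error function (QEF) accumulator used by the dual-contouring style
// extractors: add() collects a plane constraint n . (x - p) = 0 per hermite
// sample, combine()/separate() merge or remove whole accumulators, and
// solve() returns the least-squares vertex together with its residual error.
// Rough usage sketch: call add() for every edge intersection found in a cell,
// then solve() to place that cell's vertex.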
struct QefSolver {
glm::mat3x3 ATA;
glm::fvec3 ATb;
glm::fvec3 diag_ATc;
float btb;
glm::fvec3 diag_ctc;
glm::fvec3 massPointSum;
glm::fvec3 averageNormalSum;
float roughness;
int pointCount;
void reset();
void set(const QefSolver &other);
void combine(const QefSolver &other);
void separate(const QefSolver &other);
void add(const glm::fvec3 &p, const glm::fvec3 &n);
void solve(glm::fvec3 &hermiteP, float &error);
void calRoughness();
float getError(const glm::fvec3 &p);
float getError();
glm::fvec3 getVariance(const glm::fvec3 &p);
QefSolver()
: ATA(glm::mat3x3(0.f)),
ATb(glm::fvec3(0.0)),
diag_ATc(0.0),
btb(0.f),
diag_ctc(glm::fvec3(0.f)),
massPointSum(glm::fvec3(0.f)),
averageNormalSum(glm::fvec3(0.f)),
roughness(0.f),
pointCount(0) {}
};
#endif //VOXELWORLD_QEF_H
<|start_filename|>src/KdtreeISO/include/Utils.h<|end_filename|>
//
// Created by Danielhu on 2018/4/2.
//
#ifndef VOXELWORLD_UTILS_H
#define VOXELWORLD_UTILS_H
#define GLM_FORCE_EXPLICIT_CTOR
#include <glm/glm.hpp>
#include <cmath>
#include <iostream>
using glm::fvec3;
using PositionCode = glm::ivec3;
#ifndef NDEBUG
#define LOGV(v) \
std::cout << #v << " " << v.x << " " << v.y << " " << v.z << " " << std::endl;
#define LOGF(v) \
std::cout << #v << " " << v << std::endl;
#define LOGV4(v) \
std::cout << #v << " " << v.x << " " << v.y << " " << v.z << " " << v.w << std::endl;
#else
#define LOGV(v)
#define LOGV4(v)
#define LOGF(v)
#endif
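// Intersect the line through va and vb with the axis-aligned plane at
// min[dir] and check that the hit point stays inside the face of the box
// [min, max] in the two remaining axes.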
inline bool segmentFaceIntersection(const fvec3 &va, const fvec3 &vb, const fvec3 &min, const fvec3 &max, int dir) {
float l = (vb - va)[dir];
fvec3 p = (min - va)[dir] / l * vb + (vb - min)[dir] / l * va;
for (int i = 0; i < 3; ++i) {
if (dir != i) {
if (p[i] < min[i] || p[i] > max[i]) {
return false;
}
}
}
return true;
}
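// Boost-style hash_combine: mixes the hash of v into seed so that containers
// used as keys (e.g. std::set<Vertex *>) can be hashed element by element.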
template <class T>
inline void hash_combine(std::size_t &seed, const T &v) {
std::hash<T> hasher;
seed ^= hasher(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
}
template <typename C>
struct ContainerHasher {
typedef typename C::value_type value_type;
inline size_t operator()(const C &c) const {
size_t seed = 0;
for (typename C::const_iterator it = c.begin(), end = c.end(); it != end; ++it) {
hash_combine<value_type>(seed, *it);
}
return seed;
}
};
inline fvec3 codeToPos(const PositionCode &code, float cellSize) {
// LOGV(code)
auto result = fvec3(static_cast<float>(code.x) * cellSize, static_cast<float>(code.y) * cellSize, static_cast<float>(code.z) * cellSize);
return result;
}
inline PositionCode posToCode(const glm::fvec3 &pos, float cellSize) {
return PositionCode(std::round(pos.x / cellSize), std::round(pos.y / cellSize), std::round(pos.z / cellSize));
}
inline PositionCode posToCodeFloor(const glm::fvec3 &pos, float cellSize) {
return PositionCode(pos.x / cellSize, pos.y / cellSize, pos.z / cellSize);
}
#endif //VOXELWORLD_UTILS_H
<|start_filename|>src/KdtreeISO/include/Octree.h<|end_filename|>
//
// Created by Danielhu on 2018/1/14.
//
#ifndef VOXELWORLD_OCTREE_H
#define VOXELWORLD_OCTREE_H
#include <glm/glm.hpp>
#include <vector>
#include <unordered_set>
#include <unordered_map>
#include <set>
#include <tuple>
#include <memory>
#include "Topology.h"
#include "VolumeData.h"
#include "Vertex.h"
#include "Qef.h"
#include "Utils.h"
#include "Mesh.h"
#include "Kdtree.h"
typedef std::unordered_set<std::set<Vertex *>, ContainerHasher<std::set<Vertex *>>> EdgePolygonSet;
struct Octree {
public:
static Octree *buildWithScalarField(const PositionCode &minCode, int depth, ScalarField *scalarField, bool as_mipmap);
static void getSum(Octree *root, const PositionCode &minPos, const PositionCode &maxPos, QefSolver &out);
static void simplify(Octree *root, float threshold);
static void calClusterbility(Octree *root, ScalarField *s);
static Mesh *extractMesh(Octree *root,
ScalarField *geometry,
int &intersectionPreservingVerticesCount,
bool intersectionFree = true);
static void drawOctrees(Octree *root, Mesh *mesh);
void combineComponents(ScalarField *s);
Octree(const PositionCode &minCode, const PositionCode &maxCode, int depth) : grid(minCode, maxCode),
childIndex(-1),
isLeaf(false),
depth(depth){};
~Octree() = default;
protected:
static void contourCell(Octree *root,
Mesh *mesh,
ScalarField *geometry,
int &intersectionPreservingVerticesCount,
EdgePolygonSet &edgePolygonSet,
bool intersectionFree,
float threshold);
static void contourFace(Octree *nodes[2],
int dir,
Mesh *mesh,
ScalarField *geometry,
int &intersectionPreservingVerticesCount,
EdgePolygonSet &edgePolygonSet,
bool intersectionFree,
float threshold);
static void contourEdge(Octree **nodes,
int dir,
int quadDir2,
ScalarField *geometry,
int &intersectionPreservingVerticesCount,
EdgePolygonSet &edgePolygonSet,
bool intersectionFree,
Mesh *mesh,
float threshold);
static void generateVertexIndices(Octree *node,
Mesh *mesh,
ScalarField *geometry,
std::unordered_set<Vertex *> &indexed);
static void generateQuad(Octree **nodes,
int dir,
int quadDir2,
ScalarField *g,
int &intersectionPreservingVerticesCount,
EdgePolygonSet &edgePolygonSet,
bool intersectionFree,
Mesh *mesh,
float threshold);
public:
RectilinearGrid grid;
Octree *children[8]{nullptr};
int childIndex;
bool isLeaf;
bool clusterable{true};
int depth;
};
#endif //VOXELWORLD_OCTREE_H
<|start_filename|>src/KdtreeISO/include/AxisAlignedLine.h<|end_filename|>
//
// Created by Danielhu on 2018/5/2.
//
#ifndef VOXELWORLD_AXISALIGNEDLINE_H
#define VOXELWORLD_AXISALIGNEDLINE_H
#include <glm/glm.hpp>
#include "Utils.h"
struct AALine {
PositionCode point;
int dir;
bool operator==(const AALine &other) {
return point[(dir + 1) % 3] == other.point[(dir + 1) % 3] && point[(dir + 2) % 3] == other.point[(dir + 2) % 3];
}
AALine(const PositionCode &axes, int dotPlane) : point(axes), dir(dotPlane) {}
AALine() = default;
};
#endif //VOXELWORLD_AXISALIGNEDLINE_H
<|start_filename|>src/examples/opengl/program.h<|end_filename|>
#ifndef HAS_GLSL_PROGRAM_H_BEEN_INCLUDED
#define HAS_GLSL_PROGRAM_H_BEEN_INCLUDED
#define GL_SILENCE_DEPRECATION
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <vector>
#include <glm/glm.hpp>
#include <glm/ext.hpp>
#include <map>
class Program {
public:
Program();
~Program();
bool init(const char *vert, const char *frag);
void use();
bool setMat4(const char *name, const glm::mat4 &uniform);
bool setVec3(const char *name, const glm::fvec3 &uniform);
bool setFloat(const char *name, float uniform);
private:
void printShaderInfo(GLuint shader) const;
void printProgramInfo(GLuint program) const;
bool compile(GLenum type, const char *src);
bool link();
GLuint id;
std::vector<GLuint> shaders;
std::map<std::string, int> locations;
const GLint getUniformLocation(const char *name);
};
#endif // HAS_GLSL_PROGRAM_H_BEEN_INCLUDED
<|start_filename|>src/KdtreeISO/include/Kdtree.h<|end_filename|>
//
// Created by Danielhu on 2018/4/19.
//
#ifndef VOXELWORLD_KDTREE_H
#define VOXELWORLD_KDTREE_H
#include <vector>
#include <array>
#include "Utils.h"
#include "Qef.h"
#include "RectilinearGrid.h"
#include "Mesh.h"
#include "AxisAlignedLine.h"
struct Octree;
struct Kdtree {
typedef std::array<Kdtree *, 2> FaceKd;
typedef std::array<Kdtree *, 4> EdgeKd;
RectilinearGrid grid;
int planeDir;
int depth;
bool clusterable{true};
Kdtree *children[2]{nullptr, nullptr};
Kdtree(QefSolver sum,
const PositionCode &minCode,
const PositionCode &maxCode,
int dir,
int depth)
: grid(minCode, maxCode, sum),
planeDir(dir),
depth(depth) {}
inline bool isContouringLeaf(float threshold) const {
if (!children[0] && !children[1]) {
return true;
}
// if (grid.error > threshold) {
// return false;
// }
for (auto &v : grid.vertices) {
if (v.error > threshold) {
return false;
}
}
return clusterable;
}
inline bool isLeaf() const {
return !children[0] && !children[1];
}
inline int axis() {
assert(!isLeaf());
if (children[0]) {
return children[0]->grid.maxCode[planeDir];
}
return children[1]->grid.minCode[planeDir];
}
inline Kdtree *getChild(int i, float threshold) {
if (grid.approximate.error < threshold) {
return this;
}
return children[i];
}
~Kdtree() {
delete children[0];
delete children[1];
}
void combineQef();
void calClusterability(ScalarField *t);
static void generateVertexIndices(Kdtree *root, Mesh *mesh, ScalarField *t, float threshold);
static void contourCell(Kdtree *node, Mesh *mesh, ScalarField *t, float threshold);
static void contourFace(FaceKd &nodes,
int dir,
int axis,
Mesh *mesh,
ScalarField *t,
float threshold);
static void detectQuad(EdgeKd &nodes, AALine line, float threshold);
static void contourEdge(EdgeKd &nodes,
const AALine &line,
int quadDir1,
ScalarField *t,
float threshold,
Mesh *mesh);
static void generateQuad(EdgeKd &nodes,
int quadDir1,
int quadDir2,
Mesh *mesh,
ScalarField *t,
float threshold);
static int chooseAxisDir(QefSolver &qef, const PositionCode &minCode, const PositionCode &maxCode);
static Kdtree *buildFromOctree(Octree *octree,
const PositionCode &minCode,
const PositionCode &maxCode,
ScalarField *t,
int depth);
static void drawKdtree(Kdtree *root, Mesh *mesh, float threshold);
static Mesh *extractMesh(Kdtree *root, ScalarField *t, float threshold);
};
#endif //VOXELWORLD_KDTREE_H
<|start_filename|>src/KdtreeISO/lib/Kdtree.cpp<|end_filename|>
//
// Created by Danielhu on 2018/4/22.
//
#define GLM_FORCE_CTOR_INIT
#include <set>
#include <map>
#include "Kdtree.h"
#include "Octree.h"
#include "Indicators.h"
#include "Mesh.h"
#include "AxisAlignedLine.h"
using glm::max;
using glm::min;
void Kdtree::calClusterability(ScalarField *t) {
bool selfClusterable = RectilinearGrid::calClusterability(
children[0] ? &children[0]->grid : nullptr,
children[1] ? &children[1]->grid : nullptr,
planeDir, grid.minCode, grid.maxCode, t);
if (!selfClusterable) {
clusterable = false;
return;
}
for (auto child : children) {
if (child && !child->clusterable) {
clusterable = false;
return;
}
}
clusterable = true;
return;
}
void Kdtree::combineQef() {
if (!clusterable || isLeaf()) {
return;
}
RectilinearGrid::combineAAGrid(children[0] ? &children[0]->grid : nullptr,
children[1] ? &children[1]->grid : nullptr,
planeDir,
&grid);
}
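// Recursively build a kd-tree over the octree's QEF samples. The split axis
// comes from chooseAxisDir and the split plane is found by a binary search
// that balances the left/right QEF residual errors; leaves sample their own
// QEF while inner nodes cluster their children's when clusterable.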
Kdtree *Kdtree::buildFromOctree(Octree *octree, const PositionCode &minCode, const PositionCode &maxCode, ScalarField *t, int depth) {
if (glm::any(glm::greaterThanEqual(minCode, maxCode))) {
return nullptr;
}
QefSolver sum;
Octree::getSum(octree, minCode, maxCode, sum);
if (sum.pointCount == 0) {
return nullptr;
}
int strategy = 1;
PositionCode bestRightMinCode = maxCode, bestLeftMaxCode = minCode;
int dir = chooseAxisDir(sum, minCode, maxCode);
int minAxis = minCode[dir];
int maxAxis = maxCode[dir];
if (strategy == 0) {
float error;
glm::fvec3 approximate;
sum.solve(approximate, error);
int plane = static_cast<int>(std::round(approximate[dir] / RectilinearGrid::getUnitSize()));
if (maxCode[dir] - minCode[dir] > 1) {
plane = std::min(maxCode[dir] - 1, std::max(minCode[dir] + 1, plane));
bestLeftMaxCode = maxCode;
bestRightMinCode = minCode;
bestLeftMaxCode[dir] = bestRightMinCode[dir] = plane;
}
}
else {
QefSolver leftSum, rightSum;
float minError = std::numeric_limits<float>::max();
while (maxAxis - minAxis > 1) {
int mid = (maxAxis + minAxis) / 2;
PositionCode rightMinCode = minCode;
rightMinCode[dir] = mid;
PositionCode leftMaxCode = maxCode;
leftMaxCode[dir] = mid;
glm::fvec3 leftApproximate, rightApproximate;
leftSum.reset();
rightSum.reset();
Octree::getSum(octree, minCode, leftMaxCode, leftSum);
Octree::getSum(octree, rightMinCode, maxCode, rightSum);
float leftError = 0.f;
float rightError = 0.f;
leftSum.solve(leftApproximate, leftError);
rightSum.solve(rightApproximate, rightError);
if (std::abs(leftError - rightError) < minError) {
minError = std::abs(leftError - rightError);
bestLeftMaxCode = leftMaxCode;
bestRightMinCode = rightMinCode;
}
if (leftError > rightError) {
maxAxis = mid;
}
else if (leftError < rightError) {
minAxis = mid;
}
else {
break;
}
}
}
auto kd = new Kdtree(sum, minCode, maxCode, dir, depth);
kd->children[0] = buildFromOctree(octree, minCode, bestLeftMaxCode, t, depth + 1);
kd->children[1] = buildFromOctree(octree, bestRightMinCode, maxCode, t, depth + 1);
if (kd->isLeaf()) {
kd->grid.assignSign(t);
kd->grid.sampleQef(t, false);
}
else {
kd->grid.assignSign(t);
kd->calClusterability(t);
kd->combineQef();
}
if (kd->clusterable) {
for (int i = 0; i < kd->grid.components.size(); ++i) {
kd->grid.solveComponent(i);
}
}
return kd;
}
int Kdtree::chooseAxisDir(QefSolver &qef, const PositionCode &minCode, const PositionCode &maxCode) {
// naive approach
int dir = 0;
int strategy = 1;
auto size = maxCode - minCode;
if (strategy == 0) {
int maxDir = 0, minDir = 1;
if (size[1] > size[0]) {
maxDir = 1;
minDir = 0;
}
if (size[2] > size[maxDir]) {
maxDir = 2;
}
if (size[minDir] > size[2]) {
minDir = 2;
}
dir = maxDir;
}
else {
// variance approach
glm::fvec3 approximate;
float error;
qef.solve(approximate, error);
auto variance = qef.getVariance(approximate);
variance[0] *= size[0];
variance[1] *= size[1];
variance[2] *= size[2];
int maxVarDir = 0, minVarDir = 1;
if (variance[1] > variance[0]) {
maxVarDir = 1;
minVarDir = 0;
}
if (variance[2] > variance[maxVarDir]) {
maxVarDir = 2;
}
if (variance[minVarDir] > variance[2]) {
minVarDir = 2;
}
dir = maxVarDir;
if (size[maxVarDir] < 2) {
dir = 3 - maxVarDir - minVarDir;
if (size[3 - maxVarDir - minVarDir] < 2) {
dir = minVarDir;
}
}
}
return dir;
}
void Kdtree::drawKdtree(Kdtree *root, Mesh *mesh, float threshold) {
if (!root) {
return;
}
if (root->isContouringLeaf(threshold)) {
root->grid.draw(mesh);
return;
}
drawKdtree(root->children[0], mesh, threshold);
drawKdtree(root->children[1], mesh, threshold);
}
Mesh *Kdtree::extractMesh(Kdtree *root, ScalarField *t, float threshold) {
Mesh *mesh = new Mesh;
generateVertexIndices(root, mesh, t, threshold);
contourCell(root, mesh, t, threshold);
return mesh;
}
void Kdtree::generateVertexIndices(Kdtree *root, Mesh *mesh, ScalarField *t, float threshold) {
if (!root) {
return;
}
// mesh->addVertex(&root->grid.approximate, t);
for (int i = 0; i < root->grid.vertices.size(); ++i) {
auto &v = root->grid.vertices[i];
mesh->addVertex(&v, t);
// v.hermiteN = glm::normalize(root->grid.components[i].averageNormalSum);
}
generateVertexIndices(root->children[0], mesh, t, threshold);
generateVertexIndices(root->children[1], mesh, t, threshold);
}
AALine constructLine(const Kdtree::FaceKd &faceNodes, int side, int originFaceDir, int axis, float
#ifndef NDEBUG
threshold
#endif
) {
AALine line;
line.point[originFaceDir] = axis;
assert(!faceNodes[side]->isContouringLeaf(threshold));
line.dir = 3 - originFaceDir - faceNodes[side]->planeDir;
line.point[faceNodes[side]->planeDir] = faceNodes[side]->axis();
return line;
}
void Kdtree::contourCell(Kdtree *node, Mesh *mesh, ScalarField *t, float threshold) {
if (!node || node->isContouringLeaf(threshold)) {
return;
}
FaceKd faceNodes = {node->children[0], node->children[1]};
contourFace(faceNodes, node->planeDir, node->axis(), mesh, t, threshold);
contourCell(node->children[0], mesh, t, threshold);
contourCell(node->children[1], mesh, t, threshold);
}
bool checkMinimalFace(const Kdtree::FaceKd &nodes, int dir, PositionCode &faceMin, PositionCode &faceMax) {
faceMax = min(nodes[0]->grid.maxCode, nodes[1]->grid.maxCode);
faceMin = max(nodes[0]->grid.minCode, nodes[1]->grid.minCode);
auto offset = faceMax - faceMin;
return offset[(dir + 1) % 3] > 0 && offset[(dir + 2) % 3] > 0;
}
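// Contour the face shared by two kd-tree cells (face normal = dir). Nodes
// split parallel to the face are first replaced by the child touching it;
// a node split across the face recurses into both children and, where its
// split plane cuts the shared face, spawns a contourEdge call along the cut.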
void Kdtree::contourFace(FaceKd &nodes,
const int dir,
const int axis,
Mesh *mesh,
ScalarField *t,
float threshold) {
if (!nodes[0] || !nodes[1]) {
return;
}
if (nodes[0]->isContouringLeaf(threshold) && nodes[1]->isContouringLeaf(threshold)) {
return;
}
PositionCode faceMin, faceMax;
if (!checkMinimalFace(nodes, dir, faceMin, faceMax)) {
return;
}
for (int i = 0; i < 2; ++i) {
while (!nodes[i]->isContouringLeaf(threshold) && nodes[i]->planeDir == dir) {
nodes[i] = nodes[i]->children[1 - i];
if (!nodes[i]) {
return;
}
}
}
for (int i = 0; i < 2; ++i) {
if (!nodes[i]->isContouringLeaf(threshold)) {
for (int j = 0; j < 2; ++j) {
FaceKd nextFace = nodes;
nextFace[i] = nodes[i]->children[j];
contourFace(nextFace, dir, axis, mesh, t, threshold);
}
if (nodes[i]->axis() > faceMin[nodes[i]->planeDir] && nodes[i]->axis() < faceMax[nodes[i]->planeDir]) {
EdgeKd edgeNodes = {nodes[0], nodes[0], nodes[1], nodes[1]};
edgeNodes[i * 2] = nodes[i]->children[0];
edgeNodes[i * 2 + 1] = nodes[i]->children[1];
AALine line = constructLine(nodes, i, dir, axis, threshold);
contourEdge(edgeNodes, line, nodes[i]->planeDir, t, threshold, mesh);
}
return;
}
}
}
bool checkMinimalEdge(const Kdtree::EdgeKd &nodes, const AALine &line, PositionCode &minEnd, PositionCode &maxEnd) {
minEnd = maxEnd = line.point;
int dir = line.dir;
minEnd[dir] = max(max(nodes[0]->grid.minCode, nodes[1]->grid.minCode),
max(nodes[2]->grid.minCode, nodes[3]->grid.minCode))[dir];
maxEnd[dir] = min(min(nodes[0]->grid.maxCode, nodes[1]->grid.maxCode),
min(nodes[2]->grid.maxCode, nodes[3]->grid.maxCode))[dir];
return minEnd[dir] < maxEnd[dir];
}
int nextQuadIndex(int dir1, int dir2, int planeDir, int i) {
PositionCode pos;
pos[dir1] = 1 - i % 2;
pos[dir2] = 1 - i / 2;
return pos[planeDir];
}
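// While a pair of edge nodes still refers to the same shared ancestor,
// descend it: if the ancestor's split plane passes exactly through the edge
// line both children are taken, otherwise only the child whose half contains
// the line, stopping once the split direction matches the edge direction.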
void Kdtree::detectQuad(EdgeKd &nodes, AALine line, float threshold) {
for (int i = 0; i < 2; ++i) {
while (
nodes[i * 2] && nodes[i * 2 + 1] && !nodes[i * 2]->isContouringLeaf(threshold) && nodes[2 * i] == nodes[2 * i + 1] && nodes[i * 2]->planeDir != line.dir) {
auto commonNode = nodes[i * 2];
if (nodes[i * 2]->axis() == line.point[nodes[i * 2]->planeDir]) {
nodes[i * 2] = commonNode->children[0];
nodes[i * 2 + 1] = commonNode->children[1];
}
else if (nodes[i * 2]->axis() > line.point[nodes[i * 2]->planeDir]) {
nodes[i * 2] = commonNode->children[0];
nodes[i * 2 + 1] = commonNode->children[0];
}
else {
nodes[i * 2] = commonNode->children[1];
nodes[i * 2 + 1] = commonNode->children[1];
}
}
}
}
void setQuadNode(Kdtree::EdgeKd &nodes, int i, Kdtree *p) {
if (nodes[oppositeQuadIndex(i)] == nodes[i]) {
nodes[oppositeQuadIndex(i)] = p;
}
nodes[i] = p;
}
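// Contour around one axis-aligned edge shared by (up to) four kd-tree cells:
// collapse shared ancestors (detectQuad), descend the remaining nodes toward
// the edge until each is a contouring leaf, then hand the four leaves to
// generateQuad; nodes still split along the edge direction are recursed on
// both children.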
void Kdtree::contourEdge(EdgeKd &nodes,
const AALine &line,
const int quadDir1,
ScalarField *t,
float threshold,
Mesh *mesh) {
detectQuad(nodes, line, threshold);
for (auto n : nodes) {
if (!n) {
return;
}
}
assert(quadDir1 >= 0 && quadDir1 < 3);
const int quadDir2 = 3 - quadDir1 - line.dir;
PositionCode minEndCode, maxEndCode;
if (!checkMinimalEdge(nodes, line, minEndCode, maxEndCode)) {
return;
}
glm::fvec3 minEnd = codeToPos(minEndCode, RectilinearGrid::getUnitSize());
glm::fvec3 maxEnd = codeToPos(maxEndCode, RectilinearGrid::getUnitSize());
for (int i = 0; i < 4; ++i) {
if (nodes[i] != nodes[oppositeQuadIndex(i)]) {
while (!nodes[i]->isContouringLeaf(threshold) && nodes[i]->planeDir != line.dir) {
nodes[i] = nodes[i]->children[nextQuadIndex(quadDir1, quadDir2, nodes[i]->planeDir, i)];
if (!nodes[i]) {
return;
}
}
}
}
// assert(nodes[0]->grid.minCode[quadDir1] <= nodes[1]->grid.minCode[quadDir1]);
// assert(nodes[2]->grid.minCode[quadDir1] <= nodes[3]->grid.minCode[quadDir1]);
// assert(nodes[0]->grid.minCode[quadDir2] <= nodes[2]->grid.minCode[quadDir2]);
// assert(nodes[1]->grid.minCode[quadDir2] <= nodes[3]->grid.minCode[quadDir2]);
// if ((maxEndCode - minEndCode)[0] == 1) {
// mesh->drawAABBDebug(codeToPos(minEndCode, RectilinearGrid::getUnitSize()), codeToPos(maxEndCode, RectilinearGrid::getUnitSize()));
// }
if (nodes[0]->isContouringLeaf(threshold) && nodes[1]->isContouringLeaf(threshold) && nodes[2]->isContouringLeaf(threshold) && nodes[3]->isContouringLeaf(threshold)) {
// only for debug
if ((t->value(minEnd) >= 0 && t->value(maxEnd) >= 0) || (t->value(minEnd) < 0 && t->value(maxEnd) < 0)) {
// return;
}
generateQuad(nodes, quadDir1, quadDir2, mesh, t, threshold);
return;
}
for (int i = 0; i < 4; ++i) {
EdgeKd nextNodes = nodes;
if (!nodes[i]->isContouringLeaf(threshold) && nodes[i]->planeDir == line.dir) {
setQuadNode(nextNodes, i, nodes[i]->children[0]);
contourEdge(nextNodes, line, quadDir1, t, threshold, mesh);
nextNodes = nodes;
setQuadNode(nextNodes, i, nodes[i]->children[1]);
contourEdge(nextNodes, line, quadDir1, t, threshold, mesh);
return;
}
}
}
void Kdtree::generateQuad(EdgeKd &nodes,
int quadDir1,
int quadDir2,
Mesh *mesh,
ScalarField *t,
float threshold) {
// for (auto n : nodes) {
// n->grid.vertices[0].hermiteP = codeToPos(n->grid.minCode + n->grid.maxCode, RectilinearGrid::getUnitSize()) / 2.f;
// // mesh->drawAABBDebug(codeToPos(n->grid.minCode, RectilinearGrid::getUnitSize()), codeToPos(n->grid.maxCode, RectilinearGrid::getUnitSize()));
// }
// return;
// assert(glm::all(glm::greaterThanEqual(nodes[1]->grid.minCode, nodes[0]->grid.minCode)));
// assert(glm::all(glm::greaterThanEqual(nodes[3]->grid.minCode, nodes[2]->grid.minCode)));
// assert(glm::all(glm::greaterThanEqual(nodes[3]->grid.minCode, nodes[0]->grid.minCode)));
// assert(glm::all(glm::greaterThanEqual(nodes[3]->grid.minCode, nodes[1]->grid.minCode)));
// for (auto &n : nodes) {
// assert(n->clusterable);
// for (auto c : n->grid.vertices) {
// assert(c.vertexIndex);
// }
// }
RectilinearGrid::generateQuad(nodes, quadDir1, quadDir2, mesh, t, threshold);
}
| Danielhu229/KdtreeISO |
<|start_filename|>ntlmtransport.go<|end_filename|>
package httpntlm
import (
"errors"
"io"
"io/ioutil"
"net/http"
"strings"
"github.com/vadimi/go-ntlm/ntlm"
)
var errEmptyNtlm = errors.New("empty NTLM challenge")
// NtlmTransport is an implementation of the http.RoundTripper interface
type NtlmTransport struct {
Domain string
User string
Password string
http.RoundTripper
Jar http.CookieJar
}
// RoundTrip sends the HTTP request and tries to perform NTLM authentication
func (t NtlmTransport) RoundTrip(req *http.Request) (res *http.Response, err error) {
client := http.Client{}
if t.RoundTripper != nil {
client.Transport = t.RoundTripper
}
if t.Jar != nil {
client.Jar = t.Jar
}
resp, err := t.ntlmRoundTrip(client, req)
// retry once in case of an empty ntlm challenge
if err != nil && errors.Is(err, errEmptyNtlm) {
return t.ntlmRoundTrip(client, req)
}
return resp, err
}
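// ntlmRoundTrip performs the NTLM message exchange over HTTP: it first sends
// a Type 1 (Negotiate) message, reads the Type 2 (Challenge) from the 401
// response's WWW-Authenticate header, and then replays the original request
// with the Type 3 (Authenticate) message in the Authorization header.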
func (t NtlmTransport) ntlmRoundTrip(client http.Client, req *http.Request) (*http.Response, error) {
// first send NTLM Negotiate header
r, _ := http.NewRequest("GET", req.URL.String(), strings.NewReader(""))
r.Header.Add("Authorization", "NTLM "+encBase64(negotiate()))
resp, err := client.Do(r)
if err != nil {
return nil, err
}
if err == nil && resp.StatusCode == http.StatusUnauthorized {
// it's necessary to reuse the same http connection
// in order to do that it's required to read Body and close it
_, err = io.Copy(ioutil.Discard, resp.Body)
if err != nil {
return nil, err
}
err = resp.Body.Close()
if err != nil {
return nil, err
}
// retrieve Www-Authenticate header from response
authHeaders := resp.Header.Values("WWW-Authenticate")
if len(authHeaders) == 0 {
return nil, errors.New("WWW-Authenticate header missing")
}
// there could be multiple WWW-Authenticate headers, so we need to pick the one that starts with NTLM
ntlmChallengeFound := false
var ntlmChallengeString string
for _, h := range authHeaders {
if strings.HasPrefix(h, "NTLM") {
ntlmChallengeFound = true
ntlmChallengeString = strings.TrimSpace(h[4:])
break
}
}
if ntlmChallengeString == "" {
if ntlmChallengeFound {
return nil, errEmptyNtlm
}
return nil, errors.New("wrong WWW-Authenticate header")
}
challengeBytes, err := decBase64(ntlmChallengeString)
if err != nil {
return nil, err
}
session, err := ntlm.CreateClientSession(ntlm.Version2, ntlm.ConnectionlessMode)
if err != nil {
return nil, err
}
session.SetUserInfo(t.User, t.Password, t.Domain)
// parse NTLM challenge
challenge, err := ntlm.ParseChallengeMessage(challengeBytes)
if err != nil {
return nil, err
}
err = session.ProcessChallengeMessage(challenge)
if err != nil {
return nil, err
}
// authenticate user
authenticate, err := session.GenerateAuthenticateMessage()
if err != nil {
return nil, err
}
// set NTLM Authorization header
req.Header.Set("Authorization", "NTLM "+encBase64(authenticate.Bytes()))
return client.Do(req)
}
return resp, err
}
| vadimi/go-http-ntlm |
<|start_filename|>tonic-suite/asr/src/gmm/mle-am-diag-gmm.cc<|end_filename|>
// gmm/mle-am-diag-gmm.cc
// Copyright 2009-2011 Saarland University (Author: <NAME>);
// Microsoft Corporation; <NAME>; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "gmm/am-diag-gmm.h"
#include "gmm/mle-am-diag-gmm.h"
#include "util/stl-utils.h"
namespace kaldi {
const AccumDiagGmm &AccumAmDiagGmm::GetAcc(int32 index) const {
KALDI_ASSERT(index >= 0 &&
index < static_cast<int32>(gmm_accumulators_.size()));
return *(gmm_accumulators_[index]);
}
AccumDiagGmm &AccumAmDiagGmm::GetAcc(int32 index) {
KALDI_ASSERT(index >= 0 &&
index < static_cast<int32>(gmm_accumulators_.size()));
return *(gmm_accumulators_[index]);
}
AccumAmDiagGmm::~AccumAmDiagGmm() { DeletePointers(&gmm_accumulators_); }
void AccumAmDiagGmm::Init(const AmDiagGmm &model, GmmFlagsType flags) {
DeletePointers(&gmm_accumulators_); // in case was non-empty when called.
gmm_accumulators_.resize(model.NumPdfs(), NULL);
for (int32 i = 0; i < model.NumPdfs(); i++) {
gmm_accumulators_[i] = new AccumDiagGmm();
gmm_accumulators_[i]->Resize(model.GetPdf(i), flags);
}
}
void AccumAmDiagGmm::Init(const AmDiagGmm &model, int32 dim,
GmmFlagsType flags) {
KALDI_ASSERT(dim > 0);
DeletePointers(&gmm_accumulators_); // in case was non-empty when called.
gmm_accumulators_.resize(model.NumPdfs(), NULL);
for (int32 i = 0; i < model.NumPdfs(); i++) {
gmm_accumulators_[i] = new AccumDiagGmm();
gmm_accumulators_[i]->Resize(model.GetPdf(i).NumGauss(), dim, flags);
}
}
void AccumAmDiagGmm::SetZero(GmmFlagsType flags) {
for (size_t i = 0; i < gmm_accumulators_.size(); i++) {
gmm_accumulators_[i]->SetZero(flags);
}
}
BaseFloat AccumAmDiagGmm::AccumulateForGmm(const AmDiagGmm &model,
const VectorBase<BaseFloat> &data,
int32 gmm_index, BaseFloat weight) {
KALDI_ASSERT(static_cast<size_t>(gmm_index) < gmm_accumulators_.size());
BaseFloat log_like = gmm_accumulators_[gmm_index]->AccumulateFromDiag(
model.GetPdf(gmm_index), data, weight);
total_log_like_ += log_like * weight;
total_frames_ += weight;
return log_like;
}
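// Accumulate for a single pdf using two feature vectors: the Gaussian
// posteriors are computed from data1 while the sufficient statistics are
// accumulated from data2 (useful when the alignment features differ from the
// update features).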
BaseFloat AccumAmDiagGmm::AccumulateForGmmTwofeats(
const AmDiagGmm &model, const VectorBase<BaseFloat> &data1,
const VectorBase<BaseFloat> &data2, int32 gmm_index, BaseFloat weight) {
KALDI_ASSERT(static_cast<size_t>(gmm_index) < gmm_accumulators_.size());
const DiagGmm &gmm = model.GetPdf(gmm_index);
AccumDiagGmm &acc = *(gmm_accumulators_[gmm_index]);
Vector<BaseFloat> posteriors;
BaseFloat log_like = gmm.ComponentPosteriors(data1, &posteriors);
posteriors.Scale(weight);
acc.AccumulateFromPosteriors(data2, posteriors);
total_log_like_ += log_like * weight;
total_frames_ += weight;
return log_like;
}
void AccumAmDiagGmm::AccumulateFromPosteriors(
const AmDiagGmm &model, const VectorBase<BaseFloat> &data, int32 gmm_index,
const VectorBase<BaseFloat> &posteriors) {
KALDI_ASSERT(gmm_index >= 0 && gmm_index < NumAccs());
gmm_accumulators_[gmm_index]->AccumulateFromPosteriors(data, posteriors);
total_frames_ += posteriors.Sum();
}
void AccumAmDiagGmm::AccumulateForGaussian(const AmDiagGmm &am,
const VectorBase<BaseFloat> &data,
int32 gmm_index, int32 gauss_index,
BaseFloat weight) {
KALDI_ASSERT(gmm_index >= 0 && gmm_index < NumAccs());
KALDI_ASSERT(gauss_index >= 0 &&
gauss_index < am.GetPdf(gmm_index).NumGauss());
gmm_accumulators_[gmm_index]->AccumulateForComponent(data, gauss_index,
weight);
}
void AccumAmDiagGmm::Read(std::istream &in_stream, bool binary, bool add) {
int32 num_pdfs;
ExpectToken(in_stream, binary, "<NUMPDFS>");
ReadBasicType(in_stream, binary, &num_pdfs);
KALDI_ASSERT(num_pdfs > 0);
if (!add || (add && gmm_accumulators_.empty())) {
gmm_accumulators_.resize(num_pdfs, NULL);
for (std::vector<AccumDiagGmm *>::iterator it = gmm_accumulators_.begin(),
end = gmm_accumulators_.end();
it != end; ++it) {
if (*it != NULL) delete *it;
*it = new AccumDiagGmm();
(*it)->Read(in_stream, binary, add);
}
} else {
if (gmm_accumulators_.size() != static_cast<size_t>(num_pdfs))
KALDI_ERR << "Adding accumulators but num-pdfs do not match: "
<< (gmm_accumulators_.size()) << " vs. " << (num_pdfs);
for (std::vector<AccumDiagGmm *>::iterator it = gmm_accumulators_.begin(),
end = gmm_accumulators_.end();
it != end; ++it)
(*it)->Read(in_stream, binary, add);
}
// TODO(arnab): Bad hack! Need to make this self-delimiting.
in_stream.peek(); // This will set the EOF bit for older accs.
if (!in_stream.eof()) {
double like, frames;
ExpectToken(in_stream, binary, "<total_like>");
ReadBasicType(in_stream, binary, &like);
total_log_like_ = (add) ? total_log_like_ + like : like;
ExpectToken(in_stream, binary, "<total_frames>");
ReadBasicType(in_stream, binary, &frames);
total_frames_ = (add) ? total_frames_ + frames : frames;
}
}
void AccumAmDiagGmm::Write(std::ostream &out_stream, bool binary) const {
int32 num_pdfs = gmm_accumulators_.size();
WriteToken(out_stream, binary, "<NUMPDFS>");
WriteBasicType(out_stream, binary, num_pdfs);
for (std::vector<AccumDiagGmm *>::const_iterator
it = gmm_accumulators_.begin(),
end = gmm_accumulators_.end();
it != end; ++it) {
(*it)->Write(out_stream, binary);
}
WriteToken(out_stream, binary, "<total_like>");
WriteBasicType(out_stream, binary, total_log_like_);
WriteToken(out_stream, binary, "<total_frames>");
WriteBasicType(out_stream, binary, total_frames_);
}
// BaseFloat AccumAmDiagGmm::TotCount() const {
// BaseFloat ans = 0.0;
// for (int32 pdf = 0; pdf < NumAccs(); pdf++)
// ans += gmm_accumulators_[pdf]->occupancy().Sum();
// return ans;
// }
void ResizeModel(int32 dim, AmDiagGmm *am_gmm) {
for (int32 pdf_id = 0; pdf_id < am_gmm->NumPdfs(); pdf_id++) {
DiagGmm &pdf = am_gmm->GetPdf(pdf_id);
pdf.Resize(pdf.NumGauss(), dim);
Matrix<BaseFloat> inv_vars(pdf.NumGauss(), dim);
inv_vars.Set(1.0); // make all vars 1.
pdf.SetInvVars(inv_vars);
pdf.ComputeGconsts();
}
}
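// Maximum-likelihood update of every pdf in the model from its accumulator.
// If the accumulator dimension differs from the model's, the model is resized
// and reset to zero-mean/unit-variance first; per-pdf objective improvements
// and counts are summed into the optional outputs.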
void MleAmDiagGmmUpdate(const MleDiagGmmOptions &config,
const AccumAmDiagGmm &am_diag_gmm_acc,
GmmFlagsType flags, AmDiagGmm *am_gmm,
BaseFloat *obj_change_out, BaseFloat *count_out) {
if (am_diag_gmm_acc.Dim() != am_gmm->Dim()) {
KALDI_ASSERT(am_diag_gmm_acc.Dim() != 0);
KALDI_WARN << "Dimensions of accumulator " << am_diag_gmm_acc.Dim()
<< " and gmm " << am_gmm->Dim() << " do not match, resizing "
<< " GMM and setting to zero-mean, unit-variance.";
ResizeModel(am_diag_gmm_acc.Dim(), am_gmm);
}
KALDI_ASSERT(am_gmm != NULL);
KALDI_ASSERT(am_diag_gmm_acc.NumAccs() == am_gmm->NumPdfs());
if (obj_change_out != NULL) *obj_change_out = 0.0;
if (count_out != NULL) *count_out = 0.0;
BaseFloat tmp_obj_change, tmp_count;
BaseFloat *p_obj = (obj_change_out != NULL) ? &tmp_obj_change : NULL,
*p_count = (count_out != NULL) ? &tmp_count : NULL;
for (int32 i = 0; i < am_diag_gmm_acc.NumAccs(); i++) {
MleDiagGmmUpdate(config, am_diag_gmm_acc.GetAcc(i), flags,
&(am_gmm->GetPdf(i)), p_obj, p_count);
if (obj_change_out != NULL) *obj_change_out += tmp_obj_change;
if (count_out != NULL) *count_out += tmp_count;
}
}
void MapAmDiagGmmUpdate(const MapDiagGmmOptions &config,
const AccumAmDiagGmm &am_diag_gmm_acc,
GmmFlagsType flags, AmDiagGmm *am_gmm,
BaseFloat *obj_change_out, BaseFloat *count_out) {
KALDI_ASSERT(am_gmm != NULL && am_diag_gmm_acc.Dim() == am_gmm->Dim() &&
am_diag_gmm_acc.NumAccs() == am_gmm->NumPdfs());
if (obj_change_out != NULL) *obj_change_out = 0.0;
if (count_out != NULL) *count_out = 0.0;
BaseFloat tmp_obj_change, tmp_count;
BaseFloat *p_obj = (obj_change_out != NULL) ? &tmp_obj_change : NULL,
*p_count = (count_out != NULL) ? &tmp_count : NULL;
for (int32 i = 0; i < am_diag_gmm_acc.NumAccs(); i++) {
MapDiagGmmUpdate(config, am_diag_gmm_acc.GetAcc(i), flags,
&(am_gmm->GetPdf(i)), p_obj, p_count);
if (obj_change_out != NULL) *obj_change_out += tmp_obj_change;
if (count_out != NULL) *count_out += tmp_count;
}
}
BaseFloat AccumAmDiagGmm::TotStatsCount() const {
double ans = 0.0;
for (int32 i = 0; i < NumAccs(); i++) {
const AccumDiagGmm &acc = GetAcc(i);
ans += acc.occupancy().Sum();
}
return ans;
}
void AccumAmDiagGmm::Scale(BaseFloat scale) {
for (int32 i = 0; i < NumAccs(); i++) {
AccumDiagGmm &acc = GetAcc(i);
acc.Scale(scale, acc.Flags());
}
total_frames_ *= scale;
total_log_like_ *= scale;
}
void AccumAmDiagGmm::Add(BaseFloat scale, const AccumAmDiagGmm &other) {
total_frames_ += scale * other.total_frames_;
total_log_like_ += scale * other.total_log_like_;
int32 num_accs = NumAccs();
KALDI_ASSERT(num_accs == other.NumAccs());
for (int32 i = 0; i < num_accs; i++)
gmm_accumulators_[i]->Add(scale, *(other.gmm_accumulators_[i]));
}
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/kwsbin/lattice-to-kws-index.cc<|end_filename|>
// kwsbin/lattice-to-kws-index.cc
// Copyright 2012 Johns Hopkins University (Author: <NAME>)
// <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "fstext/fstext-utils.h"
#include "lat/kaldi-lattice.h"
#include "lat/lattice-functions.h"
#include "lat/kaldi-kws.h"
#include "lat/kws-functions.h"
#include "fstext/epsilon-property.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using fst::VectorFst;
typedef kaldi::int32 int32;
typedef kaldi::uint64 uint64;
const char *usage =
"Create an inverted index of the given lattices. The output index is "
"in the T*T*T\n"
"semiring. For details for the semiring, please refer to <NAME> and "
"Muran Saraclar's"
"lattice indexing paper."
"\n"
"Usage: lattice-to-kws-index [options] utter-symtab-rspecifier "
"lattice-rspecifier index-wspecifier\n"
" e.g.: lattice-to-kws-index ark:utter.symtab ark:1.lats "
"ark:global.idx\n";
ParseOptions po(usage);
int32 max_silence_frames = 50;
bool strict = true;
bool allow_partial = true;
BaseFloat max_states_scale = 4;
po.Register("max-silence-frames", &max_silence_frames,
"Maximum #frames for"
" silence arc.");
po.Register("strict", &strict,
"Setting --strict=false will cause successful "
"termination even if we processed no lattices.");
po.Register("max-states-scale", &max_states_scale,
"Number of states in the"
" original lattice times this scale is the number of states "
"allowed when optimizing the index. Negative number means no "
"limit on the number of states.");
po.Register("allow-partial", &allow_partial,
"Allow partial output if fails"
" to determinize, otherwise skip determinization if it fails.");
po.Read(argc, argv);
if (po.NumArgs() < 3 || po.NumArgs() > 4) {
po.PrintUsage();
exit(1);
}
std::string usymtab_rspecifier = po.GetOptArg(1),
lats_rspecifier = po.GetArg(2),
index_wspecifier = po.GetOptArg(3);
// We use RandomAccessInt32Reader to read the utterance symtab table.
RandomAccessInt32Reader usymtab_reader(usymtab_rspecifier);
// We read the lattice in as CompactLattice; We need the CompactLattice
// structure for the rest of the work
SequentialCompactLatticeReader clat_reader(lats_rspecifier);
TableWriter<fst::VectorFstTplHolder<KwsLexicographicArc> > index_writer(
index_wspecifier);
int32 n_done = 0;
int32 n_fail = 0;
int32 max_states = -1;
for (; !clat_reader.Done(); clat_reader.Next()) {
std::string key = clat_reader.Key();
CompactLattice clat = clat_reader.Value();
clat_reader.FreeCurrent();
KALDI_LOG << "Processing lattice " << key;
if (max_states_scale > 0) {
max_states = static_cast<int32>(
max_states_scale * static_cast<BaseFloat>(clat.NumStates()));
}
// Check if we have the corresponding utterance id.
if (!usymtab_reader.HasKey(key)) {
KALDI_WARN << "Cannot find utterance id for " << key;
n_fail++;
continue;
}
// Topologically sort the lattice, if not already sorted.
uint64 props = clat.Properties(fst::kFstProperties, false);
if (!(props & fst::kTopSorted)) {
if (fst::TopSort(&clat) == false) {
KALDI_WARN << "Cycles detected in lattice " << key;
n_fail++;
continue;
}
}
// Get the alignments
vector<int32> state_times;
CompactLatticeStateTimes(clat, &state_times);
// Cluster the arcs in the CompactLattice, write the cluster_id on the
// output label side.
// ClusterLattice() corresponds to the second part of the preprocessing in
// Dogan and Murat's paper -- clustering. Note that we do the first part
// of preprocessing (the weight pushing step) later when generating the
// factor transducer.
KALDI_VLOG(1) << "Arc clustering...";
bool success = false;
success = ClusterLattice(&clat, state_times);
if (!success) {
KALDI_WARN << "State id's and alignments do not match for lattice "
<< key;
n_fail++;
continue;
}
// The next part is something new, not in Dogan and Murat's paper. It is
// necessary because we have epsilon arcs, due to silences, in our
// lattices. We modify the factor transducer, while maintaining
// equivalence, to ensure that states don't have both epsilon *and*
// non-epsilon arcs entering them. (and the same, with "entering"
// replaced with "leaving"). Later we will find out which states have
// non-epsilon arcs leaving/entering them and use it to be more selective
// in adding arcs to connect them with the initial/final states. The goal
// here is to disallow silences at the beginning or ending of a keyword
// occurrence.
if (true) {
EnsureEpsilonProperty(&clat);
fst::TopSort(&clat);
// We have to recompute the state times because they will have changed.
CompactLatticeStateTimes(clat, &state_times);
}
// Generate factor transducer
// CreateFactorTransducer() corresponds to the "Factor Generation" part of
// Dogan and Murat's paper. But we also move the weight pushing step to
// this function as we have to compute the alphas and betas anyway.
KALDI_VLOG(1) << "Generating factor transducer...";
KwsProductFst factor_transducer;
int32 utterance_id = usymtab_reader.Value(key);
success = CreateFactorTransducer(clat, state_times, utterance_id,
&factor_transducer);
if (!success) {
KALDI_WARN << "Cannot generate factor transducer for lattice " << key;
n_fail++;
}
MaybeDoSanityCheck(factor_transducer);
// Remove long silence arc
// We add the filtering step in our implementation. This is because gap
// between two successive words in a query term should be less than 0.5s
KALDI_VLOG(1) << "Removing long silence...";
RemoveLongSilences(max_silence_frames, state_times, &factor_transducer);
MaybeDoSanityCheck(factor_transducer);
// Do factor merging, and return a transducer in T*T*T semiring. This step
// corresponds to the "Factor Merging" part in Dogan and Murat's paper.
KALDI_VLOG(1) << "Merging factors...";
KwsLexicographicFst index_transducer;
DoFactorMerging(&factor_transducer, &index_transducer);
MaybeDoSanityCheck(index_transducer);
// Do factor disambiguation. It corresponds to the "Factor Disambiguation"
// step in Dogan and Murat's paper.
KALDI_VLOG(1) << "Doing factor disambiguation...";
DoFactorDisambiguation(&index_transducer);
MaybeDoSanityCheck(index_transducer);
// Optimize the above factor transducer. It corresponds to the
// "Optimization" step in the paper.
KALDI_VLOG(1) << "Optimizing factor transducer...";
OptimizeFactorTransducer(&index_transducer, max_states, allow_partial);
MaybeDoSanityCheck(index_transducer);
// Write result
index_writer.Write(key, index_transducer);
n_done++;
}
KALDI_LOG << "Done " << n_done << " lattices, failed for " << n_fail;
if (strict == true)
return (n_done != 0 ? 0 : 1);
else
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/gst-plugin/gst-online-gmm-decode-faster.h<|end_filename|>
// gst-plugin/gst-online-decode-faster.h
// Copyright 2013 <NAME>, Tallinn University of Technology
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_GST_PLUGIN_GST_ONLINE_GMM_DECODE_FASTER_H_
#define KALDI_GST_PLUGIN_GST_ONLINE_GMM_DECODE_FASTER_H_
#include <vector>
#include <gst/gst.h>
#include "feat/feature-mfcc.h"
#include "online/online-audio-source.h"
#include "online/online-feat-input.h"
#include "online/online-decodable.h"
#include "online/online-faster-decoder.h"
#include "online/onlinebin-util.h"
#include "util/simple-options.h"
#include "gst-plugin/gst-audio-source.h"
namespace kaldi {
typedef OnlineFeInput<Mfcc> FeInput;
G_BEGIN_DECLS
/* #defines don't like whitespacey bits */
#define GST_TYPE_ONLINEGMMDECODEFASTER (gst_online_gmm_decode_faster_get_type())
#define GST_ONLINEGMMDECODEFASTER(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_ONLINEGMMDECODEFASTER, \
GstOnlineGmmDecodeFaster))
#define GST_ONLINEGMMDECODEFASTER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_ONLINEGMMDECODEFASTER, \
GstOnlineGmmDecodeFasterClass))
#define GST_IS_ONLINEGMMDECODEFASTER(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_ONLINEGMMDECODEFASTER))
#define GST_IS_ONLINEGMMDECODEFASTER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_ONLINEGMMDECODEFASTER))
typedef struct _GstOnlineGmmDecodeFaster GstOnlineGmmDecodeFaster;
typedef struct _GstOnlineGmmDecodeFasterClass GstOnlineGmmDecodeFasterClass;
uint32 kSampleFreq = 16000;
struct _GstOnlineGmmDecodeFaster {
GstElement element;
GstPad *sinkpad_, *srcpad_;
bool silent_;
OnlineFasterDecoder *decoder_;
Matrix<BaseFloat> *lda_transform_;
TransitionModel *trans_model_;
AmDiagGmm *am_gmm_;
fst::Fst<fst::StdArc> *decode_fst_;
fst::SymbolTable *word_syms_;
fst::VectorFst<LatticeArc> *out_fst_;
GstBufferSource *au_src_;
gchar *model_rspecifier_;
gchar *fst_rspecifier_;
gchar *word_syms_filename_;
gchar *lda_mat_rspecifier_;
std::vector<int32> *silence_phones_;
BaseFloat acoustic_scale_;
int32 cmn_window_;
int32 min_cmn_window_;
int32 right_context_, left_context_;
OnlineFasterDecoderOpts *decoder_opts_;
OnlineFeatureMatrixOptions *feature_reading_opts_;
SimpleOptions *simple_options_;
};
struct _GstOnlineGmmDecodeFasterClass {
GstElementClass parent_class;
void (*hyp_word)(GstElement *element, const gchar *hyp_str);
};
GType gst_online_gmm_decode_faster_get_type(void);
G_END_DECLS
}
#endif // KALDI_GST_PLUGIN_GST_ONLINE_GMM_DECODE_FASTER_H_
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-make-regtree.cc<|end_filename|>
// gmmbin/gmm-make-regtree.cc
// Copyright 2009-2011 Saarland University; Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/kaldi-io.h"
#include "util/text-utils.h"
#include "gmm/mle-am-diag-gmm.h"
#include "tree/context-dep.h"
#include "hmm/transition-model.h"
#include "transform/regression-tree.h"
int main(int argc, char *argv[]) {
try {
typedef kaldi::int32 int32;
typedef kaldi::BaseFloat BaseFloat;
const char *usage =
"Build regression class tree.\n"
"Usage: gmm-make-regtree [options] <model-file> <regtree-out>\n"
"E.g.: gmm-make-regtree --silphones=1:2:3 --state-occs=1.occs 1.mdl "
"1.regtree\n"
" [Note: state-occs come from --write-occs option of gmm-est]\n";
std::string occs_in_filename;
std::string sil_phones_str;
bool binary_write = true;
int32 max_leaves = 1;
kaldi::ParseOptions po(usage);
po.Register(
"state-occs", &occs_in_filename,
"File containing state occupancies (use --write-occs in gmm-est)");
po.Register("sil-phones", &sil_phones_str,
"Colon-separated list of integer ids of silence phones, e.g. "
"1:2:3; if used, create top-level speech/sil split (only one "
"reg-class for silence).");
po.Register("binary", &binary_write, "Write output in binary mode");
po.Register("max-leaves", &max_leaves,
"Maximum number of leaves in regression tree.");
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
std::string model_in_filename = po.GetArg(1),
tree_out_filename = po.GetArg(2);
kaldi::AmDiagGmm am_gmm;
kaldi::TransitionModel trans_model;
{
bool binary_read;
kaldi::Input ki(model_in_filename, &binary_read);
trans_model.Read(ki.Stream(), binary_read);
am_gmm.Read(ki.Stream(), binary_read);
}
kaldi::Vector<BaseFloat> state_occs;
if (occs_in_filename != "") {
bool binary_read;
kaldi::Input ki(occs_in_filename, &binary_read);
state_occs.Read(ki.Stream(), binary_read);
} else {
KALDI_LOG
<< "--state-occs option not provided so using constant occupancies.";
state_occs.Resize(am_gmm.NumPdfs());
state_occs.Set(1.0);
}
std::vector<int32> sil_pdfs;
if (sil_phones_str != "") {
std::vector<int32> sil_phones;
if (!kaldi::SplitStringToIntegers(sil_phones_str, ":", false,
&sil_phones))
KALDI_ERR << "invalid sil-phones option " << sil_phones_str;
std::sort(sil_phones.begin(), sil_phones.end());
bool ans = GetPdfsForPhones(trans_model, sil_phones, &sil_pdfs);
if (!ans)
KALDI_WARN
<< "Pdfs associated with silence phones are not only "
"associated with silence phones: your speech-silence split "
"may not be meaningful.";
}
kaldi::RegressionTree regtree;
regtree.BuildTree(state_occs, sil_pdfs, am_gmm, max_leaves);
// Write out the regression tree
{
kaldi::Output ko(tree_out_filename, binary_write);
regtree.Write(ko.Stream(), binary_write);
}
KALDI_LOG << "Written regression tree to " << tree_out_filename;
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/nlp/src/SENNA_nn.cpp<|end_filename|>
#include "SENNA_nn.h"
#include "SENNA_utils.h"
#include <string.h>
#include <float.h>
#ifdef USE_ATLAS_BLAS
#define USE_BLAS
#include "cblas.h"
#endif
#ifdef USE_MKL_BLAS
#define USE_BLAS
#include "mkl_cblas.h"
#endif
void SENNA_nn_lookup(float *dest, int dest_stride, const float *wordweights,
int wordsize, int maxwordidx, const int *wordindices,
int nword, int padidx, int npad) {
int i;
if (padidx < 0 || padidx >= maxwordidx)
SENNA_error("lookup: padding index out of range");
for (i = 0; i < npad; i++)
memcpy(dest + i * dest_stride, wordweights + padidx * wordsize,
wordsize * sizeof(float));
for (i = 0; i < nword; i++) {
int wordidx = wordindices[i];
if (wordidx < 0 || wordidx >= maxwordidx)
SENNA_error("lookup: index out of range");
memcpy(dest + (i + npad) * dest_stride, wordweights + wordidx * wordsize,
wordsize * sizeof(float));
}
for (i = 0; i < npad; i++)
memcpy(dest + (i + npad + nword) * dest_stride,
wordweights + padidx * wordsize, wordsize * sizeof(float));
}
void SENNA_nn_hardtanh(float *output, float *input, int size) {
int i;
for (i = 0; i < size; i++) {
float z = input[i];
if (z >= -1 && z <= 1)
output[i] = z;
else if (z < -1)
output[i] = -1;
else
output[i] = 1;
}
}
void SENNA_nn_linear(float *output, int output_size, float *weights,
float *biases, float *input, int input_size) {
#ifdef USE_BLAS
if (biases) cblas_scopy(output_size, biases, 1, output, 1);
cblas_sgemv(CblasColMajor, CblasTrans, input_size, output_size, 1.0, weights,
input_size, input, 1, (biases ? 1.0 : 0.0), output, 1);
#else
int i, j;
for (i = 0; i < output_size; i++) {
float z = (biases ? biases[i] : 0);
float *weights_row = weights + i * input_size;
for (j = 0; j < input_size; j++) z += input[j] * weights_row[j];
output[i] = z;
}
#endif
}
void SENNA_nn_max(float *value_, int *idx_, float *input, int input_size) {
float value = -FLT_MAX;
int idx = -1;
int i;
for (i = 0; i < input_size; i++) {
if (input[i] > value) {
value = input[i];
idx = i;
}
}
if (value_) *value_ = value;
if (idx_) *idx_ = idx;
}
void SENNA_nn_temporal_convolution(float *output, int output_frame_size,
float *weights, float *biases, float *input,
int input_frame_size, int n_frames,
int k_w) {
#ifdef USE_BLAS
if (k_w == 1) {
if (biases) {
int t;
for (t = 0; t < n_frames; t++)
cblas_scopy(output_frame_size, biases, 1,
output + t * output_frame_size, 1);
}
cblas_sgemm(CblasColMajor, CblasTrans, CblasNoTrans, output_frame_size,
n_frames, input_frame_size, 1.0, weights, input_frame_size,
input, input_frame_size, (biases ? 1.0 : 0.0), output,
output_frame_size);
} else
#endif
{
int t;
for (t = 0; t < n_frames - k_w + 1; t++)
SENNA_nn_linear(output + t * output_frame_size, output_frame_size,
weights, biases, input + t * input_frame_size,
input_frame_size * k_w);
}
}
void SENNA_nn_temporal_max_convolution(float *output, float *bias, float *input,
int input_frame_size, int n_frames,
int k_w) {
int i, j, k;
int h_k_w = (k_w - 1) / 2;
for (k = 0; k < n_frames; k++) {
for (i = 0; i < input_frame_size; i++) {
float maxval = -FLT_MAX;
for (j = -k; j < n_frames - k; j++) {
int jbias = j + h_k_w;
int jinput = k + j;
float z;
if (jbias < 0) jbias = 0;
if (jbias >= k_w) jbias = k_w - 1;
z = input[i + jinput * input_frame_size] +
bias[i + jbias * input_frame_size];
if (z > maxval) maxval = z;
}
output[i + k * input_frame_size] = maxval;
}
}
}
void SENNA_nn_temporal_max(float *output, float *input, int N, int T) {
int n, t;
for (n = 0; n < N; n++) {
float z = -FLT_MAX;
for (t = 0; t < T; t++) {
if (input[t * N + n] > z) z = input[t * N + n];
}
output[n] = z;
}
}
#define NN_MIN(a, b) ((a) < (b) ? (a) : (b))
#define NN_MAX(a, b) ((a) > (b) ? (a) : (b))
void SENNA_nn_distance(int *dest, int idx, int max_idx, int sentence_size,
int padding_size) {
int i;
max_idx = (max_idx - 1) / 2;
for (i = 0; i < padding_size; i++)
dest[i] =
NN_MAX(NN_MIN(i - padding_size - idx, max_idx), -max_idx) + max_idx;
for (i = 0; i < sentence_size; i++)
dest[i + padding_size] =
NN_MAX(NN_MIN(i - idx, max_idx), -max_idx) + max_idx;
for (i = 0; i < padding_size; i++)
dest[i + padding_size + sentence_size] =
NN_MAX(NN_MIN(i + sentence_size - idx, max_idx), -max_idx) + max_idx;
}
void SENNA_nn_viterbi(int *path, float *init, float *transition,
float *emission, int N, int T) {
float *delta, *deltap;
int *phi;
int i, j, t;
/* misc allocations */
delta = SENNA_malloc(sizeof(float), N);
deltap = SENNA_malloc(sizeof(float), N);
phi = SENNA_malloc(sizeof(int), N * T);
/* init */
for (i = 0; i < N; i++) deltap[i] = init[i] + emission[i];
/* recursion */
for (t = 1; t < T; t++) {
float *deltan = delta;
for (j = 0; j < N; j++) {
float maxValue = -FLT_MAX;
int maxIndex = 0;
for (i = 0; i < N; i++) {
float z = deltap[i] + transition[i + j * N];
if (z > maxValue) {
maxValue = z;
maxIndex = i;
}
}
delta[j] = maxValue + emission[j + t * N];
phi[j + t * N] = maxIndex;
}
delta = deltap;
deltap = deltan;
}
{
float maxValue = -FLT_MAX;
int maxIndex = 0;
for (j = 0; j < N; j++) {
if (deltap[j] > maxValue) {
maxValue = deltap[j];
maxIndex = j;
}
}
path[T - 1] = maxIndex;
}
for (t = T - 2; t >= 0; t--) path[t] = phi[path[t + 1] + (t + 1) * N];
SENNA_free(delta);
SENNA_free(deltap);
SENNA_free(phi);
}
<|start_filename|>tonic-suite/asr/src/fstext/push-special.h<|end_filename|>
// fstext/push-special.h
// Copyright 2012 Johns Hopkins Universithy (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_FSTEXT_PUSH_SPECIAL_H_
#define KALDI_FSTEXT_PUSH_SPECIAL_H_
#include <fst/fstlib.h>
#include <fst/fst-decl.h>
#include "util/const-integer-set.h"
namespace fst {
/*
This function does weight-pushing, in the log semiring,
but in a special way, such that any "leftover weight" after pushing
gets distributed evenly along the FST, and doesn't end up either
at the start or at the end. Basically it pushes the weights such
that the total weight of each state (i.e. the sum of the arc
probabilities plus the final-prob) is the same for all states.
*/
void PushSpecial(VectorFst<StdArc> *fst, float delta = kDelta);
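// Illustrative sketch (not part of the original header): a typical use is to
// read an FST, push it, and write it back out. The file names below are
// hypothetical.
//
//   fst::VectorFst<fst::StdArc> *g =
//       fst::VectorFst<fst::StdArc>::Read("G.fst");   // hypothetical path
//   fst::PushSpecial(g, 0.001);  // equalize the total weight of each state
//   g->Write("G_pushed.fst");
//   delete g;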
}
#endif
<|start_filename|>tonic-suite/asr/src/matrix/optimization.h<|end_filename|>
// matrix/optimization.h
// Copyright 2012 Johns Hopkins University (author: <NAME>)
//
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
//
// (*) incorporates, with permission, FFT code from his book
// "Signal Processing with Lapped Transforms", Artech, 1992.
#ifndef KALDI_MATRIX_OPTIMIZATION_H_
#define KALDI_MATRIX_OPTIMIZATION_H_
#include "matrix/kaldi-vector.h"
#include "matrix/kaldi-matrix.h"
namespace kaldi {
/// @addtogroup matrix_optimization
/// @{
struct LinearCgdOptions {
int32 max_iters; // Maximum number of iters (if >= 0).
BaseFloat max_error; // Maximum 2-norm of the residual A x - b (convergence
// test)
// Every time the residual 2-norm decreases by this recompute_residual_factor
// since the last time it was computed from scratch, recompute it from
// scratch. This helps to keep the computed residual accurate even in the
// presence of roundoff.
BaseFloat recompute_residual_factor;
LinearCgdOptions()
: max_iters(-1), max_error(0.0), recompute_residual_factor(0.01) {}
};
/*
This function uses linear conjugate gradient descent to approximately solve
the system A x = b. The value of x at entry corresponds to the initial guess
of x. The algorithm continues until the number of iterations equals b.Dim(),
or until the 2-norm of (A x - b) is <= max_error, or until the number of
iterations equals max_iters, whichever happens sooner. It is a requirement
that A be positive definite.
It returns the number of iterations that were actually executed (this is
useful for testing purposes).
*/
template <typename Real>
int32 LinearCgd(const LinearCgdOptions &opts, const SpMatrix<Real> &A,
const VectorBase<Real> &b, VectorBase<Real> *x);
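// Illustrative sketch (not from the original source): solving a small
// symmetric positive-definite system A x = b with LinearCgd. It is assumed
// that A has been filled elsewhere and is positive definite; x holds the
// initial guess on entry and the solution on exit.
//
//   LinearCgdOptions cg_opts;
//   cg_opts.max_error = 1.0e-05;
//   SpMatrix<double> A(10);          // ... fill with an SPD matrix ...
//   Vector<double> b(10), x(10);     // x initialized to zero as the guess
//   int32 iters_used = LinearCgd(cg_opts, A, b, &x);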
/**
This is an implementation of L-BFGS. It pushes responsibility for
determining when to stop, onto the user. There is no call-back here:
everything is done via calls to the class itself (see the example in
matrix-lib-test.cc). This does not implement constrained L-BFGS, but it will
handle constrained problems correctly as long as the function approaches
+infinity (or -infinity for maximization problems) when it gets close to the
bound of the constraint. In these types of problems, you just let the
function value be +infinity for minimization problems, or -infinity for
maximization problems, outside these bounds.
*/
struct LbfgsOptions {
bool minimize; // if true, we're minimizing, else maximizing.
int m; // m is the number of stored vectors L-BFGS keeps.
float first_step_learning_rate; // The very first step of L-BFGS is
// like gradient descent. If you want to configure the size of that step,
// you can do it using this variable.
float first_step_length; // If this variable is >0.0, it overrides
// first_step_learning_rate; on the first step we choose an approximate
// Hessian that is the multiple of the identity that would generate this
// step-length, or 1.0 if the gradient is zero.
float first_step_impr; // If this variable is >0.0, it overrides
// first_step_learning_rate; on the first step we choose an approximate
// Hessian that is the multiple of the identity that would generate this
// amount of objective function improvement (assuming the "real" objf
// was linear).
float c1; // A constant in Armijo rule = Wolfe condition i)
float c2; // A constant in Wolfe condition ii)
float d; // An amount > 1.0 (default 2.0) that we initially multiply or
// divide the step length by, in the line search.
int max_line_search_iters; // after this many iters we restart L-BFGS.
int avg_step_length; // number of iters to avg step length over, in
// RecentStepLength().
LbfgsOptions(bool minimize = true)
: minimize(minimize),
m(10),
first_step_learning_rate(1.0),
first_step_length(0.0),
first_step_impr(0.0),
c1(1.0e-04),
c2(0.9),
d(2.0),
max_line_search_iters(50),
avg_step_length(4) {}
};
template <typename Real>
class OptimizeLbfgs {
public:
/// Initializer takes the starting value of x.
OptimizeLbfgs(const VectorBase<Real> &x, const LbfgsOptions &opts);
/// This returns the value of the variable x that has the best objective
/// function so far, and the corresponding objective function value if
/// requested. This would typically be called only at the end.
const VectorBase<Real> &GetValue(Real *objf_value = NULL) const;
/// This returns the value at which the function wants us
/// to compute the objective function and gradient.
const VectorBase<Real> &GetProposedValue() const { return new_x_; }
/// Returns the average magnitude of the last n steps (but not
/// more than the number we have stored). Before we have taken
/// any steps, returns +infinity. Note: if the most recent
/// step length was 0, it returns 0, regardless of the other
/// step lengths. This makes it suitable as a convergence test
/// (else we'd generate NaN's).
Real RecentStepLength() const;
/// The user calls this function to provide the class with the
/// function and gradient info at the point GetProposedValue().
/// If this point is outside the constraints you can set function_value
/// to {+infinity,-infinity} for {minimization,maximization} problems.
/// In this case the gradient, and also the second derivative (if you call
/// the second overloaded version of this function) will be ignored.
void DoStep(Real function_value, const VectorBase<Real> &gradient);
/// The user can call this version of DoStep() if it is desired to set some
/// kind of approximate Hessian on this iteration. Note: it is a prerequisite
/// that diag_approx_2nd_deriv must be strictly positive (minimizing), or
/// negative (maximizing).
void DoStep(Real function_value, const VectorBase<Real> &gradient,
const VectorBase<Real> &diag_approx_2nd_deriv);
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(OptimizeLbfgs);
// The following variable says what stage of the computation we're at.
// Refer to Algorithm 7.5 (L-BFGS) of Nocedal & Wright, "Numerical
// Optimization", 2nd edition.
// kBeforeStep means we're about to do
// "compute p_k <-- - H_k \nabla f_k" (i.e. Algorithm 7.4).
// kWithinStep means we're at some point within line search; note
// that line search is iterative so we can stay in this state more
// than one time on each iteration.
enum ComputationState {
kBeforeStep,
kWithinStep, // This means we're within the step-size computation, and
// have not yet done the 1st function evaluation.
};
inline MatrixIndexT Dim() { return x_.Dim(); }
inline MatrixIndexT M() { return opts_.m; }
SubVector<Real> Y(MatrixIndexT i) {
return SubVector<Real>(data_, (i % M()) * 2); // vector y_i
}
SubVector<Real> S(MatrixIndexT i) {
return SubVector<Real>(data_, (i % M()) * 2 + 1); // vector s_i
}
// The following are subroutines within DoStep():
bool AcceptStep(Real function_value, const VectorBase<Real> &gradient);
void Restart(const VectorBase<Real> &x, Real function_value,
const VectorBase<Real> &gradient);
void ComputeNewDirection(Real function_value,
const VectorBase<Real> &gradient);
void ComputeHifNeeded(const VectorBase<Real> &gradient);
void StepSizeIteration(Real function_value, const VectorBase<Real> &gradient);
void RecordStepLength(Real s);
LbfgsOptions opts_;
SignedMatrixIndexT
k_; // Iteration number, starts from zero. Gets set back to zero
// when we restart.
ComputationState computation_state_;
bool H_was_set_; // True if the user specified H_; if false,
// we'll use a heuristic to estimate it.
Vector<Real> x_; // current x.
Vector<Real> new_x_; // the x proposed in the line search.
Vector<Real> best_x_; // the x with the best objective function so far
// (either the same as x_ or something in the current line search.)
Vector<Real> deriv_; // The most recently evaluated derivative-- at x_k.
Vector<Real> temp_;
Real f_; // The function evaluated at x_k.
Real best_f_; // the best objective function so far.
Real d_; // a number d > 1.0, but during an iteration we may decrease this,
// when
// we switch between armijo and wolfe failures.
int num_wolfe_i_failures_; // the num times we decreased step size.
int num_wolfe_ii_failures_; // the num times we increased step size.
enum {
kWolfeI,
kWolfeII,
kNone
} last_failure_type_; // last type of step-search
// failure on this iter.
Vector<Real> H_; // Current inverse-Hessian estimate. May be computed by
// this class itself,
// or provided by user using 2nd form of SetGradientInfo().
Matrix<Real> data_; // dimension (m*2) x dim. Even rows store
// gradients y_i, odd rows store steps s_i.
Vector<Real> rho_; // dimension m; rho_(m) = 1/(y_m^T s_m), Eq. 7.17.
std::vector<Real> step_lengths_; // The step sizes we took on the last
// (up to m) iterations; these are not stored in a rotating buffer but
// are shifted by one each time (this is more convenient when we
// restart, as we keep this info past restarting).
};
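// Illustrative usage sketch (an assumption based on the protocol described in
// the comment above, not code from the original header). The caller repeatedly
// asks for the proposed point, evaluates the objective function and gradient
// there, and reports them back; ComputeObjfAndGradient is a hypothetical
// user-supplied function.
//
//   Vector<double> x0(dim);                // starting point
//   LbfgsOptions lbfgs_opts(true);         // true => minimize
//   OptimizeLbfgs<double> lbfgs(x0, lbfgs_opts);
//   for (int32 iter = 0; iter < max_iters; iter++) {
//     const VectorBase<double> &x = lbfgs.GetProposedValue();
//     Vector<double> grad(dim);
//     double objf = ComputeObjfAndGradient(x, &grad);  // hypothetical
//     lbfgs.DoStep(objf, grad);
//     if (lbfgs.RecentStepLength() < 1.0e-05) break;   // simple convergence test
//   }
//   double best_objf;
//   const VectorBase<double> &best_x = lbfgs.GetValue(&best_objf);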
/// @}
} // end namespace kaldi
#endif
<|start_filename|>tonic-suite/asr/src/matrix/kaldi-vector-inl.h<|end_filename|>
// matrix/kaldi-vector-inl.h
// Copyright 2009-2011 <NAME>; Microsoft Corporation;
// <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
// This is an internal header file, included by other library headers.
// You should not attempt to use it directly.
#ifndef KALDI_MATRIX_KALDI_VECTOR_INL_H_
#define KALDI_MATRIX_KALDI_VECTOR_INL_H_ 1
namespace kaldi {
template <typename Real>
std::ostream &operator<<(std::ostream &os, const VectorBase<Real> &rv) {
rv.Write(os, false);
return os;
}
template <typename Real>
std::istream &operator>>(std::istream &is, VectorBase<Real> &rv) {
rv.Read(is, false);
return is;
}
template <typename Real>
std::istream &operator>>(std::istream &is, Vector<Real> &rv) {
rv.Read(is, false);
return is;
}
template <>
template <>
void VectorBase<float>::AddVec(const float alpha, const VectorBase<float> &rv);
template <>
template <>
void VectorBase<double>::AddVec<double>(const double alpha,
const VectorBase<double> &rv);
} // namespace kaldi
#endif // KALDI_MATRIX_KALDI_VECTOR_INL_H_
<|start_filename|>tonic-suite/asr/src/bin/acc-lda.cc<|end_filename|>
// bin/acc-lda.cc
// Copyright 2009-2011 Microsoft Corporation, Go-Vivace Inc.
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "hmm/posterior.h"
#include "transform/lda-estimate.h"
/** @brief Accumulate LDA statistics based on pdf-ids. Inputs are the
source model, which serves as the input (and may contain the current
transformation), the un-transformed features, and the state
posterior probabilities. */
int main(int argc, char *argv[]) {
using namespace kaldi;
typedef kaldi::int32 int32;
try {
const char *usage =
"Accumulate LDA statistics based on pdf-ids.\n"
"Usage: acc-lda [options] <transition-gmm/model> "
"<features-rspecifier> <posteriors-rspecifier> <lda-acc-out>\n"
"Typical usage:\n"
" ali-to-post ark:1.ali ark:- | lda-acc 1.mdl \"ark:splice-feats "
"scp:train.scp|\" ark:- ldaacc.1\n";
bool binary = true;
BaseFloat rand_prune = 0.0;
ParseOptions po(usage);
po.Register("binary", &binary, "Write accumulators in binary mode.");
po.Register("rand-prune", &rand_prune,
"Randomized pruning threshold for posteriors");
po.Read(argc, argv);
if (po.NumArgs() != 4) {
po.PrintUsage();
exit(1);
}
std::string model_rxfilename = po.GetArg(1);
std::string features_rspecifier = po.GetArg(2);
std::string posteriors_rspecifier = po.GetArg(3);
std::string acc_wxfilename = po.GetArg(4);
TransitionModel trans_model;
{
bool binary_read;
Input ki(model_rxfilename, &binary_read);
trans_model.Read(ki.Stream(), binary_read);
// discard rest of file.
}
LdaEstimate lda;
SequentialBaseFloatMatrixReader feature_reader(features_rspecifier);
RandomAccessPosteriorReader posterior_reader(posteriors_rspecifier);
int32 num_done = 0, num_fail = 0;
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string utt = feature_reader.Key();
if (!posterior_reader.HasKey(utt)) {
KALDI_WARN << "No posteriors for utterance " << utt;
num_fail++;
continue;
}
const Posterior &post(posterior_reader.Value(utt));
const Matrix<BaseFloat> &feats(feature_reader.Value());
if (lda.Dim() == 0) lda.Init(trans_model.NumPdfs(), feats.NumCols());
if (feats.NumRows() != static_cast<int32>(post.size())) {
KALDI_WARN << "Posterior vs. feats size mismatch " << feats.NumRows()
<< " vs. " << post.size();
num_fail++;
continue;
}
if (lda.Dim() != 0 && lda.Dim() != feats.NumCols()) {
KALDI_WARN << "Feature dimension mismatch " << lda.Dim() << " vs. "
<< feats.NumCols();
num_fail++;
continue;
}
Posterior pdf_post;
ConvertPosteriorToPdfs(trans_model, post, &pdf_post);
for (int32 i = 0; i < feats.NumRows(); i++) {
SubVector<BaseFloat> feat(feats, i);
for (size_t j = 0; j < pdf_post[i].size(); j++) {
int32 pdf_id = pdf_post[i][j].first;
BaseFloat weight = RandPrune(pdf_post[i][j].second, rand_prune);
if (weight != 0.0) {
lda.Accumulate(feat, pdf_id, weight);
}
}
}
num_done++;
if (num_done % 100 == 0)
KALDI_LOG << "Done " << num_done << " utterances.";
}
KALDI_LOG << "Done " << num_done << " files, failed for " << num_fail;
Output ko(acc_wxfilename, binary);
lda.Write(ko.Stream(), binary);
KALDI_LOG << "Written statistics.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/latbin/lattice-to-phone-lattice.cc<|end_filename|>
// latbin/lattice-to-phone-lattice.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "fstext/fstext-lib.h"
#include "lat/kaldi-lattice.h"
#include "lat/lattice-functions.h"
#include "hmm/transition-model.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
using fst::SymbolTable;
using fst::VectorFst;
using fst::StdArc;
const char *usage =
"Convert the words or transition-ids into phones, which are worked "
"out\n"
"from the transition-ids. If --replace-words=true (true by default),\n"
"replaces the words with phones, otherwise replaces the "
"transition-ids.\n"
"\n"
"Usage: lattice-to-phone-lattice [options] model lattice-rspecifier "
"lattice-wspecifier\n"
" e.g.: lattice-to-phone-lattice 1.mdl ark:1.lats ark:phones.lats\n";
ParseOptions po(usage);
bool replace_words = true;
po.Register("replace-words", &replace_words,
"If true, replace words with phones; otherwise replace "
"transition-ids with phones.");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string model_rxfilename = po.GetArg(1), lats_rspecifier = po.GetArg(2),
lats_wspecifier = po.GetArg(3);
int32 n_done = 0;
TransitionModel trans_model;
ReadKaldiObject(model_rxfilename, &trans_model);
SequentialCompactLatticeReader clat_reader(lats_rspecifier);
CompactLatticeWriter clat_writer(lats_wspecifier); // write as compact.
for (; !clat_reader.Done(); clat_reader.Next()) {
if (replace_words) {
Lattice lat;
ConvertLattice(clat_reader.Value(), &lat);
ConvertLatticeToPhones(trans_model,
&lat); // this function replaces words -> phones
CompactLattice clat;
ConvertLattice(lat, &clat);
clat_writer.Write(clat_reader.Key(), clat);
} else { // replace transition-ids with phones.
CompactLattice clat(clat_reader.Value());
ConvertCompactLatticeToPhones(trans_model, &clat);
// this function replaces transition-ids with phones. We do it in the
// CompactLattice form, in order to preserve the alignment of
// transition-id sequences/phones-sequences to words [e.g. if you just
// did lattice-align-words].
clat_writer.Write(clat_reader.Key(), clat);
}
n_done++;
}
KALDI_LOG << "Done converting " << n_done << " lattices.";
return (n_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/latbin/nbest-to-linear.cc<|end_filename|>
// latbin/nbest-to-linear.cc
// Copyright 2012 Johns Hopkins University (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "fstext/fstext-lib.h"
#include "lat/kaldi-lattice.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
using fst::SymbolTable;
using fst::VectorFst;
using fst::StdArc;
const char *usage =
"Takes as input lattices/n-bests which must be linear (single path);\n"
"convert from lattice to up to 4 archives containing transcriptions, "
"alignments,\n"
"and acoustic and LM costs (note: use ark:/dev/null for unwanted "
"outputs)\n"
"Usage: nbest-to-linear [options] <nbest-rspecifier> "
"<alignments-wspecifier> "
"[<transcriptions-wspecifier> [<lm-cost-wspecifier> "
"[<ac-cost-wspecifier>]]]\n"
" e.g.: lattice-to-nbest --n=10 ark:1.lats ark:- | \\\n"
" nbest-to-linear ark:1.lats ark,t:1.ali ark,t:1.tra\n";
ParseOptions po(usage);
po.Read(argc, argv);
if (po.NumArgs() < 2 || po.NumArgs() > 5) {
po.PrintUsage();
exit(1);
}
std::string lats_rspecifier = po.GetArg(1), ali_wspecifier = po.GetArg(2),
trans_wspecifier = po.GetOptArg(3),
lm_cost_wspecifier = po.GetOptArg(4),
ac_cost_wspecifier = po.GetOptArg(5);
SequentialLatticeReader lattice_reader(lats_rspecifier);
Int32VectorWriter ali_writer(ali_wspecifier);
Int32VectorWriter trans_writer(trans_wspecifier);
BaseFloatWriter lm_cost_writer(lm_cost_wspecifier);
BaseFloatWriter ac_cost_writer(ac_cost_wspecifier);
int32 n_done = 0, n_err = 0;
for (; !lattice_reader.Done(); lattice_reader.Next()) {
std::string key = lattice_reader.Key();
Lattice lat = lattice_reader.Value();
std::vector<int32> ilabels;
std::vector<int32> olabels;
LatticeWeight weight;
if (!GetLinearSymbolSequence(lat, &ilabels, &olabels, &weight)) {
KALDI_WARN
<< "Lattice/nbest for key " << key
<< " had wrong format: "
"note, this program expects input with one path, e.g. from "
"lattice-to-nbest.";
n_err++;
} else {
if (ali_wspecifier != "") ali_writer.Write(key, ilabels);
if (trans_wspecifier != "") trans_writer.Write(key, olabels);
if (lm_cost_wspecifier != "")
lm_cost_writer.Write(key, weight.Value1());
if (ac_cost_wspecifier != "")
ac_cost_writer.Write(key, weight.Value2());
n_done++;
}
}
KALDI_LOG << "Done " << n_done << " n-best entries, " << n_err
<< " had errors.";
return (n_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/gmm/mle-diag-gmm-test.cc<|end_filename|>
// gmm/mle-diag-gmm-test.cc
// Copyright 2009-2011 <NAME>; <NAME>; Saarland University;
// Microsoft Corporation; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "gmm/diag-gmm.h"
#include "gmm/diag-gmm-normal.h"
#include "gmm/mle-diag-gmm.h"
#include "util/kaldi-io.h"
using namespace kaldi;
void TestComponentAcc(const DiagGmm &gmm, const Matrix<BaseFloat> &feats) {
MleDiagGmmOptions config;
AccumDiagGmm est_atonce; // updates all components
AccumDiagGmm est_compwise; // updates single components
// Initialize estimators
est_atonce.Resize(gmm.NumGauss(), gmm.Dim(), kGmmAll);
est_atonce.SetZero(kGmmAll);
est_compwise.Resize(gmm.NumGauss(), gmm.Dim(), kGmmAll);
est_compwise.SetZero(kGmmAll);
// accumulate estimators
for (int32 i = 0; i < feats.NumRows(); i++) {
est_atonce.AccumulateFromDiag(gmm, feats.Row(i), 1.0F);
Vector<BaseFloat> post(gmm.NumGauss());
gmm.ComponentPosteriors(feats.Row(i), &post);
for (int32 m = 0; m < gmm.NumGauss(); m++) {
est_compwise.AccumulateForComponent(feats.Row(i), m, post(m));
}
}
DiagGmm gmm_atonce; // model with all components accumulated together
DiagGmm gmm_compwise; // model with each component accumulated separately
gmm_atonce.Resize(gmm.NumGauss(), gmm.Dim());
gmm_compwise.Resize(gmm.NumGauss(), gmm.Dim());
MleDiagGmmUpdate(config, est_atonce, kGmmAll, &gmm_atonce, NULL, NULL);
MleDiagGmmUpdate(config, est_compwise, kGmmAll, &gmm_compwise, NULL, NULL);
// the two ways of updating should result in the same model
double loglike0 = 0.0;
double loglike1 = 0.0;
double loglike2 = 0.0;
for (int32 i = 0; i < feats.NumRows(); i++) {
loglike0 += static_cast<double>(gmm.LogLikelihood(feats.Row(i)));
loglike1 += static_cast<double>(gmm_atonce.LogLikelihood(feats.Row(i)));
loglike2 += static_cast<double>(gmm_compwise.LogLikelihood(feats.Row(i)));
}
std::cout << "Per-frame log-likelihood before update = "
<< (loglike0 / feats.NumRows()) << '\n';
std::cout << "Per-frame log-likelihood (accumulating at once) = "
<< (loglike1 / feats.NumRows()) << '\n';
std::cout << "Per-frame log-likelihood (accumulating component-wise) = "
<< (loglike2 / feats.NumRows()) << '\n';
AssertEqual(loglike1, loglike2, 1.0e-6);
if (est_atonce.NumGauss() != gmm.NumGauss()) {
KALDI_WARN << "Unable to pass test_update_flags() test because of "
"component removal during Update() call (this is normal)";
return;
} else {
AssertGeq(loglike1, loglike0, 1.0e-6);
AssertGeq(loglike2, loglike0, 1.0e-6);
}
}
void test_flags_driven_update(const DiagGmm &gmm,
const Matrix<BaseFloat> &feats,
GmmFlagsType flags) {
MleDiagGmmOptions config;
AccumDiagGmm est_gmm_allp; // updates all params
// let's trust that all-params update works
AccumDiagGmm est_gmm_somep; // updates params indicated by flags
// warm-up estimators
est_gmm_allp.Resize(gmm.NumGauss(), gmm.Dim(), kGmmAll);
est_gmm_allp.SetZero(kGmmAll);
est_gmm_somep.Resize(gmm.NumGauss(), gmm.Dim(), flags);
est_gmm_somep.SetZero(flags);
// accumulate estimators
for (int32 i = 0; i < feats.NumRows(); i++) {
est_gmm_allp.AccumulateFromDiag(gmm, feats.Row(i), 1.0F);
est_gmm_somep.AccumulateFromDiag(gmm, feats.Row(i), 1.0F);
}
DiagGmm gmm_all_update; // model with all params updated
DiagGmm gmm_some_update; // model with some params updated
gmm_all_update.CopyFromDiagGmm(gmm); // init with orig. model
gmm_some_update.CopyFromDiagGmm(gmm); // init with orig. model
MleDiagGmmUpdate(config, est_gmm_allp, kGmmAll, &gmm_all_update, NULL, NULL);
MleDiagGmmUpdate(config, est_gmm_somep, flags, &gmm_some_update, NULL, NULL);
if (est_gmm_allp.NumGauss() != gmm.NumGauss()) {
KALDI_WARN << "Unable to pass test_update_flags() test because of "
"component removal during Update() call (this is normal)";
return;
}
// now back-off the gmm_all_update params that were not updated
// in gmm_some_update to orig.
if (~flags & kGmmWeights) gmm_all_update.SetWeights(gmm.weights());
if (~flags & kGmmMeans) {
Matrix<BaseFloat> means(gmm.NumGauss(), gmm.Dim());
gmm.GetMeans(&means);
gmm_all_update.SetMeans(means);
}
if (~flags & kGmmVariances) {
Matrix<BaseFloat> vars(gmm.NumGauss(), gmm.Dim());
gmm.GetVars(&vars);
vars.InvertElements();
gmm_all_update.SetInvVars(vars);
}
gmm_all_update.ComputeGconsts();
// now both models gmm_all_update and gmm_some_update have the same params
// updated; compute the log-likelihood of each model as a check
double loglike0 = 0.0;
double loglike1 = 0.0;
double loglike2 = 0.0;
for (int32 i = 0; i < feats.NumRows(); i++) {
loglike0 += static_cast<double>(gmm.LogLikelihood(feats.Row(i)));
loglike1 += static_cast<double>(gmm_all_update.LogLikelihood(feats.Row(i)));
loglike2 +=
static_cast<double>(gmm_some_update.LogLikelihood(feats.Row(i)));
}
if ((flags & kGmmVariances) && !(flags & kGmmMeans))
return; // Don't run the test as the variance update gives a different
// answer if you don't update the mean.
AssertEqual(loglike1, loglike2, 1.0e-6);
}
void test_io(const DiagGmm &gmm, const AccumDiagGmm &est_gmm, bool binary,
const Matrix<BaseFloat> &feats) {
std::cout << "Testing I/O, binary = " << binary << '\n';
est_gmm.Write(Output("tmp_stats", binary).Stream(), binary);
bool binary_in;
AccumDiagGmm est_gmm2;
est_gmm2.Resize(est_gmm.NumGauss(), est_gmm.Dim(), kGmmAll);
Input ki("tmp_stats", &binary_in);
est_gmm2.Read(ki.Stream(), binary_in, false); // not adding
Input ki2("tmp_stats", &binary_in);
est_gmm2.Read(ki2.Stream(), binary_in, true); // adding
est_gmm2.Scale(0.5, kGmmAll);
// 0.5 -> make it same as what it would have been if we read just once.
// [may affect it due to removal of components with small counts].
MleDiagGmmOptions config;
DiagGmm gmm1;
DiagGmm gmm2;
gmm1.CopyFromDiagGmm(gmm);
gmm2.CopyFromDiagGmm(gmm);
MleDiagGmmUpdate(config, est_gmm, est_gmm.Flags(), &gmm1, NULL, NULL);
MleDiagGmmUpdate(config, est_gmm2, est_gmm2.Flags(), &gmm2, NULL, NULL);
BaseFloat loglike1 = 0.0;
BaseFloat loglike2 = 0.0;
for (int32 i = 0; i < feats.NumRows(); i++) {
loglike1 += gmm1.LogLikelihood(feats.Row(i));
loglike2 += gmm2.LogLikelihood(feats.Row(i));
}
AssertEqual(loglike1, loglike2, 1.0e-6);
unlink("tmp_stats");
}
void UnitTestEstimateDiagGmm() {
size_t dim = 15; // dimension of the gmm
size_t nMix = 9; // number of mixtures in the data
size_t maxiterations = 20; // number of iterations for estimation
// maximum number of densities in the GMM
// larger than the number of mixtures in the data
// so that we can test the removal of unseen components
int32 maxcomponents = 10;
// generate random feature vectors
Matrix<BaseFloat> means_f(nMix, dim), vars_f(nMix, dim);
// first, generate random mean and variance vectors
for (size_t m = 0; m < nMix; m++) {
for (size_t d = 0; d < dim; d++) {
means_f(m, d) = kaldi::RandGauss() * 100.0F;
vars_f(m, d) = exp(kaldi::RandGauss()) * 1000.0F + 1.0F;
}
// std::cout << "Gauss " << m << ": Mean = " << means_f.Row(m) << '\n'
// << "Vars = " << vars_f.Row(m) << '\n';
}
// second, generate 1000 feature vectors for each of the mixture components
size_t counter = 0, multiple = 200;
Matrix<BaseFloat> feats(nMix * multiple, dim);
for (size_t m = 0; m < nMix; m++) {
for (size_t i = 0; i < multiple; i++) {
for (size_t d = 0; d < dim; d++) {
feats(counter, d) =
means_f(m, d) + kaldi::RandGauss() * std::sqrt(vars_f(m, d));
}
counter++;
}
}
// Compute the global mean and variance
Vector<BaseFloat> mean_acc(dim);
Vector<BaseFloat> var_acc(dim);
Vector<BaseFloat> featvec(dim);
for (size_t i = 0; i < counter; i++) {
featvec.CopyRowFromMat(feats, i);
mean_acc.AddVec(1.0, featvec);
featvec.ApplyPow(2.0);
var_acc.AddVec(1.0, featvec);
}
mean_acc.Scale(1.0F / counter);
var_acc.Scale(1.0F / counter);
var_acc.AddVec2(-1.0, mean_acc);
// std::cout << "Mean acc = " << mean_acc << '\n' << "Var acc = "
// << var_acc << '\n';
// write the feature vectors to a file
// std::ofstream of("tmpfeats");
// of.precision(10);
// of << feats;
// of.close();
// now generate randomly initial values for the GMM
Vector<BaseFloat> weights(1);
Matrix<BaseFloat> means(1, dim), vars(1, dim), invvars(1, dim);
for (size_t d = 0; d < dim; d++) {
means(0, d) = kaldi::RandGauss() * 100.0F;
vars(0, d) = exp(kaldi::RandGauss()) * 10.0F + 1e-5F;
}
weights(0) = 1.0F;
invvars.CopyFromMat(vars);
invvars.InvertElements();
// new GMM
DiagGmm *gmm = new DiagGmm();
gmm->Resize(1, dim);
gmm->SetWeights(weights);
gmm->SetInvVarsAndMeans(invvars, means);
gmm->ComputeGconsts();
{
KALDI_LOG << "Testing natural<>normal conversion";
DiagGmmNormal ngmm(*gmm);
DiagGmm rgmm;
rgmm.Resize(1, dim);
ngmm.CopyToDiagGmm(&rgmm);
// check contents
KALDI_ASSERT(ApproxEqual(weights(0), 1.0F, 1e-6));
KALDI_ASSERT(ApproxEqual(gmm->weights()(0), rgmm.weights()(0), 1e-6));
for (int32 d = 0; d < dim; d++) {
KALDI_ASSERT(ApproxEqual(means.Row(0)(d), ngmm.means_.Row(0)(d), 1e-6));
KALDI_ASSERT(
ApproxEqual(1. / invvars.Row(0)(d), ngmm.vars_.Row(0)(d), 1e-6));
KALDI_ASSERT(ApproxEqual(gmm->means_invvars().Row(0)(d),
rgmm.means_invvars().Row(0)(d), 1e-6));
KALDI_ASSERT(ApproxEqual(gmm->inv_vars().Row(0)(d),
rgmm.inv_vars().Row(0)(d), 1e-6));
}
KALDI_LOG << "OK";
}
AccumDiagGmm est_gmm;
// var_acc.Scale(0.1);
// est_gmm.config_.p_variance_floor_vector = &var_acc;
MleDiagGmmOptions config;
config.min_variance = 0.01;
GmmFlagsType flags = kGmmAll; // Should later try reducing this.
est_gmm.Resize(gmm->NumGauss(), gmm->Dim(), flags);
// iterate
size_t iteration = 0;
float lastloglike = 0.0;
int32 lastloglike_nM = 0;
while (iteration < maxiterations) {
Vector<BaseFloat> featvec(dim);
est_gmm.Resize(gmm->NumGauss(), gmm->Dim(), flags);
est_gmm.SetZero(flags);
double loglike = 0.0;
for (size_t i = 0; i < counter; i++) {
featvec.CopyRowFromMat(feats, i);
loglike +=
static_cast<double>(est_gmm.AccumulateFromDiag(*gmm, featvec, 1.0F));
}
std::cout << "Loglikelihood before iteration " << iteration << " : "
<< std::scientific << loglike
<< " number of components: " << gmm->NumGauss() << '\n';
// every 5th iteration check loglike change and update lastloglike
if (iteration % 5 == 0) {
// likelihood should be increasing on the long term
if ((iteration > 0) && (gmm->NumGauss() >= lastloglike_nM)) {
KALDI_ASSERT(loglike - lastloglike >= -1.0);
}
lastloglike = loglike;
lastloglike_nM = gmm->NumGauss();
}
// binary write
est_gmm.Write(Output("tmp_stats", true).Stream(), true);
// binary read
bool binary_in;
Input ki("tmp_stats", &binary_in);
est_gmm.Read(ki.Stream(), binary_in, false); // false = not adding.
BaseFloat obj, count;
MleDiagGmmUpdate(config, est_gmm, flags, gmm, &obj, &count);
KALDI_LOG << "ML objective function change = " << (obj / count)
<< " per frame, over " << (count) << " frames.";
if ((iteration % 3 == 1) && (gmm->NumGauss() * 2 <= maxcomponents)) {
gmm->Split(gmm->NumGauss() * 2, 0.001);
}
if (iteration == 5) { // run following tests with not too overfitted model
std::cout << "Testing flags-driven updates" << '\n';
test_flags_driven_update(*gmm, feats, kGmmAll);
test_flags_driven_update(*gmm, feats, kGmmWeights);
test_flags_driven_update(*gmm, feats, kGmmMeans);
test_flags_driven_update(*gmm, feats, kGmmVariances);
test_flags_driven_update(*gmm, feats, kGmmWeights | kGmmMeans);
std::cout << "Testing component-wise accumulation" << '\n';
TestComponentAcc(*gmm, feats);
}
iteration++;
}
{ // I/O tests
GmmFlagsType flags_all = kGmmAll;
est_gmm.Resize(gmm->NumGauss(), gmm->Dim(), flags_all);
est_gmm.SetZero(flags_all);
float loglike = 0.0;
for (size_t i = 0; i < counter; i++) {
loglike += est_gmm.AccumulateFromDiag(*gmm, feats.Row(i), 1.0F);
}
test_io(*gmm, est_gmm, false, feats); // ASCII mode
test_io(*gmm, est_gmm, true, feats); // Binary mode
}
{ // Test multi-threaded update.
GmmFlagsType flags_all = kGmmAll;
est_gmm.Resize(gmm->NumGauss(), gmm->Dim(), flags_all);
est_gmm.SetZero(flags_all);
Vector<BaseFloat> weights(counter);
for (size_t i = 0; i < counter; i++) weights(i) = 0.5 + 0.1 * (Rand() % 10);
float loglike = 0.0;
for (size_t i = 0; i < counter; i++) {
loglike += weights(i) *
est_gmm.AccumulateFromDiag(*gmm, feats.Row(i), weights(i));
}
AccumDiagGmm est_gmm2(*gmm, flags_all);
int32 num_threads = 2;
float loglike2 = est_gmm2.AccumulateFromDiagMultiThreaded(
*gmm, feats, weights, num_threads);
AssertEqual(loglike, loglike2);
est_gmm.AssertEqual(est_gmm2);
}
delete gmm;
unlink("tmp_stats");
}
int main() {
// repeat the test twice
for (int i = 0; i < 2; i++) UnitTestEstimateDiagGmm();
std::cout << "Test OK.\n";
}
<|start_filename|>tonic-suite/asr/src/featbin/interpolate-pitch.cc<|end_filename|>
// featbin/interpolate-pitch.cc
// Copyright 2013 <NAME>
// Johns Hopkins University (author: <NAME>)
//
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
namespace kaldi {
struct PitchInterpolatorOptions {
BaseFloat pitch_interval; // Discretization interval [affects efficiency]
BaseFloat
interpolator_factor; // This affects the tendency of the algorithm to
// follow the observed pitch contours versus pick its own path which will tend
// to be closer to a straight line.
BaseFloat max_voicing_prob; // The p(voicing) we use when the observed
// p(voicing) approaches 1.0 (0.9 is probably suitable; a value < 1 allows the
// algorithm to deviate from the observed pitch even if p(voicing) = 1).
BaseFloat max_pitch_change_per_frame;
PitchInterpolatorOptions()
: pitch_interval(4.0),
interpolator_factor(1.0e-05),
max_voicing_prob(0.9),
max_pitch_change_per_frame(10.0) {}
void Register(OptionsItf *po) {
po->Register("pitch-interval", &pitch_interval,
"Frequency interval in Hz, used "
"for the pitch interpolation and smoothing algorithm.");
po->Register(
"interpolator-factor", &interpolator_factor,
"Factor affecting the "
"interpolation algorithm; setting it closer to zero will cause "
"it to follow the measured pitch more faithfully but less "
"smoothly");
po->Register(
"max-voicing-prob", &max_voicing_prob,
"Probability of voicing the "
"algorithm uses as the observed p(voicing) approaches 1; having "
"value <1 allows it to interpolate even if p(voicing) = 1");
po->Register(
"max-pitch-change-per-frame", &max_pitch_change_per_frame,
"This value should be set large enough to no longer affect the "
"results, but the larger it is the slower the algorithm will be.");
}
void Check() const {
KALDI_ASSERT(pitch_interval > 0.0 && pitch_interval < 20.0 &&
interpolator_factor > 0.0 && interpolator_factor < 1.0 &&
max_voicing_prob <= 1.0 && max_voicing_prob >= 0.5 &&
max_pitch_change_per_frame > 2.0 * pitch_interval);
}
};
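// Illustrative example (an assumption, not from the original source): these
// options surface as command-line flags of the interpolate-pitch binary, e.g.
//
//   interpolate-pitch --pitch-interval=4.0 --interpolator-factor=1e-05 \
//       --max-voicing-prob=0.9 scp:pitch.scp ark:pitch_interpolated.ark
//
// where pitch.scp and pitch_interpolated.ark are hypothetical table names.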
struct PitchInterpolatorStats {
int64 num_frames_tot;
int64 num_frames_zero; // #frames that were zero in original pitch.
int64 num_frames_changed; // #frames that were not zero originally, but
// which the algorithm changed.
PitchInterpolatorStats()
: num_frames_tot(0), num_frames_zero(0), num_frames_changed(0) {}
void Print() {
BaseFloat zero_percent = num_frames_zero * 100.0 / num_frames_tot,
changed_percent = num_frames_changed * 100.0 / num_frames_tot;
KALDI_LOG << "Over " << num_frames_tot << " frames, " << zero_percent
<< "% were zero at input, and " << changed_percent
<< "% were not zero but were changed.";
}
};
class PitchInterpolator {
public:
PitchInterpolator(const PitchInterpolatorOptions &opts,
Matrix<BaseFloat> *mat, PitchInterpolatorStats *stats)
: opts_(opts) {
opts.Check();
InitValues(*mat);
Forward();
Backtrace(mat, stats);
}
private:
void InitValues(const Matrix<BaseFloat> &mat) {
BaseFloat pitch_interval = opts_.pitch_interval;
num_frames_ = mat.NumRows();
KALDI_ASSERT(mat.NumCols() == 2);
BaseFloat min_pitch = 1.0e+10, max_pitch = 0.0;
pitch_.resize(num_frames_);
p_voicing_.resize(num_frames_);
for (int32 f = 0; f < num_frames_; f++) {
BaseFloat p_voicing = mat(f, 0), pitch = mat(f, 1);
p_voicing *= opts_.max_voicing_prob;
if (pitch == 0.0) {
p_voicing = 0.0; // complete uncertainty about real pitch.
} else {
if (pitch < min_pitch) min_pitch = pitch;
if (pitch > max_pitch) max_pitch = pitch;
}
p_voicing_[f] = p_voicing;
}
if (max_pitch == 0.0) { // No voiced frames at all.
min_pitch = 100.0;
max_pitch = 100.0;
}
if (max_pitch <= min_pitch + (2.0 * pitch_interval)) {
max_pitch = min_pitch + 2.0 * pitch_interval;
} // avoid crashes.
// Note: the + 2 here is for edge effects.
num_pitches_ = floor((max_pitch - min_pitch) / pitch_interval + 0.5) + 2;
KALDI_ASSERT(num_pitches_ >= 3);
min_pitch_.resize(num_frames_);
for (int32 f = 0; f < num_frames_; f++) {
min_pitch_[f] = min_pitch - pitch_interval * RandUniform(); // bottom of
// discretization range for each frame is randomly different.
BaseFloat pitch = mat(f, 1);
if (pitch == 0.0) {
pitch_[f] =
0; // This will actually be a don't-care value; we just put in
// some value that won't crash the algorithm.
} else {
int32 int_pitch = floor((pitch - min_pitch_[f]) / pitch_interval + 0.5);
KALDI_ASSERT(int_pitch >= 0 && int_pitch < num_pitches_);
pitch_[f] = int_pitch;
}
}
}
void MultiplyObsProb(int32 t) {
// For the forward computation:
// Multiplies the observation probabilities into alpha at time t.
// constant_prob is the constant part that does not depend on the pitch
// value:
BaseFloat constant_prob = (1.0 - p_voicing_[t]) * opts_.interpolator_factor,
specified_prob = p_voicing_[t] + constant_prob;
// specified_prob adds in the extra probability mass at the observed pitch
// value.
BaseFloat log_constant_prob = log(constant_prob),
log_ratio = log(specified_prob / constant_prob);
log_alpha_.Add(log_constant_prob); // add log_constant_prob to all pitches
// at this time.
log_alpha_(pitch_[t]) += log_ratio; // corrects this to be like adding
// log(specified_prob) to the observed pitch at this time. Note: if
// pitch_[t] == 0,
// this won't have any effect because log_ratio will be zero too.
Vector<BaseFloat> temp_rand(num_pitches_);
temp_rand.SetRandn(); // Set to Gaussian noise. Type of noise doesn't
// really matter.
log_alpha_.AddVec(0.01,
temp_rand); // We add a small amount of noise to the
// observation probabilities; this has the effect of breaking symmetries in
// a more random way to overcome certain weirdnesses that could otherwise
// happen due to the discretization.
}
// This function updates log_alpha_, as a function of prev_log_alpha_; it also
// updates back_pointers_[t];
void ComputeTransitionProb(int32 t) {
KALDI_ASSERT(t > 0);
BaseFloat pitch_interval = opts_.pitch_interval;
back_pointers_[t].resize(num_pitches_);
// The transition log-probability between pitch p and p' on times t-1 and t
// is -0.5 * (p - p')^2, with the pitch measured in Hz. We're doing Viterbi,
// so for each p we pick the max over the previous frame's pitch values.
KALDI_ASSERT(t > 0 && t < num_frames_);
int32 K = floor(opts_.max_pitch_change_per_frame / pitch_interval + 0.5);
// K is max #bins we can move; a kind of pruning, for speed.
for (int32 p = 0; p < num_pitches_; p++) {
int32 min_prev_p = p - K, max_prev_p = p + K;
if (min_prev_p < 0) min_prev_p = 0;
if (max_prev_p >= num_pitches_) max_prev_p = num_pitches_ - 1;
BaseFloat best_logprob = -1.0e+10;
int32 best_prev_p = -1;
for (int32 prev_p = min_prev_p; prev_p <= max_prev_p; prev_p++) {
BaseFloat delta_pitch = (min_pitch_[t - 1] + prev_p * pitch_interval) -
(min_pitch_[t] + p * pitch_interval);
BaseFloat this_logprob =
prev_log_alpha_(prev_p) - 0.5 * delta_pitch * delta_pitch;
if (this_logprob > best_logprob) {
best_logprob = this_logprob;
best_prev_p = prev_p;
}
}
back_pointers_[t][p] = best_prev_p;
log_alpha_(p) = best_logprob;
}
}
void Forward() {
// Viterbi in a discrete model of the pitch, in which the observation
// probability of a pitch is p(voicing) at the observed pitch, and
// interpolator_factor_ * 1.0 - p(voicing) at all other pitches. the
// transition log-probability is -0.5 times the squared difference in pitch.
// [We measure this in Hz, not in integer values, to make it more invariant
// to the discretization interval].
back_pointers_.resize(num_frames_);
log_alpha_.Resize(num_pitches_);
prev_log_alpha_.Resize(num_pitches_);
log_alpha_.Set(0.0);
MultiplyObsProb(0);
for (int32 t = 1; t < num_frames_; t++) {
log_alpha_.Swap(&prev_log_alpha_);
ComputeTransitionProb(t);
MultiplyObsProb(t);
}
}
void Backtrace(Matrix<BaseFloat> *mat, PitchInterpolatorStats *stats) {
const BaseFloat pitch_interval = opts_.pitch_interval;
BaseFloat *p_begin = log_alpha_.Data(), *p_end = p_begin + num_pitches_,
*p_best = std::max_element(p_begin, p_end);
std::vector<int32> best_pitch(num_frames_);
int32 best_p = p_best - p_begin; // best discrete pitch p at time T-1.
for (int32 t = num_frames_ - 1; t >= 0; t--) {
{ // Update stats:
stats->num_frames_tot++;
if (pitch_[t] == 0)
stats->num_frames_zero++;
else if (best_p != pitch_[t])
stats->num_frames_changed++;
}
BaseFloat pitch = min_pitch_[t] + pitch_interval * best_p;
(*mat)(t, 1) = pitch;
KALDI_ASSERT(best_p >= 0 && best_p < num_pitches_);
if (t > 0) best_p = back_pointers_[t][best_p];
}
}
const PitchInterpolatorOptions &opts_;
std::vector<BaseFloat> min_pitch_; // Bottom of discretization range...
// previously this was a BaseFloat, but for better pseudo-randomization we
// have a slightly perturbed value for each frame now, so it's a vector.
int32 num_frames_; // number of frames;
int32 num_pitches_; // Number of discrete pitch intervals.
std::vector<int32> pitch_; // observed pitch, discretized; [it's don't-care
// if algorithm had no
// observation (0)]
std::vector<BaseFloat>
p_voicing_; // p(voicing) times max_voicing_prob_; or zero if
// pitch was 0.0 for this frame.
std::vector<std::vector<int32> >
back_pointers_; // at each t, points to best pitch
// on time t-1.
Vector<BaseFloat> log_alpha_;
Vector<BaseFloat> prev_log_alpha_;
};
// Linear Interpolation for places where the pitch value is zero
void LinearlyInterpolatePitch(Matrix<BaseFloat> *mat) {
int32 num_frames = mat->NumRows();
int i = 0;
Matrix<BaseFloat> &features = *mat;
while (i < num_frames) {
if (features(i, 1) == 0.0) {
int start = i - 1;
int end = i;
while ((end < num_frames) && (features(end, 1) == 0.0)) end++;
BaseFloat end_value = -1, start_value = -1;
if (end < num_frames) end_value = features(end, 1);
if (start > 0) start_value = features(start, 1);
if (start_value < 0 && end_value < 0) {
// the whole file is unvoiced -> just put an arbitrary value,
// it will all be normalized out anyway.
start_value = 1.0;
end_value = 1.0;
}
// If we don't have a value for one end of the range, i.e. at the start or
// end, set it to 0.9 times the pitch value that we have at the other end
// of the range. The reason we don't set it to that value itself, is that
// then over this segment we would have zero time-derivative, so if we
// took time derivatives we would have an artificial spike at zero.
if (start_value < 0.0) start_value = 0.9 * end_value;
if (end_value < 0.0) end_value = 0.9 * start_value;
for (int k = start + 1; k < end; k++)
features(k, 1) =
start_value +
(end_value - start_value) / (end - start) * (k - start);
i = end;
}
i++;
}
}
} // namespace kaldi
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"This is a rather special-purpose program which processes "
"2-dimensional\n"
"features consisting of (prob-of-voicing, pitch). By default we do "
"model-based\n"
"pitch smoothing and interpolation (see code), or if "
"--linear-interpolation=true,\n"
"just linear interpolation across gaps where pitch == 0 (not "
"predicted).\n"
"Usage: interpolate-pitch [options...] <feats-rspecifier> "
"<feats-wspecifier>\n";
// construct all the global objects
ParseOptions po(usage);
bool linear_interpolation = false;
PitchInterpolatorOptions interpolate_opts;
po.Register("linear-interpolation", &linear_interpolation,
"If true, just do simple linear "
"interpolation across gaps (else, model-based)");
interpolate_opts.Register(&po);
// parse options (+filling the registered variables)
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
std::string input_rspecifier = po.GetArg(1);
std::string output_wspecifier = po.GetArg(2);
SequentialBaseFloatMatrixReader reader(input_rspecifier);
BaseFloatMatrixWriter kaldi_writer; // typedef to TableWriter<something>.
if (!kaldi_writer.Open(output_wspecifier))
KALDI_ERR << "Could not initialize output with wspecifier "
<< output_wspecifier;
int32 num_done = 0, num_err = 0;
PitchInterpolatorStats stats;
for (; !reader.Done(); reader.Next()) {
std::string utt = reader.Key();
Matrix<BaseFloat> features = reader.Value();
int num_frames = features.NumRows();
if (num_frames == 0 || features.NumCols() != 2) {
KALDI_WARN << "Feature file has bad size " << features.NumRows()
<< " by " << features.NumCols();
num_err++;
continue;
}
if (linear_interpolation)
LinearlyInterpolatePitch(&features);
else {
// work happens in constructor of this class.
PitchInterpolator pi(interpolate_opts, &features, &stats);
}
kaldi_writer.Write(utt, features);
num_done++;
if (num_done % 10 == 0)
KALDI_LOG << "Processed " << num_done << " utterances";
KALDI_VLOG(2) << "Processed features for key " << utt;
}
if (!linear_interpolation) stats.Print();
KALDI_LOG << "Done " << num_done << " utterances, " << num_err
<< " with errors.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-nnet-test.cc<|end_filename|>
// nnet2/nnet-nnet-test.cc
// Copyright 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet2/nnet-nnet.h"
namespace kaldi {
namespace nnet2 {
void UnitTestNnet() {
int32 input_dim = 40, output_dim = 500;
Nnet *nnet = GenRandomNnet(input_dim, output_dim);
bool binary = (rand() % 2 == 0);
std::ostringstream os;
nnet->Write(os, binary);
Nnet nnet2;
std::istringstream is(os.str());
nnet2.Read(is, binary);
std::ostringstream os2;
nnet2.Write(os2, binary);
KALDI_ASSERT(os2.str() == os.str());
delete nnet;
}
} // namespace nnet2
} // namespace kaldi
#include "matrix/matrix-functions.h"
int main() {
using namespace kaldi;
using namespace kaldi::nnet2;
UnitTestNnet();
return 0;
}
<|start_filename|>tonic-suite/asr/src/nnet2bin/nnet-train-discriminative-parallel.cc<|end_filename|>
// nnet2bin/nnet-train-discriminative-parallel.cc
// Copyright 2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/am-nnet.h"
#include "nnet2/nnet-compute-discriminative-parallel.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace kaldi::nnet2;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
const char *usage =
"Train the neural network parameters with a discriminative objective\n"
"function (MMI, SMBR or MPFE). This uses training examples prepared "
"with\n"
"nnet-get-egs-discriminative\n"
"This version uses multiple threads (but no GPU)"
"\n"
"Usage: nnet-train-discriminative-parallel [options] <model-in> "
"<training-examples-in> <model-out>\n"
"e.g.:\n"
"nnet-train-discriminative-parallel --num-threads=8 1.nnet ark:1.degs "
"2.nnet\n";
bool binary_write = true;
std::string use_gpu = "yes";
int32 num_threads = 1;
NnetDiscriminativeUpdateOptions update_opts;
ParseOptions po(usage);
po.Register("binary", &binary_write, "Write output in binary mode");
po.Register("num-threads", &num_threads, "Number of threads to use");
update_opts.Register(&po);
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string nnet_rxfilename = po.GetArg(1),
examples_rspecifier = po.GetArg(2),
nnet_wxfilename = po.GetArg(3);
TransitionModel trans_model;
AmNnet am_nnet;
{
bool binary_read;
Input ki(nnet_rxfilename, &binary_read);
trans_model.Read(ki.Stream(), binary_read);
am_nnet.Read(ki.Stream(), binary_read);
}
NnetDiscriminativeStats stats;
SequentialDiscriminativeNnetExampleReader example_reader(
examples_rspecifier);
NnetDiscriminativeUpdateParallel(am_nnet, trans_model, update_opts,
num_threads, &example_reader,
&(am_nnet.GetNnet()), &stats);
{
Output ko(nnet_wxfilename, binary_write);
trans_model.Write(ko.Stream(), binary_write);
am_nnet.Write(ko.Stream(), binary_write);
}
return (stats.tot_t == 0 ? 1 : 0);
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/feat/online-feature.h<|end_filename|>
// feat/online-feature.h
// Copyright 2013 Johns Hopkins University (author: <NAME>)
// 2014 <NAME>, <NAME>,
// <NAME>, <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_ONLINE2_ONLINE_FEATURE_H_
#define KALDI_ONLINE2_ONLINE_FEATURE_H_
#include <string>
#include <vector>
#include <deque>
#include "matrix/matrix-lib.h"
#include "util/common-utils.h"
#include "base/kaldi-error.h"
#include "feat/feature-functions.h"
#include "feat/feature-mfcc.h"
#include "feat/feature-plp.h"
#include "feat/feature-fbank.h"
#include "itf/online-feature-itf.h"
namespace kaldi {
/// @addtogroup onlinefeat OnlineFeatureExtraction
/// @{
template <class C>
class OnlineGenericBaseFeature : public OnlineBaseFeature {
public:
//
// First, functions that are present in the interface:
//
virtual int32 Dim() const { return mfcc_or_plp_.Dim(); }
// Note: this will only ever return true if you call InputFinished(), which
// isn't really necessary to do unless you want to make sure to flush out the
// last few frames of delta or LDA features to exactly match a non-online
// decode of some data.
virtual bool IsLastFrame(int32 frame) const;
virtual int32 NumFramesReady() const { return num_frames_; }
virtual void GetFrame(int32 frame, VectorBase<BaseFloat> *feat);
//
// Next, functions that are not in the interface.
//
explicit OnlineGenericBaseFeature(const typename C::Options &opts);
// This would be called from the application, when you get
// more wave data. Note: the sampling_rate is only provided so
// the code can assert that it matches the sampling rate
// expected in the options.
virtual void AcceptWaveform(BaseFloat sampling_rate,
const VectorBase<BaseFloat> &waveform);
// InputFinished() tells the class you won't be providing any
// more waveform. This will help flush out the last few frames
// of delta or LDA features.
virtual void InputFinished() { input_finished_ = true; }
private:
C mfcc_or_plp_; // class that does the MFCC or PLP computation
// features_ is the Mfcc or Plp or Fbank features that we have already
// computed.
Matrix<BaseFloat> features_;
// True if the user has called "InputFinished()"
bool input_finished_;
// num_frames_ is the number of frames of MFCC features we have
// already computed. It may be less than the size of features_,
// because when we resize that matrix we leave some extra room,
// so that we don't spend too much time resizing.
int32 num_frames_;
// The sampling frequency, extracted from the config. Should
// be identical to the waveform supplied.
BaseFloat sampling_frequency_;
// waveform_remainder_ is a short piece of waveform that we may need to keep
// after extracting all the whole frames we can (whatever length of feature
// will be required for the next phase of computation).
Vector<BaseFloat> waveform_remainder_;
};
typedef OnlineGenericBaseFeature<Mfcc> OnlineMfcc;
typedef OnlineGenericBaseFeature<Plp> OnlinePlp;
typedef OnlineGenericBaseFeature<Fbank> OnlineFbank;
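// Illustrative usage sketch (added annotation, not in the original header;
// "mfcc_opts", "samp_freq" and "wave_chunk" are hypothetical names, and
// MfccOptions is assumed to come from feat/feature-mfcc.h). Waveform is fed
// in chunks and frames are read back as they become ready:
//
//   MfccOptions mfcc_opts;
//   OnlineMfcc mfcc(mfcc_opts);
//   mfcc.AcceptWaveform(samp_freq, wave_chunk); // call repeatedly as data arrives
//   mfcc.InputFinished();                       // no more waveform will be provided
//   Vector<BaseFloat> frame(mfcc.Dim());
//   for (int32 t = 0; t < mfcc.NumFramesReady(); t++)
//     mfcc.GetFrame(t, &frame);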
/// This class takes a Matrix<BaseFloat> and wraps it as an
/// OnlineFeatureInterface: this can be useful where some earlier stage of
/// feature processing has been done offline but you want to use part of the
/// online pipeline.
class OnlineMatrixFeature : public OnlineFeatureInterface {
public:
/// Caution: this class maintains the const reference from the constructor, so
/// don't let it go out of scope while this object exists.
explicit OnlineMatrixFeature(const MatrixBase<BaseFloat> &mat) : mat_(mat) {}
virtual int32 Dim() const { return mat_.NumCols(); }
virtual int32 NumFramesReady() const { return mat_.NumRows(); }
virtual void GetFrame(int32 frame, VectorBase<BaseFloat> *feat) {
feat->CopyFromVec(mat_.Row(frame));
}
virtual bool IsLastFrame(int32 frame) const {
return (frame + 1 == mat_.NumRows());
}
private:
const MatrixBase<BaseFloat> &mat_;
};
// Note the similarity with SlidingWindowCmnOptions, but there
// are also differences. One which doesn't appear in the config
// itself, because it's a difference between the setups, is that
// in OnlineCmn, we carry over data from the previous utterance,
// or, if no previous utterance is available, from global stats,
// or, if previous utterances are available but the total amount
// of data is less than prev_frames, we pad with up to "global_frames"
// frames from the global stats.
struct OnlineCmvnOptions {
int32 cmn_window;
int32 speaker_frames; // must be <= cmn_window
int32 global_frames; // must be <= speaker_frames.
bool normalize_mean; // Must be true if normalize_variance==true.
bool normalize_variance;
int32 modulus; // not configurable from command line, relates to how the
// class computes the cmvn internally. smaller->more
// time-efficient but less memory-efficient. Must be >= 1.
int32 ring_buffer_size; // not configurable from command line; size of ring
// buffer used for caching CMVN stats.
std::string
skip_dims; // Colon-separated list of dimensions to skip normalization
// of, e.g. 13:14:15.
OnlineCmvnOptions()
: cmn_window(600),
speaker_frames(600),
global_frames(200),
normalize_mean(true),
normalize_variance(false),
modulus(20),
ring_buffer_size(20),
skip_dims("") {}
void Check() {
KALDI_ASSERT(speaker_frames <= cmn_window &&
global_frames <= speaker_frames && modulus > 0);
}
void Register(ParseOptions *po) {
po->Register("cmn-window", &cmn_window,
"Number of frames of sliding "
"context for cepstral mean normalization.");
po->Register("global-frames", &global_frames,
"Number of frames of "
"global-average cepstral mean normalization stats to use for "
"first utterance of a speaker");
po->Register("speaker-frames", &speaker_frames,
"Number of frames of "
"previous utterance(s) from this speaker to use in cepstral "
"mean normalization");
// we name the config string "norm-vars" for compatibility with
// ../featbin/apply-cmvn.cc
po->Register("norm-vars", &normalize_variance,
"If true, do "
"cepstral variance normalization in addition to cepstral mean "
"normalization ");
po->Register("norm-mean", &normalize_mean,
"If true, do mean normalization "
"(note: you cannot normalize the variance but not the mean)");
po->Register("skip-dims", &skip_dims,
"Dimensions to skip normalization of "
"(colon-separated list of integers)");
}
};
/** Struct OnlineCmvnState stores the state of CMVN adaptation between
utterances (but not the state of the computation within an utterance). It
stores the global CMVN stats and the stats of the current speaker (if we
have seen previous utterances for this speaker), and possibly will have a
member "frozen_state": if the user has called the function Freeze() of class
OnlineCmvn, to fix the CMVN so we can estimate fMLLR on top of the fixed
value of cmvn. If nonempty, "frozen_state" will reflect how we were
normalizing the mean and (if applicable) variance at the time when that
function was called.
*/
struct OnlineCmvnState {
// The following is the total CMVN stats for this speaker (up till now), in
// the same 2 x (dim+1) format as global_cmvn_stats below.
Matrix<double> speaker_cmvn_stats;
// The following is the global CMVN stats, in the usual
// format, of dimension 2 x (dim+1), as [ sum-stats count
// sum-squared-stats 0 ]
Matrix<double> global_cmvn_stats;
// If nonempty, contains CMVN stats representing the "frozen" state
// of CMVN that reflects how we were normalizing the data when the
// user called the Freeze() function in class OnlineCmvn.
Matrix<double> frozen_state;
OnlineCmvnState() {}
explicit OnlineCmvnState(const Matrix<double> &global_stats)
: global_cmvn_stats(global_stats) {}
// Copy constructor
OnlineCmvnState(const OnlineCmvnState &other);
// Use the default assignment operator.
};
class OnlineCmvn : public OnlineFeatureInterface {
public:
//
// First, functions that are present in the interface:
//
virtual int32 Dim() const { return src_->Dim(); }
virtual bool IsLastFrame(int32 frame) const {
return src_->IsLastFrame(frame);
}
// The online cmvn does not introduce any additional latency.
virtual int32 NumFramesReady() const { return src_->NumFramesReady(); }
virtual void GetFrame(int32 frame, VectorBase<BaseFloat> *feat);
//
// Next, functions that are not in the interface.
//
/// Initializer that sets the cmvn state.
OnlineCmvn(const OnlineCmvnOptions &opts, const OnlineCmvnState &cmvn_state,
OnlineFeatureInterface *src);
/// Initializer that does not set the cmvn state:
/// after calling this, you should call SetState().
OnlineCmvn(const OnlineCmvnOptions &opts, OnlineFeatureInterface *src);
// Outputs any state information from this utterance to "cmvn_state".
// The value of "cmvn_state" before the call does not matter: the output
// depends on the value of OnlineCmvnState the class was initialized
// with, the input feature values up to cur_frame, and the effects
// of the user possibly having called Freeze().
// If cur_frame is -1, it will just output the unmodified original
// state that was supplied to this object.
void GetState(int32 cur_frame, OnlineCmvnState *cmvn_state);
// This function can be used to modify the state of the CMVN computation
// from outside, but must only be called before you have processed any data
// (otherwise it will crash). This "state" is really just the information
// that is propagated between utterances, not the state of the computation
// inside an utterance.
void SetState(const OnlineCmvnState &cmvn_state);
// From this point it will freeze the CMVN to what it would have been if
// measured at frame "cur_frame", and it will stop it from changing
// further. This also applies retroactively for this utterance, so if you
// call GetFrame() on previous frames, it will use the CMVN stats
// from cur_frame; and it applies in the future too if you then
// call GetState() and use this state to initialize the next
// utterance's CMVN object.
void Freeze(int32 cur_frame);
virtual ~OnlineCmvn();
private:
/// Smooth the CMVN stats "stats" (which are stored in the normal format as a
/// 2 x (dim+1) matrix), by possibly adding some stats from "global_stats"
/// and/or "speaker_stats", controlled by the config. The best way to
/// understand the smoothing rule we use is just to look at the code.
static void SmoothOnlineCmvnStats(const MatrixBase<double> &speaker_stats,
const MatrixBase<double> &global_stats,
const OnlineCmvnOptions &opts,
MatrixBase<double> *stats);
/// Get the most recent cached frame of CMVN stats. [If no frames
/// were cached, sets up empty stats for frame zero and returns that].
void GetMostRecentCachedFrame(int32 frame, int32 *cached_frame,
Matrix<double> *stats);
/// Cache this frame of stats.
void CacheFrame(int32 frame, const Matrix<double> &stats);
/// Initialize ring buffer for caching stats.
inline void InitRingBufferIfNeeded();
/// Computes the raw CMVN stats for this frame, making use of (and updating if
/// necessary) the cached statistics in cached_stats_modulo_. This means the
/// (x, x^2, count) stats for the last up to opts_.cmn_window frames.
void ComputeStatsForFrame(int32 frame, MatrixBase<double> *stats);
OnlineCmvnOptions opts_;
std::vector<int32>
skip_dims_; // Skip CMVN for these dimensions. Derived from opts_.
OnlineCmvnState orig_state_; // reflects the state before we saw this
// utterance.
Matrix<double> frozen_state_; // If the user called Freeze(), this variable
// will reflect the CMVN state that we froze
// at.
// The variable below reflects the raw (count, x, x^2) statistics of the
// input, computed every opts_.modulus frames.
// cached_stats_modulo_[n / opts_.modulus] contains the (count, x, x^2)
// statistics for the frames from std::max(0, n - opts_.cmn_window) through n.
std::vector<Matrix<double> *> cached_stats_modulo_;
// the variable below is a ring-buffer of cached stats. the int32 is the
// frame index.
std::vector<std::pair<int32, Matrix<double> > > cached_stats_ring_;
OnlineFeatureInterface *src_; // Not owned here
};
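// Illustrative usage sketch (added annotation, not in the original header;
// variable names are hypothetical). It shows how CMVN state is carried from
// one utterance of a speaker to the next, as described above:
//
//   OnlineCmvnOptions cmvn_opts;
//   OnlineCmvnState cmvn_state(global_cmvn_stats); // seed with global stats
//   OnlineCmvn cmvn(cmvn_opts, cmvn_state, &mfcc); // "mfcc" is any OnlineFeatureInterface
//   // ... process the utterance, calling cmvn.GetFrame(t, &feat) ...
//   cmvn.GetState(last_frame, &cmvn_state); // save the adapted state
//   // For the next utterance of the same speaker, construct a new OnlineCmvn
//   // from the saved "cmvn_state".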
struct OnlineSpliceOptions {
int32 left_context;
int32 right_context;
OnlineSpliceOptions() : left_context(4), right_context(4) {}
void Register(ParseOptions *po) {
po->Register("left-context", &left_context,
"Left-context for frame "
"splicing prior to LDA");
po->Register("right-context", &right_context,
"Right-context for frame "
"splicing prior to LDA");
}
};
class OnlineSpliceFrames : public OnlineFeatureInterface {
public:
//
// First, functions that are present in the interface:
//
virtual int32 Dim() const {
return src_->Dim() * (1 + left_context_ + right_context_);
}
virtual bool IsLastFrame(int32 frame) const {
return src_->IsLastFrame(frame);
}
virtual int32 NumFramesReady() const;
virtual void GetFrame(int32 frame, VectorBase<BaseFloat> *feat);
//
// Next, functions that are not in the interface.
//
OnlineSpliceFrames(const OnlineSpliceOptions &opts,
OnlineFeatureInterface *src)
: left_context_(opts.left_context),
right_context_(opts.right_context),
src_(src) {}
private:
int32 left_context_;
int32 right_context_;
OnlineFeatureInterface *src_; // Not owned here
};
/// This online-feature class implements any affine or linear transform.
class OnlineTransform : public OnlineFeatureInterface {
public:
//
// First, functions that are present in the interface:
//
virtual int32 Dim() const { return offset_.Dim(); }
virtual bool IsLastFrame(int32 frame) const {
return src_->IsLastFrame(frame);
}
virtual int32 NumFramesReady() const { return src_->NumFramesReady(); }
virtual void GetFrame(int32 frame, VectorBase<BaseFloat> *feat);
//
// Next, functions that are not in the interface.
//
/// The transform can be a linear transform, or an affine transform
/// where the last column is the offset.
OnlineTransform(const MatrixBase<BaseFloat> &transform,
OnlineFeatureInterface *src);
private:
OnlineFeatureInterface *src_; // Not owned here
Matrix<BaseFloat> linear_term_;
Vector<BaseFloat> offset_;
};
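// Illustrative usage sketch (added annotation, not in the original header;
// "lda_mat", "splice_opts" and "cmvn" are hypothetical names). A typical use
// is applying an LDA-like transform on top of spliced frames; if the matrix
// has one more column than the input dimension, the last column acts as the
// affine offset:
//
//   OnlineSpliceFrames spliced(splice_opts, &cmvn);
//   OnlineTransform lda(lda_mat, &spliced);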
class OnlineDeltaFeature : public OnlineFeatureInterface {
public:
//
// First, functions that are present in the interface:
//
virtual int32 Dim() const;
virtual bool IsLastFrame(int32 frame) const {
return src_->IsLastFrame(frame);
}
virtual int32 NumFramesReady() const;
virtual void GetFrame(int32 frame, VectorBase<BaseFloat> *feat);
//
// Next, functions that are not in the interface.
//
OnlineDeltaFeature(const DeltaFeaturesOptions &opts,
OnlineFeatureInterface *src);
private:
OnlineFeatureInterface *src_; // Not owned here
DeltaFeaturesOptions opts_;
DeltaFeatures delta_features_; // This class contains just a few
// coefficients.
};
/// This feature type can be used to cache its input, to avoid
/// repetition of computation in a multi-pass decoding context.
class OnlineCacheFeature : public OnlineFeatureInterface {
public:
virtual int32 Dim() const { return src_->Dim(); }
virtual bool IsLastFrame(int32 frame) const {
return src_->IsLastFrame(frame);
}
virtual int32 NumFramesReady() const { return src_->NumFramesReady(); }
virtual void GetFrame(int32 frame, VectorBase<BaseFloat> *feat);
virtual ~OnlineCacheFeature() { ClearCache(); }
// Things that are not in the shared interface:
void ClearCache(); // this should be called if you change the underlying
// features in some way.
explicit OnlineCacheFeature(OnlineFeatureInterface *src) : src_(src) {}
private:
OnlineFeatureInterface *src_; // Not owned here
std::vector<Vector<BaseFloat> *> cache_;
};
/// This online-feature class implements the combination of two feature
/// streams (such as pitch and PLP) into a single stream.
class OnlineAppendFeature : public OnlineFeatureInterface {
public:
virtual int32 Dim() const { return src1_->Dim() + src2_->Dim(); }
virtual bool IsLastFrame(int32 frame) const {
return (src1_->IsLastFrame(frame) || src2_->IsLastFrame(frame));
}
virtual int32 NumFramesReady() const {
return std::min(src1_->NumFramesReady(), src2_->NumFramesReady());
}
virtual void GetFrame(int32 frame, VectorBase<BaseFloat> *feat);
virtual ~OnlineAppendFeature() {}
OnlineAppendFeature(OnlineFeatureInterface *src1,
OnlineFeatureInterface *src2)
: src1_(src1), src2_(src2) {}
private:
OnlineFeatureInterface *src1_;
OnlineFeatureInterface *src2_;
};
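// Illustrative usage sketch (added annotation, not in the original header;
// "pitch_matrix" and "mfcc" are hypothetical). Appending an offline-computed
// pitch matrix to an online MFCC stream yields a single stream whose dimension
// is the sum of the two and whose ready-frame count is the minimum of the two:
//
//   OnlineMatrixFeature pitch_feats(pitch_matrix);
//   OnlineAppendFeature appended(&mfcc, &pitch_feats);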
/// @} End of "addtogroup onlinefeat"
} // namespace kaldi
#endif // KALDI_ONLINE2_ONLINE_FEATURE_H_
<|start_filename|>tonic-suite/asr/src/thread/kaldi-task-sequence-test.cc<|end_filename|>
// thread/kaldi-task-sequence-test.cc
// Copyright 2012 Johns Hopkins University (Author: <NAME>)
// <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "thread/kaldi-task-sequence.h"
namespace kaldi {
class MyTaskClass { // spins for a while, then outputs a pre-given integer.
public:
MyTaskClass(int32 i, std::vector<int32> *vec)
: done_(false), i_(i), vec_(vec) {}
void operator()() {
int32 spin = 1000000 * (Rand() % 100);  // spin for up to 99 million iterations
for (int32 i = 0; i < spin; i++)
;
done_ = true;
}
~MyTaskClass() {
KALDI_ASSERT(done_);
vec_->push_back(i_);
}
private:
bool done_;
int32 i_;
std::vector<int32> *vec_;
};
void TestTaskSequencer() {
TaskSequencerConfig config;
config.num_threads = 1 + Rand() % 20;
if (Rand() % 2 == 1)
config.num_threads_total = config.num_threads + Rand() % config.num_threads;
int32 num_tasks = Rand() % 100;
std::vector<int32> task_output;
{
TaskSequencer<MyTaskClass> sequencer(config);
for (int32 i = 0; i < num_tasks; i++) {
sequencer.Run(new MyTaskClass(i, &task_output));
}
} // and let "sequencer" be destroyed, which waits for the last threads.
KALDI_ASSERT(task_output.size() == static_cast<size_t>(num_tasks));
for (int32 i = 0; i < num_tasks; i++) KALDI_ASSERT(task_output[i] == i);
}
} // end namespace kaldi.
int main() {
using namespace kaldi;
for (int32 i = 0; i < 1000; i++) TestTaskSequencer();
}
<|start_filename|>tonic-suite/nlp/src/SENNA_Tokenizer.cpp<|end_filename|>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <ctype.h>
#include "SENNA_Tokenizer.h"
#include "SENNA_utils.h"
#define MAX_WORD_SIZE 256
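/* Descriptive note (added annotation, not in the original source): for every
   token position, this helper builds progressively longer lower-cased
   multi-word strings from the token sequence, looks them up in the given
   gazetteer hash, and marks every token covered by an admissible entry with
   gazt_hash_is_entity_idx; all other tokens keep gazt_hash_no_entity_idx. */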
static void tokenize_gazetteer(int **gazt_idx_, SENNA_Tokenizer *tokenizer,
SENNA_Hash *hash) {
int i, j, k;
int entity_size;
int idxhash;
char **words = tokenizer->tokens->words;
int *gazt_idx;
*gazt_idx_ = SENNA_realloc(*gazt_idx_, sizeof(int), tokenizer->max_tokens);
gazt_idx = *gazt_idx_;
for (i = 0; i < tokenizer->tokens->n; i++)
gazt_idx[i] = tokenizer->gazt_hash_no_entity_idx;
for (i = 0; i < tokenizer->tokens->n; i++) {
entity_size = 0;
for (j = 0; j < tokenizer->tokens->n - i; j++) {
int word_size = strlen(words[i + j]);
if (entity_size + word_size + 1 > tokenizer->max_entity_size) {
tokenizer->entity = SENNA_realloc(tokenizer->entity, sizeof(char),
entity_size + word_size + 1);
tokenizer->max_entity_size = entity_size + word_size + 1;
}
if (j > 0) tokenizer->entity[entity_size - 1] = ' ';
for (k = 0; k < word_size; k++)
tokenizer->entity[entity_size++] = (char)tolower(words[i + j][k]);
tokenizer->entity[entity_size++] = '\0';
idxhash = SENNA_Hash_index(hash, tokenizer->entity);
if (idxhash < 0) break;
if (SENNA_Hash_is_admissible_index(hash, idxhash)) {
for (k = 0; k <= j; k++)
gazt_idx[i + k] = tokenizer->gazt_hash_is_entity_idx;
}
}
}
}
SENNA_Tokens *SENNA_Tokenizer_tokenize(SENNA_Tokenizer *tokenizer,
const char *sentence) {
const char *sentence0n;
int offset0n;
/* first replace all numbers by '0' */
{
int sentence_size = strlen(sentence) + 1;
if (sentence_size > tokenizer->max_sentence0n_size) {
tokenizer->max_sentence0n_size = sentence_size;
tokenizer->sentence0n =
SENNA_realloc(tokenizer->sentence0n, sizeof(char), sentence_size);
tokenizer->offset0n2raw =
SENNA_realloc(tokenizer->offset0n2raw, sizeof(int), sentence_size);
}
}
{
const char *reader = sentence;
char *writer = tokenizer->sentence0n;
int reader_offset = 0;
int writer_offset = 0;
while (1) {
char c = reader[reader_offset];
int number_size = 0;
if (isdigit(c) || c == '+' || c == '-' || c == '.' || c == ',')
SENNA_tokenize_number(&number_size, reader + reader_offset);
tokenizer->offset0n2raw[writer_offset] = reader_offset;
if (number_size) {
writer[writer_offset++] = '0';
reader_offset += number_size;
} else {
writer[writer_offset++] = c;
reader_offset++;
if (c == '\0') break;
}
}
}
sentence0n = tokenizer->sentence0n;
offset0n = 0;
tokenizer->tokens->n = 0;
while (1) {
int sizetoken;
int sizealphanumeric;
int sizedictionary;
int idxhash;
int incsize;
while (isspace(sentence0n[offset0n])) offset0n++;
if (tokenizer->is_tokenized) {
SENNA_tokenize_untilspace(&sizetoken, sentence0n + offset0n);
SENNA_tokenize_dictionarymatch(&sizedictionary, &idxhash,
tokenizer->word_hash,
sentence0n + offset0n);
if (sizedictionary != sizetoken) idxhash = -1;
} else {
SENNA_tokenize_alphanumeric(&sizealphanumeric, sentence0n + offset0n);
SENNA_tokenize_dictionarymatch(&sizedictionary, &idxhash,
tokenizer->word_hash,
sentence0n + offset0n);
sizetoken = (sizealphanumeric > sizedictionary ? sizealphanumeric
: sizedictionary);
idxhash = (sizealphanumeric > sizedictionary ? -1 : idxhash);
}
if (sizetoken == 0) {
if (sentence0n[offset0n] == '\0')
break;
else {
fprintf(stderr, "WARNING: skipping a char (%c)\n",
sentence0n[offset0n]);
offset0n++;
continue;
}
}
/* check buffer sizes */
/* note that we increment one at a time */
incsize = 0;
if (tokenizer->tokens->n + 1 > tokenizer->max_tokens) {
tokenizer->max_tokens = tokenizer->tokens->n + 1;
incsize = 1;
}
/* word strings */
{
int sizetokenraw = tokenizer->offset0n2raw[offset0n + sizetoken] -
tokenizer->offset0n2raw[offset0n];
if (incsize) {
tokenizer->tokens->words = SENNA_realloc(
tokenizer->tokens->words, sizeof(char *), tokenizer->max_tokens);
tokenizer->tokens->words[tokenizer->tokens->n] = NULL;
tokenizer->words_sizes = SENNA_realloc(
tokenizer->words_sizes, sizeof(int), tokenizer->max_tokens);
tokenizer->words_sizes[tokenizer->tokens->n] = 0;
}
if (sizetokenraw >= tokenizer->words_sizes[tokenizer->tokens->n]) {
tokenizer->words_sizes[tokenizer->tokens->n] = sizetokenraw + 1;
tokenizer->tokens->words[tokenizer->tokens->n] = SENNA_realloc(
tokenizer->tokens->words[tokenizer->tokens->n], sizeof(char),
tokenizer->words_sizes[tokenizer->tokens->n]);
}
memcpy(tokenizer->tokens->words[tokenizer->tokens->n],
sentence + tokenizer->offset0n2raw[offset0n], sizetokenraw);
tokenizer->tokens->words[tokenizer->tokens->n][sizetokenraw] = '\0';
}
/* words */
{
if (incsize)
tokenizer->tokens->word_idx = SENNA_realloc(
tokenizer->tokens->word_idx, sizeof(int), tokenizer->max_tokens);
tokenizer->tokens->word_idx[tokenizer->tokens->n] =
(idxhash >= 0 ? idxhash : tokenizer->word_hash_unknown_idx);
}
/* word offsets */
{
if (incsize) {
tokenizer->tokens->start_offset =
SENNA_realloc(tokenizer->tokens->start_offset, sizeof(int),
tokenizer->max_tokens);
tokenizer->tokens->end_offset = SENNA_realloc(
tokenizer->tokens->end_offset, sizeof(int), tokenizer->max_tokens);
}
tokenizer->tokens->start_offset[tokenizer->tokens->n] =
tokenizer->offset0n2raw[offset0n];
tokenizer->tokens->end_offset[tokenizer->tokens->n] =
tokenizer->offset0n2raw[offset0n + sizetoken];
}
/* caps */
if (tokenizer->caps_hash) {
int i;
int allcaps, initcap, hascap;
allcaps = !islower(sentence0n[offset0n]);
initcap = isupper(sentence0n[offset0n]);
hascap = initcap;
for (i = 1; i < sizetoken; i++) {
if (islower(sentence0n[offset0n + i]))
allcaps = 0;
else if (isupper(sentence0n[offset0n + i]))
hascap = 1;
}
if (incsize)
tokenizer->tokens->caps_idx = SENNA_realloc(
tokenizer->tokens->caps_idx, sizeof(int), tokenizer->max_tokens);
if (hascap && allcaps)
tokenizer->tokens->caps_idx[tokenizer->tokens->n] =
tokenizer->caps_hash_allcaps_idx;
else if (initcap)
tokenizer->tokens->caps_idx[tokenizer->tokens->n] =
tokenizer->caps_hash_initcap_idx;
else if (hascap)
tokenizer->tokens->caps_idx[tokenizer->tokens->n] =
tokenizer->caps_hash_hascap_idx;
else
tokenizer->tokens->caps_idx[tokenizer->tokens->n] =
tokenizer->caps_hash_nocaps_idx;
}
/* suffixes */
if (tokenizer->suff_hash) {
static char suffix[3] = {'\0', '\0', '\0'};
int idxhashsuffix;
suffix[0] =
(char)(sizetoken >= 2
? tolower(sentence0n[offset0n + sizetoken - 2])
: (sizetoken >= 1
? tolower(sentence0n[offset0n + sizetoken - 1])
: '\0'));
suffix[1] =
(char)(sizetoken >= 2 ? tolower(sentence0n[offset0n + sizetoken - 1])
: '\0');
idxhashsuffix = SENNA_Hash_index(tokenizer->suff_hash, suffix);
if (incsize)
tokenizer->tokens->suff_idx = SENNA_realloc(
tokenizer->tokens->suff_idx, sizeof(int), tokenizer->max_tokens);
tokenizer->tokens->suff_idx[tokenizer->tokens->n] =
(idxhashsuffix < 0 ? tokenizer->suff_hash_nosuffix_idx
: idxhashsuffix);
}
tokenizer->tokens->n++;
offset0n = offset0n + sizetoken;
}
/* gazetteers */
/* note: they need to know all the tokens, so we do it at the end */
if (tokenizer->gazl_hash)
tokenize_gazetteer(&tokenizer->tokens->gazl_idx, tokenizer,
tokenizer->gazl_hash);
if (tokenizer->gazm_hash)
tokenize_gazetteer(&tokenizer->tokens->gazm_idx, tokenizer,
tokenizer->gazm_hash);
if (tokenizer->gazo_hash)
tokenize_gazetteer(&tokenizer->tokens->gazo_idx, tokenizer,
tokenizer->gazo_hash);
if (tokenizer->gazp_hash)
tokenize_gazetteer(&tokenizer->tokens->gazp_idx, tokenizer,
tokenizer->gazp_hash);
return tokenizer->tokens;
}
void SENNA_tokenize_untilspace(int *size_, const char *sentence) {
int size = 0;
while (1) {
char c = sentence[size];
if (c == '\0' || isspace(c)) break;
size++;
}
*size_ = size;
}
void SENNA_tokenize_alphanumeric(int *size_, const char *sentence) {
int size = 0;
while (1) {
char c = *sentence++;
if (c == '\0' || (!isdigit(c) && !isalpha(c))) break;
size++;
}
*size_ = size;
}
void SENNA_tokenize_dictionarymatch(int *size_, int *idxhash_, SENNA_Hash *hash,
const char *sentence) {
static char word[MAX_WORD_SIZE];
int size = 0;
int idxhash = -1;
char c;
/* match until space */
while (size < MAX_WORD_SIZE - 1) {
c = sentence[size];
if (c == '\0' || isspace(c)) break;
word[size++] = (char)tolower(c);
}
/* keep the longest prefix of the word that is present in the dictionary */
for (; size > 0; size--) {
word[size] = '\0';
idxhash = SENNA_Hash_index(hash, word);
if (idxhash >= 0) break;
}
*size_ = size;
*idxhash_ = idxhash;
}
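/* Illustrative example (added annotation, not in the original source): with a
   hash that contains "cannot" but not "cannot!", the input "Cannot! ..." is
   lower-cased up to the first space into "cannot!", and the shrinking loop
   above then reports size 6 together with the hash index of "cannot", i.e. the
   longest prefix of the whitespace-delimited token found in the dictionary. */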
void SENNA_tokenize_number(int *size_, const char *sentence) {
int state = 0;
int size = 0;
int idx = 0;
int finished = 0;
while (!finished) {
char c = sentence[idx++];
if (c == '\0') break;
switch (state) {
case 0:
if (c == '+' || c == '-')
state = 1;
else if (c == '.' || c == ',')
state = 2;
else if (isdigit(c))
state = 4;
else
finished = 1;
break;
case 1:
if (c == '.' || c == ',')
state = 2;
else if (isdigit(c))
state = 4;
else
finished = 1;
break;
case 2:
if (isdigit(c))
state = 4;
else
finished = 1;
break;
case 3:
if (isdigit(c))
state = 4;
else
finished = 1;
break;
case 4:
size = idx - 1;
if (c == '.' || c == ',')
state = 3;
else if (isdigit(c))
state = 4;
else
finished = 1;
break;
}
}
*size_ = size;
}
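/* Illustrative examples (added annotation, not in the original source) of what
   the state machine above accepts; the reported size only advances in state 4,
   i.e. after at least one digit has been seen:
     "+3.14 apples" -> size 5 ("+3.14")
     "-.5x"         -> size 3 ("-.5")
     "."            -> size 0 (no digit seen)
   The caller in SENNA_Tokenizer_tokenize() replaces such a matched run by a
   single '0' character before further tokenization. */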
static int checkhash(SENNA_Hash *hash, const char *key) {
int idx;
if (!hash) return -1;
idx = SENNA_Hash_index(hash, key);
if (idx < 0) SENNA_error("could not find key %s", key);
return idx;
}
SENNA_Tokenizer *SENNA_Tokenizer_new(
SENNA_Hash *word_hash, SENNA_Hash *caps_hash, SENNA_Hash *suff_hash,
SENNA_Hash *gazt_hash, SENNA_Hash *gazl_hash, SENNA_Hash *gazm_hash,
SENNA_Hash *gazo_hash, SENNA_Hash *gazp_hash, int is_tokenized) {
SENNA_Tokenizer *tokenizer = SENNA_malloc(sizeof(SENNA_Tokenizer), 1);
memset(tokenizer, 0, sizeof(SENNA_Tokenizer));
if (!word_hash) SENNA_error("Tokenizer *needs* a hash for words");
tokenizer->is_tokenized = is_tokenized;
tokenizer->word_hash = word_hash;
tokenizer->caps_hash = caps_hash;
tokenizer->suff_hash = suff_hash;
tokenizer->gazt_hash = gazt_hash;
tokenizer->gazl_hash = gazl_hash;
tokenizer->gazm_hash = gazm_hash;
tokenizer->gazo_hash = gazo_hash;
tokenizer->gazp_hash = gazp_hash;
tokenizer->word_hash_unknown_idx = checkhash(word_hash, "UNKNOWN");
tokenizer->suff_hash_nosuffix_idx = checkhash(suff_hash, "NOSUFFIX");
tokenizer->caps_hash_allcaps_idx = checkhash(caps_hash, "allcaps");
tokenizer->caps_hash_hascap_idx = checkhash(caps_hash, "hascap");
tokenizer->caps_hash_initcap_idx = checkhash(caps_hash, "initcap");
tokenizer->caps_hash_nocaps_idx = checkhash(caps_hash, "nocaps");
tokenizer->gazt_hash_no_entity_idx = checkhash(gazt_hash, "NO");
tokenizer->gazt_hash_is_entity_idx = checkhash(gazt_hash, "YES");
tokenizer->tokens = SENNA_malloc(sizeof(SENNA_Tokens), 1);
memset(tokenizer->tokens, 0, sizeof(SENNA_Tokens));
return tokenizer;
}
void SENNA_Tokenizer_free(SENNA_Tokenizer *tokenizer) {
int i;
for (i = 0; i < tokenizer->max_tokens; i++)
SENNA_free(tokenizer->tokens->words[i]);
SENNA_free(tokenizer->tokens->words);
SENNA_free(tokenizer->tokens->start_offset);
SENNA_free(tokenizer->tokens->end_offset);
SENNA_free(tokenizer->tokens->word_idx);
SENNA_free(tokenizer->tokens->caps_idx);
SENNA_free(tokenizer->tokens->suff_idx);
SENNA_free(tokenizer->tokens->gazl_idx);
SENNA_free(tokenizer->tokens->gazm_idx);
SENNA_free(tokenizer->tokens->gazo_idx);
SENNA_free(tokenizer->tokens->gazp_idx);
SENNA_free(tokenizer->tokens);
SENNA_free(tokenizer->words_sizes);
SENNA_free(tokenizer->sentence0n);
SENNA_free(tokenizer->offset0n2raw);
SENNA_free(tokenizer->entity);
SENNA_free(tokenizer);
}
<|start_filename|>tonic-suite/asr/src/thread/kaldi-barrier.h<|end_filename|>
// thread/kaldi-barrier.h
// Copyright 2012 <NAME> (Brno University of Technology)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_THREAD_KALDI_BARRIER_H_
#define KALDI_THREAD_KALDI_BARRIER_H_ 1
#include <pthread.h>
namespace kaldi {
/**
* The Barrier class
* A barrier causes a group of threads to wait until
* all the threads reach the "barrier".
*/
class Barrier {
public:
Barrier(int32 threshold = 0);
~Barrier();
void SetThreshold(int32 thr); ///< number of threads to wait for
int32 Wait(); ///< last thread returns -1, the others 0
private:
pthread_mutex_t mutex_; ///< Mutex which controls access to the barrier
pthread_cond_t cv_; ///< Condition variable on which the barrier waits
int32 threshold_; ///< size of the thread-group
int32 counter_; ///< number of threads still to arrive in this cycle
int32 cycle_; ///< cycle flag to keep the threads synchronized
KALDI_DISALLOW_COPY_AND_ASSIGN(Barrier);
};
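// Illustrative usage sketch (added annotation, not in the original header;
// "num_threads" is a hypothetical value). Every worker thread calls Wait() at
// the end of a processing phase; the last thread to arrive is the one that
// gets -1 back:
//
//   Barrier barrier(num_threads); // or: barrier.SetThreshold(num_threads);
//   // ... in each worker thread, once per phase:
//   if (barrier.Wait() == -1) {
//     // exactly one thread per cycle ends up here
//   }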
} // namespace kaldi
#endif // KALDI_THREAD_KALDI_BARRIER_H_
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-compute-discriminative-parallel.h<|end_filename|>
// nnet2/nnet-compute-discriminative-parallel.h
// Copyright 2012-2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_NNET2_NNET_COMPUTE_DISCRIMINATIVE_PARALLEL_H_
#define KALDI_NNET2_NNET_COMPUTE_DISCRIMINATIVE_PARALLEL_H_
#include "nnet2/am-nnet.h"
#include "nnet2/nnet-example.h"
#include "hmm/transition-model.h"
#include "nnet2/nnet-compute-discriminative.h"
namespace kaldi {
namespace nnet2 {
/* This header provides a multi-threaded version of the discriminative training
code (this is for a CPU-based, instead of GPU-based, setup).
Note: we expect that "nnet_to_update" will be the same as
"&(am_nnet.GetNnet())"
*/
void NnetDiscriminativeUpdateParallel(
const AmNnet &am_nnet, const TransitionModel &tmodel,
const NnetDiscriminativeUpdateOptions &opts, int32 num_threads,
SequentialDiscriminativeNnetExampleReader *example_reader,
Nnet *nnet_to_update, NnetDiscriminativeStats *stats);
} // namespace nnet2
} // namespace kaldi
#endif // KALDI_NNET2_NNET_COMPUTE_DISCRIMINATIVE_PARALLEL_H_
<|start_filename|>tonic-suite/asr/src/lm/lm-lib-test.cc<|end_filename|>
// lm/lm-lib-test.cc
//
// Copyright 2009-2011 <NAME>.
//
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
/// @addtogroup LanguageModel
/// @{
/**
* @file lm-lib-test.cc
* @brief Unit tests for language model code.
*/
#include <iostream>
#include <string>
#include <sstream>
#include "lm/kaldi-lm.h"
namespace kaldi {
// hard-coded symbols (for now)
#define startOfSentence "<s>"
#define endOfSentence "</s>"
#define epsilon "<eps>"
#define MAX_SENTENCE_LENGTH 1000
/// @brief Recursively prints all complete paths starting at s and their score.
static LangModelFst::LmWeight PrintCompletePath(fst::SymbolTable *pst,
fst::StdVectorFst *pfst,
fst::StdArc::StateId s,
LangModelFst::LmWeight score) {
fst::ArcIterator<fst::StdVectorFst> ai(*pfst, s);
for (ai.Reset(); !ai.Done(); ai.Next()) {
std::cout << pst->Find(ai.Value().ilabel) << " ";
fst::StdArc::Weight w = score; // initialize with current score
// reset weight to 0 if we are going through the initial state again
if (s == pfst->Start()) {
w = fst::StdArc::Weight::One();
}
std::cout << " \tcurrent score " << w;
w = fst::Times(w, ai.Value().weight); // add in value from current arc
std::cout << " added arc " << ai.Value().weight;
fst::StdArc::Weight fw = pfst->Final(ai.Value().nextstate);
if (fw != fst::StdArc::Weight::Zero()) {
w = fst::Times(w, fw); // add in destination state weight if final
std::cout << " added state weight " << w << '\n';
}
std::cout << '\n';
score = PrintCompletePath(pst, pfst, ai.Value().nextstate, w);
}
// test this after recursive call in case there are arcs out of a final state
if (pfst->Final(s) == fst::StdArc::Weight::One()) {
// we hit final state, stop there
// std::cout << " total score: " << score << '\n';
}
return score;
}
/// @brief Recursively prints all complete paths starting from initial state.
static LangModelFst::LmWeight PrintCompletePaths(fst::SymbolTable *pst,
fst::StdVectorFst *pfst) {
KALDI_ASSERT(pst);
KALDI_ASSERT(pfst);
KALDI_ASSERT(pfst->Start() >= 0);
return PrintCompletePath(pst, pfst, pfst->Start(),
fst::StdArc::Weight::One());
}
/// @brief Creates an FST that generates any sequence of symbols
/// taken from given symbol table.
/// This FST is then associated with given symbol table.
static fst::StdVectorFst *CreateGenFst(fst::SymbolTable *pst) {
fst::StdArc::StateId initId, midId, finalId;
fst::StdVectorFst *genFst = new fst::StdVectorFst;
pst->AddSymbol(epsilon); // added if not there
int64 boslab = pst->AddSymbol(startOfSentence); // added if not there
int64 eoslab = pst->AddSymbol(endOfSentence); // added if not there
genFst->SetInputSymbols(pst);
genFst->SetOutputSymbols(pst);
initId = genFst->AddState();
midId = genFst->AddState();
finalId = genFst->AddState();
genFst->SetStart(initId); // initial state
genFst->SetFinal(finalId, fst::StdArc::Weight::One()); // final state
genFst->AddArc(initId, fst::StdArc(boslab, boslab, 0, midId));
genFst->AddArc(midId, fst::StdArc(eoslab, eoslab, 0, finalId));
// add a loop for each symbol except epsilon, begin and end of sentence
fst::SymbolTableIterator si(*pst);
for (si.Reset(); !si.Done(); si.Next()) {
if (si.Value() == boslab || si.Value() == eoslab || si.Value() == 0)
continue;
genFst->AddArc(midId, fst::StdArc(si.Value(), si.Value(), 0, midId));
}
return genFst;
}
/// @brief Randomly generates ntests paths with uniform distribution.
static fst::StdVectorFst *CreateRandPathFst(int n, fst::StdVectorFst *genFst) {
typedef fst::UniformArcSelector<fst::StdArc> UniformSelector;
int nTrials = 50;
UniformSelector uniform_sel;
fst::RandGenOptions<UniformSelector> opts(uniform_sel, MAX_SENTENCE_LENGTH,
n);
for (int i = 0; i < nTrials; i++) {
fst::StdVectorFst *tmpFst = new fst::StdVectorFst;
RandGen(*genFst, tmpFst, opts);
if (tmpFst->Properties(fst::kCoAccessible, true)) {
// std::cout << "Got valid random path after " << i << " tries" << '\n';
return tmpFst;
}
// not good, try another
delete tmpFst;
}
// couldn't generate it within allowed trials
std::cerr << " Warning: couldn't generate complete paths within " << nTrials;
std::cerr << " trials and " << MAX_SENTENCE_LENGTH << " max length" << '\n';
return NULL;
}
/// @brief Tests if all paths generated from genFst are included in testFst.
static bool coverageTests(fst::StdVectorFst *genFst, fst::StdVectorFst *testFst,
int ntests) {
bool success = true;
#ifdef KALDI_PARANOID
KALDI_ASSERT(genFst != NULL);
KALDI_ASSERT(testFst != NULL);
#endif
std::cout << "Generating " << ntests << " tests";
std::cout.flush();
// randomly generate ntests paths with uniform distribution
fst::StdVectorFst *pathFst = CreateRandPathFst(ntests, genFst);
if (!pathFst) return false;
// compose paths with language model fst
fst::StdVectorFst *outFst = new fst::StdVectorFst;
// std::cout << "Path FST " << '\n';
// printFirstCompletePath(pst, pathFst, pathFst->Start());
Compose(*pathFst, *testFst, outFst);
// Composition result must have ntests arcs out of initial state
int narcs = outFst->NumArcs(outFst->Start());
std::cout << ", composition has " << narcs << " arcs out of start state"
<< '\n';
if (narcs != ntests) success = false;
// std::cout << "Out FST " << '\n';
// printFirstCompletePath(pst, outFst, outFst->Start());
delete pathFst;
delete outFst;
return success;
}
/// @brief Tests read and write methods.
bool TestLmTableReadWrite(int nTests, const string &infile,
const string &outfile) {
bool success = true;
// reading test: create a language model FST from input file
std::cout << "LangModelFst test: read file " << infile << '\n';
LangModelFst lm;
if (!lm.Read(infile, kArpaLm)) return false;
// first create an FST that generates
// any sequence of symbols taken from symbol table
fst::StdVectorFst *genFst = CreateGenFst(lm.GetFst()->MutableInputSymbols());
// see if path generated in this FST are covered by the LM FST
std::cout << "For any sequence of symbols found in symbol table:" << '\n';
if (coverageTests(genFst, lm.GetFst(), nTests)) {
std::cout << "PASSED";
} else {
std::cout << "FAILED";
success = false;
}
std::cout << '\n';
// writing test: write out FST, read it back in a new lm
// reading doesn't provide symbol tables automatically ?
std::cout << "LangModelFst test: write to " << outfile;
std::cout << " and read it back" << '\n';
// std::cout << "lm input symbol table:" << '\n';
// lm.GetFst()->InputSymbols()->WriteText(std::cout);
// std::cout << "lm output symbol table:" << '\n';
// lm.GetFst()->OutputSymbols()->WriteText(std::cout);
lm.Write(outfile);
std::cout << "LangModelFst test: read from " << outfile << '\n';
LangModelFst lm2;
if (!lm2.Read(outfile, kFst)) return false;
// std::cout << "lm2 output symbol table:" << '\n';
// lm2.GetFst()->InputSymbols()->WriteText(std::cout);
// std::cout << "lm2 output symbol table:" << '\n';
// lm2.GetFst()->OutputSymbols()->WriteText(std::cout);
// generate random sequences from the original LM
// and see if they are covered by the FST that was just read
std::cout << "For any complete path in original LM:" << '\n';
if (coverageTests(lm.GetFst(), lm2.GetFst(), nTests)) {
std::cout << "PASSED";
} else {
std::cout << "FAILED";
success = false;
}
std::cout << '\n';
delete genFst;
return success;
}
/// @brief Tests correctness of path weights.
bool TestLmTableEvalScore(const string &inpfile, const string &intext,
const string &refScoreFile) {
bool success = true;
// read in reference score
std::ifstream strm(refScoreFile.c_str(), std::ifstream::in);
LangModelFst::LmWeight refScore;
strm >> refScore;
std::cout << "Reference score is " << refScore << '\n';
std::cout << "LangModelFst test: score text strings with LM " << intext
<< '\n';
// use original log base for testing
LangModelFst lm;
if (!lm.Read(inpfile, kArpaLm, NULL, false)) return false;
std::cout << "LangModelFst test: read text strings " << intext << '\n';
// here specify symbol table to be used so composition works
LangModelFst txtString;
if (!txtString.Read(intext, kTextString,
lm.GetFst()->MutableInputSymbols())) {
return false;
}
// PrintCompletePaths(txtString.GetFst()->InputSymbols(), txtString.GetFst());
// std::cout << "Fst string input symbol table:" << '\n';
// txtString.GetFst()->OutputSymbols()->WriteText(std::cout);
// std::cout << "Fst string output symbol table:" << '\n';
// txtString.GetFst()->OutputSymbols()->WriteText(std::cout);
// compose paths with language model fst
fst::StdVectorFst composedFst;
fst::ComposeFstOptions<fst::StdArc, fst::Matcher<fst::StdFst>,
fst::MatchComposeFilter<fst::Matcher<fst::StdFst> > >
copts;
copts.gc_limit = 0; // Cache only the last state for fastest copy.
composedFst =
fst::ComposeFst<fst::StdArc>(*txtString.GetFst(), *lm.GetFst(), copts);
composedFst.Write("composed.fst");
// find best path score
fst::StdVectorFst *bestFst = new fst::StdVectorFst;
fst::ShortestPath(composedFst, bestFst, 1);
std::cout << "Best path has " << bestFst->NumStates() << " states" << '\n';
LangModelFst::LmWeight testScore =
PrintCompletePaths(bestFst->MutableInputSymbols(), bestFst);
std::cout << "Complete path score is " << testScore << '\n';
if (testScore.Value() <= refScore.Value()) {
std::cout << "PASSED";
} else {
std::cout << "FAILED";
success = false;
}
std::cout << '\n';
delete bestFst;
unlink("composed.fst");
return success;
}
} // end namespace kaldi
int main(int argc, char *argv[]) {
int ntests;
bool success = true;
std::string infile = "input.arpa";
std::string outfile = "output.fst";
// Note that for these tests to work, language models must be acceptors
// (i.e. have same symbol table for input and output) since we
// compose them with one another
ntests = 20;
std::cout << "Testing small arpa file with missing backoffs" << '\n';
infile = "missing_backoffs.arpa";
success &= kaldi::TestLmTableReadWrite(ntests, infile, outfile);
std::cout << "Testing small arpa file with unused backoffs" << '\n';
infile = "unused_backoffs.arpa";
success &= kaldi::TestLmTableReadWrite(ntests, infile, outfile);
std::cout << "Testing normal small arpa file" << '\n';
infile = "input.arpa";
success &= kaldi::TestLmTableReadWrite(ntests, infile, outfile);
ntests = 2;
// note that we use latest value of 'infile' as the tested language model
for (int i = 1; i <= ntests; i++) {
std::ostringstream intext("");
std::ostringstream refscore("");
// these inputN.txt sentences have been scored
// by an external LM tool with results in inputN.score
intext << "input" << i << ".txt";
refscore << "input" << i << ".score";
success &=
kaldi::TestLmTableEvalScore(infile, intext.str(), refscore.str());
}
unlink("output.fst");
exit(success ? 0 : 1);
}
/// @}
<|start_filename|>tonic-suite/asr/src/matrix/srfft.h<|end_filename|>
// matrix/srfft.h
// Copyright 2009-2011 Microsoft Corporation; Go Vivace Inc.
// 2014 <NAME>
//
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
//
// This file includes a modified version of code originally published in Malvar,
// H., "Signal processing with lapped transforms, " Artech House, Inc., 1992.
// The
// current copyright holder of the original code, <NAME>, has given
// his permission for the release of this modified version under the Apache
// License v2.0.
#ifndef KALDI_MATRIX_SRFFT_H_
#define KALDI_MATRIX_SRFFT_H_
#include "matrix/kaldi-vector.h"
#include "matrix/kaldi-matrix.h"
namespace kaldi {
/// @addtogroup matrix_funcs_misc
/// @{
// This class is based on code by <NAME>, from his book
// "Signal Processing with Lapped Transforms" (1992). Copied with
// permission, optimized by Go Vivace Inc., and converted into C++ by
// Microsoft Corporation
// This is a more efficient way of doing the complex FFT than ComplexFft
// (declared in matrix-functions.h), but it only works for powers of 2.
// Note: in multi-threaded code, you would need to have one of these objects per
// thread, because multiple calls to Compute in parallel would not work.
template <typename Real>
class SplitRadixComplexFft {
public:
typedef MatrixIndexT Integer;
// N is the number of complex points (must be a power of two, or this
// will crash). Note that the constructor does some work so it's best to
// initialize the object once and do the computation many times.
SplitRadixComplexFft(Integer N);
// Does the FFT computation, given pointers to the real and
// imaginary parts. If "forward", do the forward FFT; else
// do the inverse FFT (without the 1/N factor).
// xr and xi are pointers to zero-based arrays of size N,
// containing the real and imaginary parts
// respectively.
void Compute(Real *xr, Real *xi, bool forward) const;
// This version of Compute takes a single array of size N*2,
// containing [ r0 im0 r1 im1 ... ]. Otherwise its behavior is the
// same as the version above.
void Compute(Real *x, bool forward);
// This version of Compute is const; it operates on an array of size N*2
// containing [ r0 im0 r1 im1 ... ], but it uses the argument "temp_buffer" as
// temporary storage instead of a class-member variable. It will allocate it
// if
// needed.
void Compute(Real *x, bool forward, std::vector<Real> *temp_buffer) const;
~SplitRadixComplexFft();
protected:
// temp_buffer_ is allocated only if someone calls Compute with only one Real*
// argument and we need a temporary buffer while creating interleaved data.
std::vector<Real> temp_buffer_;
private:
void ComputeTables();
void ComputeRecursive(Real *xr, Real *xi, Integer logn) const;
void BitReversePermute(Real *x, Integer logn) const;
Integer N_;
Integer logn_; // log(N)
Integer *brseed_;
// brseed is Evans' seed table, ref: (Ref: <NAME>.
// Evans, "An improved digit-reversal permutation algorithm ...",
// IEEE Trans. ASSP, Aug. 1987, pp. 1120-1125).
Real **tab_; // Tables of butterfly coefficients.
KALDI_DISALLOW_COPY_AND_ASSIGN(SplitRadixComplexFft);
};
template <typename Real>
class SplitRadixRealFft : private SplitRadixComplexFft<Real> {
public:
SplitRadixRealFft(MatrixIndexT N)
: // will fail unless N>=4 and N is a power of 2.
SplitRadixComplexFft<Real>(N / 2),
N_(N) {}
/// If forward == true, this function transforms from a sequence of N real
/// points to its complex fourier
/// transform; otherwise it goes in the reverse direction. If you call it
/// in the forward and then reverse direction and multiply by 1.0/N, you
/// will get back the original data.
/// The interpretation of the complex-FFT data is as follows: the array
/// is a sequence of complex numbers C_n of length N/2 with (real, im) format,
/// i.e. [real0, real_{N/2}, real1, im1, real2, im2, real3, im3, ...].
void Compute(Real *x, bool forward);
/// This is as the other Compute() function, but it is a const version that
/// uses a user-supplied buffer.
void Compute(Real *x, bool forward, std::vector<Real> *temp_buffer) const;
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(SplitRadixRealFft);
int N_;
};
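// Illustrative usage sketch (added annotation, not in the original header;
// "N" and "x" are hypothetical). A forward transform followed by the inverse
// and a scale of 1.0/N recovers the original data, as documented above:
//
//   SplitRadixRealFft<BaseFloat> srfft(N); // N must be a power of 2, N >= 4
//   srfft.Compute(x.Data(), true);         // x: Vector<BaseFloat> of size N
//   srfft.Compute(x.Data(), false);        // inverse, without the 1/N factor
//   x.Scale(1.0 / N);                      // x now holds the original data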
/// @} end of "addtogroup matrix_funcs_misc"
} // end namespace kaldi
#endif
<|start_filename|>tonic-suite/asr/src/thread/kaldi-mutex.cc<|end_filename|>
// thread/kaldi-mutex.cc
// Copyright 2012 <NAME> (Brno University of Technology)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <pthread.h>
#include <cerrno>
#include <string.h>
#include "base/kaldi-error.h"
#include "thread/kaldi-mutex.h"
namespace kaldi {
Mutex::Mutex() {
int ret;
if ((ret = pthread_mutex_init(&mutex_, NULL)) != 0)
KALDI_ERR << "Cannot initialize pthread mutex, error is: " << strerror(ret);
}
Mutex::~Mutex() {
int ret;
if ((ret = pthread_mutex_destroy(&mutex_)) != 0) {
if (ret != EBUSY) { // EBUSY (16): the mutex is still locked
KALDI_ERR << "Cannot destroy pthread mutex, error is: " << strerror(ret);
} else {
KALDI_WARN
<< "Error destroying pthread mutex; ignoring it as it could be "
<< "a known issue that affects Haswell processors, see "
<< "https://sourceware.org/bugzilla/show_bug.cgi?id=16657 "
<< "If your processor is not Haswell and you see this message, "
<< "it could be a bug in Kaldi.";
}
}
}
void Mutex::Lock() {
int ret;
if ((ret = pthread_mutex_lock(&mutex_)) != 0)
KALDI_ERR << "Error on locking pthread mutex, error is: " << strerror(ret);
}
bool Mutex::TryLock() {
int32 ret = pthread_mutex_trylock(&mutex_);
bool lock_succeeded = false;
switch (ret) {
case 0:
lock_succeeded = true;
break;
case EBUSY:
lock_succeeded = false;
break;
default:
KALDI_ERR << "Error on try-locking pthread mutex, error is: "
<< strerror(ret);
}
return lock_succeeded;
}
void Mutex::Unlock() {
int ret;
if ((ret = pthread_mutex_unlock(&mutex_)) != 0)
KALDI_ERR << "Error on unlocking pthread mutex, error is: "
<< strerror(ret);
}
} // namespace kaldi
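// A minimal usage sketch (not part of the library itself) of the Mutex class
// defined above; the guarded counter and the two helper functions are made-up
// examples.
namespace kaldi {
namespace {
Mutex example_mutex;
int example_counter = 0;
}  // anonymous namespace

void ExampleIncrement() {
  example_mutex.Lock();          // blocks until the mutex is acquired
  example_counter++;
  example_mutex.Unlock();
}

bool ExampleTryIncrement() {
  if (!example_mutex.TryLock())  // returns false instead of blocking
    return false;
  example_counter++;
  example_mutex.Unlock();
  return true;
}
}  // namespace kaldi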
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-functions.h<|end_filename|>
// nnet2/nnet-functions.h
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_NNET2_NNET_FUNCTIONS_H_
#define KALDI_NNET2_NNET_FUNCTIONS_H_
#include "base/kaldi-common.h"
#include "util/kaldi-io.h"
#include "matrix/matrix-lib.h"
#include "nnet2/nnet-component.h"
#include "nnet2/nnet-nnet.h"
#include <iostream>
#include <sstream>
#include <vector>
namespace kaldi {
namespace nnet2 {
// Here we declare various functions for manipulating the neural net,
// such as adding new hidden layers; we'll add things like "mixing up"
// to here.
/// If "nnet" has exactly one softmax layer, this function will return
/// its index; otherwise it will return -1.
int32 IndexOfSoftmaxLayer(const Nnet &nnet);
/**
Inserts the components of one neural network into a particular place in the
other one. This is useful for adding hidden layers to a neural net. Inserts
the components of "src_nnet" before component index c of "dest_nnet".
*/
void InsertComponents(const Nnet &src_nnet, int32 c, Nnet *dest_nnet);
/**
Removes the last "num_to_remove" components and
adds the components from "src_nnet".
*/
void ReplaceLastComponents(const Nnet &src_nnet, int32 num_to_remove,
Nnet *dest_nnet);
} // namespace nnet2
} // namespace kaldi
#endif
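// A minimal sketch (not part of the library itself) of how the declarations
// above combine in practice: find the single softmax layer and splice a new
// hidden layer in front of it. ExampleAddHiddenLayerBeforeSoftmax is a made-up
// helper; both Nnet arguments are assumed to be already constructed and
// dimensionally compatible.
namespace kaldi {
namespace nnet2 {
inline void ExampleAddHiddenLayerBeforeSoftmax(const Nnet &hidden_layer_nnet,
                                               Nnet *existing_nnet) {
  int32 softmax_index = IndexOfSoftmaxLayer(*existing_nnet);
  KALDI_ASSERT(softmax_index != -1 && "expected exactly one softmax layer");
  // Insert the new components immediately before the softmax component.
  InsertComponents(hidden_layer_nnet, softmax_index, existing_nnet);
}
}  // namespace nnet2
}  // namespace kaldi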
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-latgen-map.cc<|end_filename|>
// gmmbin/gmm-latgen-map.cc
// Copyright 2012 <NAME>, Cisco Systems;
// Johns Hopkins University (author: <NAME>)
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <string>
#include <vector>
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "gmm/mle-am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "transform/fmllr-diag-gmm.h"
#include "fstext/fstext-lib.h"
#include "decoder/lattice-faster-decoder.h"
#include "gmm/decodable-am-diag-gmm.h"
#include "base/timer.h"
#include "lat/kaldi-lattice.h" // for {Compact}LatticeArc
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
using fst::SymbolTable;
using fst::VectorFst;
using fst::StdArc;
const char *usage =
"Decode features using a GMM-based model. Note: the input\n"
"<gmms-rspecifier> will typically be piped in from gmm-est-map.\n"
"Note: <model-in> is only needed for the transition-model, which isn't\n"
"included in <gmms-rspecifier>.\n"
"\n"
"Usage: gmm-latgen-map [options] <model-in> "
"<gmms-rspecifier> <fsts-rxfilename|fsts-rspecifier> "
"<features-rspecifier> "
"<lattice-wspecifier> [ <words-wspecifier> [ <alignments-wspecifier> ] "
"]\n";
ParseOptions po(usage);
bool binary = true;
bool allow_partial = true;
BaseFloat acoustic_scale = 0.1;
std::string word_syms_filename, utt2spk_rspecifier;
LatticeFasterDecoderConfig decoder_opts;
decoder_opts.Register(&po);
po.Register("utt2spk", &utt2spk_rspecifier,
"rspecifier for utterance to "
"speaker map");
po.Register("binary", &binary, "Write output in binary mode");
po.Register("acoustic-scale", &acoustic_scale,
"Scaling factor for acoustic likelihoods");
po.Register("word-symbol-table", &word_syms_filename,
"Symbol table for words [for debug output]");
po.Register("allow-partial", &allow_partial,
"Produce output even when final state was not reached");
po.Read(argc, argv);
if (po.NumArgs() < 5 || po.NumArgs() > 7) {
po.PrintUsage();
exit(1);
}
std::string model_in_filename = po.GetArg(1),
gmms_rspecifier = po.GetArg(2), fst_in_filename = po.GetArg(3),
feature_rspecifier = po.GetArg(4),
lattice_wspecifier = po.GetArg(5),
words_wspecifier = po.GetOptArg(6),
alignment_wspecifier = po.GetOptArg(7);
TransitionModel trans_model;
{
bool binary_read;
Input is(model_in_filename, &binary_read);
trans_model.Read(is.Stream(), binary_read);
}
RandomAccessMapAmDiagGmmReaderMapped gmms_reader(gmms_rspecifier,
utt2spk_rspecifier);
Int32VectorWriter words_writer(words_wspecifier);
Int32VectorWriter alignment_writer(alignment_wspecifier);
bool determinize = decoder_opts.determinize_lattice;
if (!determinize) KALDI_WARN << "determinize is set to FALSE ...";
CompactLatticeWriter compact_lattice_writer;
LatticeWriter lattice_writer;
if (lattice_wspecifier != "") {
if (!(determinize ? compact_lattice_writer.Open(lattice_wspecifier)
: lattice_writer.Open(lattice_wspecifier)))
KALDI_ERR << "Could not open table for writing lattices: "
<< lattice_wspecifier;
}
fst::SymbolTable *word_syms = NULL;
if (word_syms_filename != "") {
word_syms = fst::SymbolTable::ReadText(word_syms_filename);
if (!word_syms) {
KALDI_ERR << "Could not read symbol table from file "
<< word_syms_filename;
}
}
BaseFloat tot_like = 0.0;
kaldi::int64 frame_count = 0;
int num_success = 0, num_fail = 0;
Timer timer;
if (ClassifyRspecifier(fst_in_filename, NULL, NULL) == kNoRspecifier) {
// Input FST is just one FST, not a table of FSTs.
VectorFst<StdArc> *decode_fst = fst::ReadFstKaldi(fst_in_filename);
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
for (; !feature_reader.Done(); feature_reader.Next()) {
string utt = feature_reader.Key();
if (!gmms_reader.HasKey(utt)) {
KALDI_WARN
<< "Utterance " << utt
<< " has no corresponding MAP model; skipping this utterance.";
num_fail++;
continue;
}
AmDiagGmm am_gmm;
am_gmm.CopyFromAmDiagGmm(gmms_reader.Value(utt));
Matrix<BaseFloat> features(feature_reader.Value());
feature_reader.FreeCurrent();
if (features.NumRows() == 0) {
KALDI_WARN << "Zero-length utterance: " << utt;
num_fail++;
continue;
}
LatticeFasterDecoder decoder(*decode_fst, decoder_opts);
kaldi::DecodableAmDiagGmmScaled gmm_decodable(am_gmm, trans_model,
features, acoustic_scale);
double like;
if (DecodeUtteranceLatticeFaster(
decoder, gmm_decodable, trans_model, word_syms, utt,
acoustic_scale, determinize, allow_partial, &alignment_writer,
&words_writer, &compact_lattice_writer, &lattice_writer,
&like)) {
tot_like += like;
frame_count += features.NumRows();
num_success++;
} else
num_fail++;
} // end looping over all utterances
} else {
RandomAccessTableReader<fst::VectorFstHolder> fst_reader(fst_in_filename);
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
for (; !feature_reader.Done(); feature_reader.Next()) {
string utt = feature_reader.Key();
if (!fst_reader.HasKey(utt)) {
KALDI_WARN << "Utterance " << utt << " has no corresponding FST; "
<< "skipping this utterance.";
num_fail++;
continue;
}
if (!gmms_reader.HasKey(utt)) {
KALDI_WARN
<< "Utterance " << utt
<< " has no corresponding MAP model; skipping this utterance.";
num_fail++;
continue;
}
AmDiagGmm am_gmm;
am_gmm.CopyFromAmDiagGmm(gmms_reader.Value(utt));
Matrix<BaseFloat> features(feature_reader.Value());
feature_reader.FreeCurrent();
if (features.NumRows() == 0) {
KALDI_WARN << "Zero-length utterance: " << utt;
num_fail++;
continue;
}
LatticeFasterDecoder decoder(fst_reader.Value(utt), decoder_opts);
kaldi::DecodableAmDiagGmmScaled gmm_decodable(am_gmm, trans_model,
features, acoustic_scale);
double like;
if (DecodeUtteranceLatticeFaster(
decoder, gmm_decodable, trans_model, word_syms, utt,
acoustic_scale, determinize, allow_partial, &alignment_writer,
&words_writer, &compact_lattice_writer, &lattice_writer,
&like)) {
tot_like += like;
frame_count += features.NumRows();
num_success++;
} else
num_fail++;
} // end looping over all utterances
}
KALDI_LOG << "Average log-likelihood per frame is "
<< (tot_like / frame_count) << " over " << frame_count
<< " frames.";
double elapsed = timer.Elapsed();
KALDI_LOG << "Time taken [excluding initialization] " << elapsed
<< "s: real-time factor assuming 100 frames/sec is "
<< (elapsed * 100.0 / frame_count);
KALDI_LOG << "Done " << num_success << " utterances, failed for "
<< num_fail;
if (word_syms) delete word_syms;
return (num_success != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/cudamatrix/cu-randkernels.h<|end_filename|>
// cudamatrix/cu-randkernels.h
// Copyright 2012 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_CUDAMATRIX_CU_RANDKERNELS_H_
#define KALDI_CUDAMATRIX_CU_RANDKERNELS_H_
#if HAVE_CUDA == 1
#include "base/kaldi-error.h"
#include "cudamatrix/cu-randkernels-ansi.h"
/*
* In this file are C++ templated wrappers
* of the ANSI-C CUDA kernels
*/
namespace kaldi {
/*********************************************************
* base templates
*/
template <typename Real>
inline void cuda_rand(dim3 Gr, dim3 Bl, Real *mat, uint32_cuda *z1,
uint32_cuda *z2, uint32_cuda *z3, uint32_cuda *z4,
MatrixDim d) {
KALDI_ERR << __func__ << " Not implemented!";
}
template <typename Real>
inline void cuda_gauss_rand(dim3 Gr, dim3 Bl, Real *mat, uint32_cuda *z1,
uint32_cuda *z2, uint32_cuda *z3, uint32_cuda *z4,
MatrixDim d) {
KALDI_ERR << __func__ << " Not implemented!";
}
template <typename Real>
inline void cuda_vec_gauss_rand(int Gr, int Bl, Real *v, uint32_cuda *z1,
uint32_cuda *z2, uint32_cuda *z3,
uint32_cuda *z4, int dim) {
KALDI_ERR << __func__ << " Not implemented!";
}
template <typename Real>
inline void cuda_binarize_probs(dim3 Gr, dim3 Bl, Real *states,
const Real *probs, Real *rand, MatrixDim d) {
KALDI_ERR << __func__ << " Not implemented!";
}
/*********************************************************
* float specializations
*/
template <>
inline void cuda_rand<float>(dim3 Gr, dim3 Bl, float *mat, uint32_cuda *z1,
uint32_cuda *z2, uint32_cuda *z3, uint32_cuda *z4,
MatrixDim d) {
cudaF_rand(Gr, Bl, mat, z1, z2, z3, z4, d);
}
template <>
inline void cuda_gauss_rand<float>(dim3 Gr, dim3 Bl, float *mat,
uint32_cuda *z1, uint32_cuda *z2,
uint32_cuda *z3, uint32_cuda *z4,
MatrixDim d) {
cudaF_gauss_rand(Gr, Bl, mat, z1, z2, z3, z4, d);
}
template <>
inline void cuda_vec_gauss_rand<float>(int Gr, int Bl, float *v,
uint32_cuda *z1, uint32_cuda *z2,
uint32_cuda *z3, uint32_cuda *z4,
int dim) {
cudaF_vec_gauss_rand(Gr, Bl, v, z1, z2, z3, z4, dim);
}
template <>
inline void cuda_binarize_probs<float>(dim3 Gr, dim3 Bl, float *states,
const float *probs, float *rand,
MatrixDim d) {
cudaF_binarize_probs(Gr, Bl, states, probs, rand, d);
}
/*********************************************************
* double specializations
*/
template <>
inline void cuda_rand<double>(dim3 Gr, dim3 Bl, double *mat, uint32_cuda *z1,
uint32_cuda *z2, uint32_cuda *z3, uint32_cuda *z4,
MatrixDim d) {
cudaD_rand(Gr, Bl, mat, z1, z2, z3, z4, d);
}
template <>
inline void cuda_gauss_rand<double>(dim3 Gr, dim3 Bl, double *mat,
uint32_cuda *z1, uint32_cuda *z2,
uint32_cuda *z3, uint32_cuda *z4,
MatrixDim d) {
cudaD_gauss_rand(Gr, Bl, mat, z1, z2, z3, z4, d);
}
template <>
inline void cuda_vec_gauss_rand<double>(int Gr, int Bl, double *v,
uint32_cuda *z1, uint32_cuda *z2,
uint32_cuda *z3, uint32_cuda *z4,
int dim) {
cudaD_vec_gauss_rand(Gr, Bl, v, z1, z2, z3, z4, dim);
}
template <>
inline void cuda_binarize_probs<double>(dim3 Gr, dim3 Bl, double *states,
const double *probs, double *rand,
MatrixDim d) {
cudaD_binarize_probs(Gr, Bl, states, probs, rand, d);
}
} // namespace kaldi
#endif // HAVE_CUDA
#endif
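// A minimal sketch (not part of the library itself) of how a caller typically
// relies on the specializations above: one templated function dispatches to
// cudaF_rand or cudaD_rand depending on Real, while any other Real type falls
// through to the base template and raises KALDI_ERR. The grid/block dimensions
// and the z1..z4 generator state are assumed to have been set up elsewhere by
// the caller; ExampleFillUniformRandom is a made-up helper name.
#if HAVE_CUDA == 1
namespace kaldi {
template <typename Real>
inline void ExampleFillUniformRandom(dim3 Gr, dim3 Bl, Real *mat,
                                     uint32_cuda *z1, uint32_cuda *z2,
                                     uint32_cuda *z3, uint32_cuda *z4,
                                     MatrixDim d) {
  // Resolves at compile time: float -> cudaF_rand, double -> cudaD_rand.
  cuda_rand(Gr, Bl, mat, z1, z2, z3, z4, d);
}
}  // namespace kaldi
#endif  // HAVE_CUDA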
<|start_filename|>tonic-suite/asr/src/gst-plugin/gst-audio-source.h<|end_filename|>
// gst-plugin/gst-audio-source.h
// Copyright 2013 <NAME>, Tallinn University of Technology
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_GST_PLUGIN_GST_AUDIO_SOURCE_H_
#define KALDI_GST_PLUGIN_GST_AUDIO_SOURCE_H_
#include <online/online-audio-source.h>
#include <matrix/kaldi-vector.h>
#include <gst/gst.h>
namespace kaldi {
// OnlineAudioSourceItf implementation using a queue of Gst Buffers
class GstBufferSource : public OnlineAudioSourceItf {
public:
typedef int16 SampleType; // hardcoded 16-bit audio
GstBufferSource();
// Implementation of the OnlineAudioSourceItf
bool Read(Vector<BaseFloat> *data);
void PushBuffer(GstBuffer *buf);
void SetEnded(bool ended);
~GstBufferSource();
private:
GAsyncQueue *buf_queue_;
gint pos_in_current_buf_;
GstBuffer *current_buffer_;
bool ended_;
GMutex lock_;
GCond data_cond_;
KALDI_DISALLOW_COPY_AND_ASSIGN(GstBufferSource);
};
} // namespace kaldi
#endif // KALDI_GST_PLUGIN_GST_AUDIO_SOURCE_H_
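// A minimal sketch (not part of the library itself) of the intended data flow
// for GstBufferSource: the GStreamer streaming thread pushes buffers (and
// signals end-of-stream) while a decoding thread drains samples through
// Read(). The helper names are made up, and Read() is assumed to follow the
// usual OnlineAudioSourceItf contract of filling up to data->Dim() samples and
// returning whether more data may still arrive.
namespace kaldi {
inline void ExampleFeedFromGstCallback(GstBufferSource *src, GstBuffer *buf,
                                       bool end_of_stream) {
  src->PushBuffer(buf);                  // called from the GStreamer thread
  if (end_of_stream) src->SetEnded(true);
}

inline void ExampleDrainAudio(GstBufferSource *src) {
  Vector<BaseFloat> chunk(1600);         // e.g. 0.1 s of 16 kHz audio per read
  while (src->Read(&chunk)) {
    // ... hand "chunk" to feature extraction / the decoder here ...
  }
}
}  // namespace kaldi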
<|start_filename|>tonic-suite/asr/src/sgmm/sgmm-clusterable.h<|end_filename|>
// sgmm/sgmm-clusterable.h
// Copyright 2012 Johns Hopkins University (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_SGMM_SGMM_CLUSTERABLE_H_
#define KALDI_SGMM_SGMM_CLUSTERABLE_H_
#include <vector>
#include <queue>
#include "sgmm/am-sgmm.h"
#include "hmm/transition-model.h"
#include "itf/clusterable-itf.h"
namespace kaldi {
/// This header defines an object that can be used to create decision
/// trees using a form of SGMM statistics. It is analogous to the
/// GaussClusterable object, but uses the SGMM. The auxiliary function
/// it uses is related to the normal SGMM auxiliary function, but for
/// efficiency it uses a simpler model on the weights, which is equivalent
/// to assuming the weights w_{ji} [there is no index m since we assume one
/// mixture per state!] are directly estimated using ML, instead of being
/// computed from v_j and w_i as in the actual SGMM.
class SgmmClusterable : public Clusterable {
public:
SgmmClusterable(const AmSgmm &sgmm,
const std::vector<SpMatrix<double> > &H)
: // H can be an empty vector at initialization; it is used to cache
// quantities computed from the model, and is needed only by Objf().
sgmm_(sgmm),
H_(H),
gamma_(sgmm.NumGauss()),
y_(sgmm.PhoneSpaceDim()) {}
virtual std::string Type() const { return "sgmm"; }
/// compare with the Accumulate function of MleAmSgmmAccs
/// Note: the pdf-index j, relating to the original SGMM
/// in sgmm_, is only needed to select the right vector to
/// compute Gaussian-level alignments with.
void Accumulate(const SgmmPerFrameDerivedVars &frame_vars, int32 j,
BaseFloat weight);
virtual BaseFloat Objf() const;
virtual void SetZero();
virtual void Add(const Clusterable &other_in);
virtual void Sub(const Clusterable &other_in);
virtual BaseFloat Normalizer() const;
virtual Clusterable *Copy() const;
virtual void Scale(BaseFloat f);
virtual void Write(std::ostream &os, bool binary) const;
virtual Clusterable *ReadNew(std::istream &is, bool binary) const;
virtual ~SgmmClusterable() {}
const Vector<double> &gamma() const { return gamma_; }
const Vector<double> &y() const { return y_; }
private:
void ComputeH(); // Compute the quantity my_H_, from gamma_ and H_.
const AmSgmm &sgmm_; // Reference to the SGMM object, needed to compute
// objective functions.
const std::vector<SpMatrix<double> > &
H_; // Reference to a vector of SpMatrix which
// should have been computed from the model using ComputeH(). Needed for
// Objf() function.
Vector<double>
gamma_; // Occupation counts for each Gaussian index. Comparable
// to the gamma_{jmi} statistics in the SGMM paper.
Vector<double>
y_; // Statistics comparable to the y_{jm} statistics in the SGMM
// paper.
SpMatrix<double>
my_H_; // This quantity is a weighted sum over the H quantities,
// weighted by gamma_(i). It's only nonempty if the H_ matrix is nonempty.
// This quantity is never written to disk; it is to be viewed as a kind of
// cache, present only for purposes of fast objective-function computation.
};
/// Comparable to AccumulateTreeStats, but this version
/// accumulates stats of type SgmmClusterable. Returns
/// true on success.
bool AccumulateSgmmTreeStats(
const TransitionModel &trans_model, const AmSgmm &am_sgmm,
const std::vector<SpMatrix<double> > &H, // this is a ref. to temp.
// storage needed in the clusterable class... can be empty
// during accumulation as it doesn't call Objf().
int N, // context window size.
int P, // central position.
const std::vector<int32> &ci_phones, // must be sorted
const std::vector<int32> &alignment,
const std::vector<std::vector<int32> > &gselect,
const SgmmPerSpkDerivedVars &per_spk_vars,
const Matrix<BaseFloat> &features,
std::map<EventType, SgmmClusterable *> *stats);
} // end namespace kaldi
#endif // KALDI_SGMM_SGMM_CLUSTERABLE_H_
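// A minimal sketch (not part of the library itself): SgmmClusterable follows
// the generic Clusterable interface declared above, so the usual tree-building
// question "how much objective is lost by pooling two sets of stats?" can be
// answered with Copy()/Add()/Objf() alone. ExampleMergeGain is a made-up
// helper; for likelihood-style objectives the returned value is <= 0.
namespace kaldi {
inline BaseFloat ExampleMergeGain(const Clusterable &a, const Clusterable &b) {
  Clusterable *pooled = a.Copy();   // deep copy of the first stats object
  pooled->Add(b);                   // pool in the second one
  BaseFloat gain = pooled->Objf() - a.Objf() - b.Objf();
  delete pooled;
  return gain;
}
}  // namespace kaldi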
<|start_filename|>tonic-suite/asr/src/base/io-funcs-test.cc<|end_filename|>
// base/io-funcs-test.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/io-funcs.h"
#include "base/kaldi-math.h"
namespace kaldi {
void UnitTestIo(bool binary) {
{
const char *filename = "tmpf";
std::ofstream outfile(filename, std::ios_base::out | std::ios_base::binary);
InitKaldiOutputStream(outfile, binary);
if (!binary) outfile << "\t";
int64 i1 = Rand() % 10000;
WriteBasicType(outfile, binary, i1);
uint16 i2 = Rand() % 10000;
WriteBasicType(outfile, binary, i2);
if (!binary) outfile << "\t";
char c = Rand();
WriteBasicType(outfile, binary, c);
if (!binary && Rand() % 2 == 0) outfile << " \n";
std::vector<int32> vec1;
WriteIntegerVector(outfile, binary, vec1);
if (!binary && Rand() % 2 == 0) outfile << " \n";
std::vector<uint16> vec2;
for (size_t i = 0; i < 10; i++) vec2.push_back(Rand() % 100 - 10);
WriteIntegerVector(outfile, binary, vec2);
if (!binary) outfile << " \n";
std::vector<char> vec3;
for (size_t i = 0; i < 10; i++) vec3.push_back(Rand() % 100);
WriteIntegerVector(outfile, binary, vec3);
if (!binary && Rand() % 2 == 0) outfile << " \n";
const char *token1 = "Hi";
WriteToken(outfile, binary, token1);
if (!binary) outfile << " \n";
std::string token2 = "There.";
WriteToken(outfile, binary, token2);
if (!binary && Rand() % 2 == 0) outfile << " \n";
std::string token3 = "You.";
WriteToken(outfile, binary, token3);
if (!binary && Rand() % 2 == 0) outfile << " ";
float f1 = RandUniform();
WriteBasicType(outfile, binary, f1);
if (!binary && Rand() % 2 == 0) outfile << "\t";
float f2 = RandUniform();
WriteBasicType(outfile, binary, f2);
double d1 = RandUniform();
WriteBasicType(outfile, binary, d1);
if (!binary && Rand() % 2 == 0) outfile << "\t";
double d2 = RandUniform();
WriteBasicType(outfile, binary, d2);
if (!binary && Rand() % 2 == 0) outfile << "\t";
outfile.close();
{
std::ifstream infile(filename, std::ios_base::in | std::ios_base::binary);
bool binary_in;
InitKaldiInputStream(infile, &binary_in);
int64 i1_in;
ReadBasicType(infile, binary_in, &i1_in);
KALDI_ASSERT(i1_in == i1);
uint16 i2_in;
ReadBasicType(infile, binary_in, &i2_in);
KALDI_ASSERT(i2_in == i2);
char c_in;
ReadBasicType(infile, binary_in, &c_in);
KALDI_ASSERT(c_in == c);
std::vector<int32> vec1_in;
ReadIntegerVector(infile, binary_in, &vec1_in);
KALDI_ASSERT(vec1_in == vec1);
std::vector<uint16> vec2_in;
ReadIntegerVector(infile, binary_in, &vec2_in);
KALDI_ASSERT(vec2_in == vec2);
std::vector<char> vec3_in;
ReadIntegerVector(infile, binary_in, &vec3_in);
KALDI_ASSERT(vec3_in == vec3);
std::string token1_in, token2_in;
KALDI_ASSERT(Peek(infile, binary_in) == static_cast<int>(*token1));
KALDI_ASSERT(PeekToken(infile, binary_in) == (int)*token1); // Note:
// the stuff with skipping over '<' is tested in ../util/kaldi-io-test.cc,
// since we need to make sure it works with pipes.
ReadToken(infile, binary_in, &token1_in);
KALDI_ASSERT(token1_in == std::string(token1));
ReadToken(infile, binary_in, &token2_in);
KALDI_ASSERT(token2_in == std::string(token2));
if (Rand() % 2 == 0)
ExpectToken(infile, binary_in, token3.c_str());
else
ExpectToken(infile, binary_in, token3);
float f1_in; // same type.
ReadBasicType(infile, binary_in, &f1_in);
AssertEqual(f1_in, f1);
double f2_in; // wrong type.
ReadBasicType(infile, binary_in, &f2_in);
AssertEqual(f2_in, f2);
double d1_in; // same type.
ReadBasicType(infile, binary_in, &d1_in);
AssertEqual(d1_in, d1);
float d2_in; // wrong type.
ReadBasicType(infile, binary_in, &d2_in);
AssertEqual(d2_in, d2);
KALDI_ASSERT(Peek(infile, binary_in) == -1);
KALDI_ASSERT(PeekToken(infile, binary_in) == -1);
}
unlink(filename);
}
}
} // end namespace kaldi.
int main() {
using namespace kaldi;
for (size_t i = 0; i < 10; i++) {
UnitTestIo(false);
UnitTestIo(true);
}
KALDI_ASSERT(
1); // just wanted to check that KALDI_ASSERT does not fail for 1.
return 0;
}
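// A minimal sketch (not part of the test itself) of the Write/Read pattern the
// test above exercises, applied to a small made-up struct: tokens bracket the
// object so that format errors are detected when reading it back.
namespace kaldi {
struct ExamplePoint {
  int32 index;
  float value;
  void Write(std::ostream &os, bool binary) const {
    WriteToken(os, binary, "<ExamplePoint>");
    WriteBasicType(os, binary, index);
    WriteBasicType(os, binary, value);
    WriteToken(os, binary, "</ExamplePoint>");
  }
  void Read(std::istream &is, bool binary) {
    ExpectToken(is, binary, "<ExamplePoint>");
    ReadBasicType(is, binary, &index);
    ReadBasicType(is, binary, &value);
    ExpectToken(is, binary, "</ExamplePoint>");
  }
};
}  // namespace kaldi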
<|start_filename|>tonic-suite/asr/src/transform/basis-fmllr-diag-gmm.cc<|end_filename|>
// transform/basis-fmllr-diag-gmm.cc
// Copyright 2012 Carnegie Mellon University (author: <NAME>)
// 2014 Johns Hopkins University (author: <NAME>)
// 2014 IMSL, PKU-HKUST (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <algorithm>
#include <utility>
#include <vector>
using std::vector;
#include <string>
using std::string;
#include "transform/fmllr-diag-gmm.h"
#include "gmm/am-diag-gmm.h"
#include "gmm/mle-diag-gmm.h"
#include "gmm/mle-am-diag-gmm.h"
#include "transform/basis-fmllr-diag-gmm.h"
namespace kaldi {
/// This function takes the step direction (delta) of the fMLLR matrix as an
/// argument and optimizes the step size using Newton's method. It is an
/// iterative method in which each iteration should not decrease the auxiliary
/// function. Note that the resulting step size k should be close to 1; if
/// k << 1 or k >> 1, there may be problems with the preconditioning or the
/// speaker stats.
static BaseFloat CalBasisFmllrStepSize(
const AffineXformStats &spk_stats, const Matrix<BaseFloat> &spk_stats_tmp_K,
const std::vector<SpMatrix<BaseFloat> > &spk_stats_tmp_G,
const Matrix<BaseFloat> &delta, const Matrix<BaseFloat> &A,
const Matrix<BaseFloat> &S, int32 max_iters);
void BasisFmllrAccus::Write(std::ostream &os, bool binary) const {
WriteToken(os, binary, "<BASISFMLLRACCUS>");
WriteToken(os, binary, "<BETA>");
WriteBasicType(os, binary, beta_);
if (!binary) os << '\n';
if (grad_scatter_.NumCols() != 0) {
WriteToken(os, binary, "<GRADSCATTER>");
grad_scatter_.Write(os, binary);
}
WriteToken(os, binary, "</BASISFMLLRACCUS>");
}
void BasisFmllrAccus::Read(std::istream &is, bool binary, bool add) {
ExpectToken(is, binary, "<BASISFMLLRACCUS>");
ExpectToken(is, binary, "<BETA>");
double tmp_beta = 0;
ReadBasicType(is, binary, &tmp_beta);
if (add) {
beta_ += tmp_beta;
} else {
beta_ = tmp_beta;
}
ExpectToken(is, binary, "<GRADSCATTER>");
grad_scatter_.Read(is, binary, add);
ExpectToken(is, binary, "</BASISFMLLRACCUS>");
}
void BasisFmllrAccus::ResizeAccus(int32 dim) {
if (dim <= 0) {
KALDI_ERR << "Invalid feature dimension " << dim; // dim=0 is not allowed
} else {
// 'kSetZero' may not be necessary, but makes computation safe
grad_scatter_.Resize((dim + 1) * dim, kSetZero);
}
}
void BasisFmllrAccus::AccuGradientScatter(const AffineXformStats &spk_stats) {
// Gradient of auxf w.r.t. xform_spk
// Eq. (33)
Matrix<double> grad_mat(dim_, dim_ + 1);
grad_mat.SetUnit();
grad_mat.Scale(spk_stats.beta_);
grad_mat.AddMat(1.0, spk_stats.K_);
for (int d = 0; d < dim_; ++d) {
Matrix<double> G_d_mat(spk_stats.G_[d]);
grad_mat.Row(d).AddVec(-1.0, G_d_mat.Row(d));
}
// Row stack of gradient matrix
Vector<BaseFloat> grad_vec((dim_ + 1) * dim_);
grad_vec.CopyRowsFromMat(grad_mat);
// The amount of data beta_ is likely to be ZERO, especially
// when silence-weight is set to be 0 and we are using the
// per-utt mode.
if (spk_stats.beta_ > 0) {
beta_ += spk_stats.beta_;
grad_scatter_.AddVec2(BaseFloat(1.0 / spk_stats.beta_), grad_vec);
}
}
void BasisFmllrEstimate::Write(std::ostream &os, bool binary) const {
uint32 tmp_uint32;
WriteToken(os, binary, "<BASISFMLLRPARAM>");
WriteToken(os, binary, "<NUMBASIS>");
tmp_uint32 = static_cast<uint32>(basis_size_);
WriteBasicType(os, binary, tmp_uint32);
if (fmllr_basis_.size() != 0) {
WriteToken(os, binary, "<BASIS>");
for (int32 n = 0; n < basis_size_; ++n) {
fmllr_basis_[n].Write(os, binary);
}
}
WriteToken(os, binary, "</BASISFMLLRPARAM>");
}
void BasisFmllrEstimate::Read(std::istream &is, bool binary) {
uint32 tmp_uint32;
string token;
ExpectToken(is, binary, "<BASISFMLLRPARAM>");
ExpectToken(is, binary, "<NUMBASIS>");
ReadBasicType(is, binary, &tmp_uint32);
basis_size_ = static_cast<int32>(tmp_uint32);
KALDI_ASSERT(basis_size_ > 0);
ExpectToken(is, binary, "<BASIS>");
fmllr_basis_.resize(basis_size_);
for (int32 n = 0; n < basis_size_; ++n) {
fmllr_basis_[n].Read(is, binary);
if (n == 0)
dim_ = fmllr_basis_[n].NumRows();
else {
KALDI_ASSERT(dim_ == fmllr_basis_[n].NumRows());
}
}
ExpectToken(is, binary, "</BASISFMLLRPARAM>");
}
void BasisFmllrEstimate::ComputeAmDiagPrecond(const AmDiagGmm &am_gmm,
SpMatrix<double> *pre_cond) {
KALDI_ASSERT(am_gmm.Dim() == dim_);
if (pre_cond->NumRows() != (dim_ + 1) * dim_)
pre_cond->Resize((dim_ + 1) * dim_, kSetZero);
int32 num_pdf = am_gmm.NumPdfs();
Matrix<double> H_mat((dim_ + 1) * dim_, (dim_ + 1) * dim_);
// expected values of fMLLR G statistics
vector<SpMatrix<double> > G_hat(dim_);
for (int32 d = 0; d < dim_; ++d) G_hat[d].Resize(dim_ + 1, kSetZero);
// extend the mean vector with a 1, i.e. [mu_jm 1]
Vector<double> extend_mean(dim_ + 1);
// extend covariance matrix with a row and column of 0
Vector<double> extend_var(dim_ + 1);
for (int32 j = 0; j < num_pdf; ++j) {
const DiagGmm &diag_gmm = am_gmm.GetPdf(j);
int32 num_comp = diag_gmm.NumGauss();
// means, covariance and mixture weights for this diagonal GMM
Matrix<double> means(num_comp, dim_);
Matrix<double> vars(num_comp, dim_);
diag_gmm.GetMeans(&means);
diag_gmm.GetVars(&vars);
Vector<BaseFloat> weights(diag_gmm.weights());
for (int32 m = 0; m < num_comp; ++m) {
extend_mean.Range(0, dim_).CopyFromVec(means.Row(m));
extend_mean(dim_) = 1.0;
extend_var.Range(0, dim_).CopyFromVec(vars.Row(m));
extend_var(dim_) = 0;
// loop over feature dimension
// Eq. (28): G_hat {d} = \sum_{j, m} P_{j}{m} Inv_Sigma{j, m, d}
// (mule_extend mule_extend^T + Sigma_extend)
// where P_{j}{m} = P_{j} c_{j}{m}
for (int32 d = 0; d < dim_; ++d) {
double alpha = (1.0 / num_pdf) * weights(m) * (1.0 / vars.Row(m)(d));
G_hat[d].AddVec2(alpha, extend_mean);
// add vector to the diagonal elements of the matrix
// not work for full covariance matrices
G_hat[d].AddDiagVec(alpha, extend_var);
} // loop over dimension
} // loop over Gaussians
} // loop over states
// fill H_ with G_hat[i]; build the block diagonal structure
// Eq. (31)
for (int32 d = 0; d < dim_; d++) {
H_mat.Range(d * (dim_ + 1), (dim_ + 1), d * (dim_ + 1), (dim_ + 1))
.CopyFromSp(G_hat[d]);
}
// add the extra H(1) elements
// Eq. (30) and Footnote 1 (0-based index)
for (int32 i = 0; i < dim_; ++i)
for (int32 j = 0; j < dim_; ++j)
H_mat(i * (dim_ + 1) + j, j * (dim_ + 1) + i) += 1;
// the final H should be symmetric
if (!H_mat.IsSymmetric())
KALDI_ERR << "Preconditioner matrix H = H(1) + H(2) is not symmetric";
pre_cond->CopyFromMat(H_mat, kTakeLower);
}
void BasisFmllrEstimate::EstimateFmllrBasis(
const AmDiagGmm &am_gmm, const BasisFmllrAccus &basis_accus) {
// Compute the preconditioner
SpMatrix<double> precond_mat((dim_ + 1) * dim_);
ComputeAmDiagPrecond(am_gmm, &precond_mat);
// H = C C^T
TpMatrix<double> C((dim_ + 1) * dim_);
C.Cholesky(precond_mat);
TpMatrix<double> C_inv(C);
C_inv.InvertDouble();
// From TpMatrix to Matrix
Matrix<double> C_inv_full((dim_ + 1) * dim_, (dim_ + 1) * dim_);
C_inv_full.CopyFromTp(C_inv);
// Convert to the preconditioned coordinates
// Eq. (35) M_hat = C^{-1} grad_scatter C^{-T}
SpMatrix<double> M_hat((dim_ + 1) * dim_);
{
SpMatrix<double> grad_scatter_d(basis_accus.grad_scatter_);
M_hat.AddMat2Sp(1.0, C_inv_full, kNoTrans, grad_scatter_d, 0.0);
}
Vector<double> Lvec((dim_ + 1) * dim_);
Matrix<double> U((dim_ + 1) * dim_, (dim_ + 1) * dim_);
// SVD of M_hat; sort eigenvalues from greatest to smallest
M_hat.SymPosSemiDefEig(&Lvec, &U);
SortSvd(&Lvec, &U);
// After transpose, each row is one base
U.Transpose();
fmllr_basis_.resize(basis_size_);
for (int32 n = 0; n < basis_size_; ++n) {
fmllr_basis_[n].Resize(dim_, dim_ + 1, kSetZero);
Vector<double> basis_vec((dim_ + 1) * dim_);
// Convert eigenvectors back to unnormalized space
basis_vec.AddMatVec(1.0, C_inv_full, kTrans, U.Row(n), 0.0);
// Convert stacked vectors to matrix
fmllr_basis_[n].CopyRowsFromVec(basis_vec);
}
// Output the eigenvalues of the gradient scatter matrix
// The eigenvalues are divided by twice the number of frames
// in the training data, to get the per-frame values.
Vector<double> Lvec_scaled(Lvec);
Lvec_scaled.Scale(1.0 / (2 * basis_accus.beta_));
KALDI_LOG << "The [per-frame] eigenvalues sorted from largest to smallest: "
<< Lvec_scaled;
/// The sum of the [per-frame] eigenvalues is roughly equal to
/// the improvement of log-likelihood of the training data.
KALDI_LOG << "Sum of the [per-frame] eigenvalues, that is"
" the log-likelihood improvement, is " << Lvec_scaled.Sum();
}
double BasisFmllrEstimate::ComputeTransform(const AffineXformStats &spk_stats,
Matrix<BaseFloat> *out_xform,
Vector<BaseFloat> *coefficient,
BasisFmllrOptions options) const {
if (coefficient == NULL) {
Vector<BaseFloat> tmp;
return ComputeTransform(spk_stats, out_xform, &tmp, options);
}
KALDI_ASSERT(dim_ == spk_stats.dim_);
if (spk_stats.beta_ < options.min_count) {
KALDI_WARN << "Not updating fMLLR since count is below min-count: "
<< spk_stats.beta_;
coefficient->Resize(0);
return 0.0;
} else {
if (out_xform->NumRows() != dim_ || out_xform->NumCols() != (dim_ + 1)) {
out_xform->Resize(dim_, dim_ + 1, kSetZero);
}
// Initialized either as [I;0] or as the current transform
Matrix<BaseFloat> W_mat(dim_, dim_ + 1);
if (out_xform->IsZero()) {
W_mat.SetUnit();
} else {
W_mat.CopyFromMat(*out_xform);
}
// Create temporary K and G quantities. Add for efficiency,
// avoid repetitions of converting the stats from double
// precision to single precision
Matrix<BaseFloat> stats_tmp_K(spk_stats.K_);
std::vector<SpMatrix<BaseFloat> > stats_tmp_G(dim_);
for (int32 d = 0; d < dim_; d++)
stats_tmp_G[d] = SpMatrix<BaseFloat>(spk_stats.G_[d]);
// Number of bases for this speaker, according to the available
// adaptation data
int32 basis_size = int32(
std::min(double(basis_size_), options.size_scale * spk_stats.beta_));
coefficient->Resize(basis_size, kSetZero);
BaseFloat impr_spk = 0;
for (int32 iter = 1; iter <= options.num_iters; ++iter) {
// Auxf computation based on FmllrAuxFuncDiagGmm from fmllr-diag-gmm.cc
BaseFloat start_obj = FmllrAuxFuncDiagGmm(W_mat, spk_stats);
// Contribution of quadratic terms to derivative
// Eq. (37) s_{d} = G_{d} w_{d}
Matrix<BaseFloat> S(dim_, dim_ + 1);
for (int32 d = 0; d < dim_; ++d)
S.Row(d).AddSpVec(1.0, stats_tmp_G[d], W_mat.Row(d), 0.0);
// W_mat = [A; b]
Matrix<BaseFloat> A(dim_, dim_);
A.CopyFromMat(W_mat.Range(0, dim_, 0, dim_));
Matrix<BaseFloat> A_inv(A);
A_inv.InvertDouble();
Matrix<BaseFloat> A_inv_trans(A_inv);
A_inv_trans.Transpose();
// Compute gradient of auxf w.r.t. W_mat
// Eq. (38) P = beta [A^{-T}; 0] + K - S
Matrix<BaseFloat> P(dim_, dim_ + 1);
P.SetZero();
P.Range(0, dim_, 0, dim_).CopyFromMat(A_inv_trans);
P.Scale(spk_stats.beta_);
P.AddMat(1.0, stats_tmp_K);
P.AddMat(-1.0, S);
// Compute directional gradient restricted by bases. Here we only use
// the simple gradient method, rather than conjugate gradient. Finding
// the optimal transformation W_mat is equivalent to optimizing weights
// d_{1,2,...,N}.
// Eq. (39) delta(W) = \sum_n tr(\fmllr_basis_{n}^T \P) \fmllr_basis_{n}
// delta(d_{n}) = tr(\fmllr_basis_{n}^T \P)
Matrix<BaseFloat> delta_W(dim_, dim_ + 1);
Vector<BaseFloat> delta_d(basis_size);
for (int32 n = 0; n < basis_size; ++n) {
delta_d(n) = TraceMatMat(fmllr_basis_[n], P, kTrans);
delta_W.AddMat(delta_d(n), fmllr_basis_[n]);
}
BaseFloat step_size =
CalBasisFmllrStepSize(spk_stats, stats_tmp_K, stats_tmp_G, delta_W, A,
S, options.step_size_iters);
W_mat.AddMat(step_size, delta_W, kNoTrans);
coefficient->AddVec(step_size, delta_d);
// Check auxiliary function
BaseFloat end_obj = FmllrAuxFuncDiagGmm(W_mat, spk_stats);
KALDI_VLOG(4) << "Objective function (iter=" << iter
<< "): " << start_obj / spk_stats.beta_ << " -> "
<< (end_obj / spk_stats.beta_) << " over "
<< spk_stats.beta_ << " frames";
impr_spk += (end_obj - start_obj);
} // loop over iters
out_xform->CopyFromMat(W_mat, kNoTrans);
return impr_spk;
}
}
// static
BaseFloat CalBasisFmllrStepSize(
const AffineXformStats &spk_stats, const Matrix<BaseFloat> &spk_stats_tmp_K,
const std::vector<SpMatrix<BaseFloat> > &spk_stats_tmp_G,
const Matrix<BaseFloat> &delta, const Matrix<BaseFloat> &A,
const Matrix<BaseFloat> &S, int32 max_iters) {
int32 dim = spk_stats.dim_;
KALDI_ASSERT(dim == delta.NumRows() && dim == S.NumRows());
// The first D columns of delta_W
SubMatrix<BaseFloat> delta_Dim(delta, 0, dim, 0, dim);
// Eq. (46): b = tr(delta K^T) - tr(delta S^T)
BaseFloat b = TraceMatMat(delta, spk_stats_tmp_K, kTrans) -
TraceMatMat(delta, S, kTrans);
// Eq. (47): c = sum_d tr(delta_{d} G_{d} delta_{d})
BaseFloat c = 0;
Vector<BaseFloat> G_row_delta(dim + 1);
for (int32 d = 0; d < dim; ++d) {
G_row_delta.AddSpVec(1.0, spk_stats_tmp_G[d], delta.Row(d), 0.0);
c += VecVec(G_row_delta, delta.Row(d));
}
// Sometimes the change of step size, d1/d2, may get tiny.
// Due to numerical precision, we compute everything in double.
BaseFloat step_size = 0.0;
BaseFloat obj_old, obj_new = 0.0;
Matrix<BaseFloat> N(dim, dim);
for (int32 iter_step = 1; iter_step <= max_iters; ++iter_step) {
if (iter_step == 1) {
// k = 0, auxf = beta logdet(A)
obj_old = spk_stats.beta_ * A.LogDet();
} else {
obj_old = obj_new;
}
// Eq. (49): N = (A + k * delta_Dim)^{-1} delta_Dim
// In case of bad condition, careful preconditioning should be done. Maybe
// safer
// to use SolveQuadraticMatrixProblem. Future work for Yajie.
Matrix<BaseFloat> tmp_A(A);
tmp_A.AddMat(step_size, delta_Dim, kNoTrans);
tmp_A.InvertDouble();
N.AddMatMat(1.0, tmp_A, kNoTrans, delta_Dim, kNoTrans, 0.0);
// first-order derivative w.r.t. k
// Eq. (50): d1 = beta * trace(N) + b - k * c
BaseFloat d1 = spk_stats.beta_ * TraceMat(N) + b - step_size * c;
// second-order derivative w.r.t. k
// Eq. (51): d2 = -beta * tr(N N) - c
BaseFloat d2 = -c - spk_stats.beta_ * TraceMatMat(N, N, kNoTrans);
d2 = std::min((double)d2, -c / 10.0);
// convergence judgment from fmllr-sgmm.cc
// it seems to work well, though not sure whether 1e-06 is appropriate
// note from Dan: commenting this out after someone complained it was
// causing a test to behave weirdly. This doesn't dominate computation
// anyway, I don't think.
// if (std::fabs(d1 / d2) < 0.000001) { break; }
// Eq. (52): update step_size
BaseFloat step_size_change = -(d1 / d2);
step_size += step_size_change;
// Repeatedly check auxiliary function; halve step size change if auxf
// decreases.
// According to the paper, we should limit the number of repetitions. The
// following implementation seems to work well. But the termination
// condition/judgment
// should be optimized later.
do {
// Eq. (48): auxf = beta * logdet(A + k * delta_Dim) + kb - 0.5 * k * k *
// c
tmp_A.CopyFromMat(A);
tmp_A.AddMat(step_size, delta_Dim, kNoTrans);
obj_new = spk_stats.beta_ * tmp_A.LogDet() + step_size * b -
0.5 * step_size * step_size * c;
if (obj_new - obj_old <
-1.0e-04 * spk_stats.beta_) { // deal with numerical issues
KALDI_WARN << "Objective function decreased (" << obj_old << "->"
<< obj_new << "). Halving step size change ( step size "
<< step_size << " -> "
<< (step_size - (step_size_change / 2)) << ")";
step_size_change /= 2;
step_size -= step_size_change;
}
} while (obj_new - obj_old < -1.0e-04 * spk_stats.beta_ &&
step_size_change > 1e-05);
}
return step_size;
}
} // namespace kaldi
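// A minimal sketch (not part of the library itself) of the per-speaker use of
// ComputeTransform() once a basis has been trained (via
// BasisFmllrAccus::AccuGradientScatter and EstimateFmllrBasis).
// ExampleAdaptSpeaker is a made-up helper, and spk_stats is assumed to hold
// this speaker's accumulated fMLLR statistics.
namespace kaldi {
inline void ExampleAdaptSpeaker(const BasisFmllrEstimate &basis,
                                const AffineXformStats &spk_stats,
                                const BasisFmllrOptions &opts,
                                Matrix<BaseFloat> *xform_out) {
  Vector<BaseFloat> coefficients;  // per-basis weights, resized inside
  double impr = basis.ComputeTransform(spk_stats, xform_out, &coefficients, opts);
  KALDI_VLOG(2) << "Basis-fMLLR auxf improvement for this speaker: " << impr
                << " (over " << spk_stats.beta_ << " frames).";
}
}  // namespace kaldi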
<|start_filename|>tonic-suite/asr/src/nnet/nnet-trnopts.h<|end_filename|>
// nnet/nnet-trnopts.h
// Copyright 2013 Brno University of Technology (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_NNET_NNET_TRNOPTS_H_
#define KALDI_NNET_NNET_TRNOPTS_H_
#include "base/kaldi-common.h"
#include "itf/options-itf.h"
namespace kaldi {
namespace nnet1 {
struct NnetTrainOptions {
// option declaration
BaseFloat learn_rate;
BaseFloat momentum;
BaseFloat l2_penalty;
BaseFloat l1_penalty;
// default values
NnetTrainOptions()
: learn_rate(0.008), momentum(0.0), l2_penalty(0.0), l1_penalty(0.0) {}
// register options
void Register(OptionsItf* po) {
po->Register("learn-rate", &learn_rate, "Learning rate");
po->Register("momentum", &momentum, "Momentum");
po->Register("l2-penalty", &l2_penalty, "L2 penalty (weight decay)");
po->Register("l1-penalty", &l1_penalty, "L1 penalty (promote sparsity)");
}
// print for debug purposes
friend std::ostream& operator<<(std::ostream& os,
const NnetTrainOptions& opts) {
os << "NnetTrainOptions : "
<< "learn_rate " << opts.learn_rate << ", "
<< "momentum " << opts.momentum << ", "
<< "l2_penalty " << opts.l2_penalty << ", "
<< "l1_penalty " << opts.l1_penalty;
return os;
}
};
struct RbmTrainOptions {
// option declaration
BaseFloat learn_rate;
BaseFloat momentum;
BaseFloat momentum_max;
int32 momentum_steps;
int32 momentum_step_period;
BaseFloat l2_penalty;
// default values
RbmTrainOptions()
: learn_rate(0.4),
momentum(0.5),
momentum_max(0.9),
momentum_steps(40),
momentum_step_period(500000),
// 500000 * 40 = 55h of linear increase of momentum
l2_penalty(0.0002) {}
// register options
void Register(OptionsItf* po) {
po->Register("learn-rate", &learn_rate, "Learning rate");
po->Register("momentum", &momentum,
"Initial momentum for linear scheduling");
po->Register("momentum-max", &momentum_max,
"Final momentum for linear scheduling");
po->Register("momentum-steps", &momentum_steps,
"Number of steps of linear momentum scheduling");
po->Register("momentum-step-period", &momentum_step_period,
"Number of datapoints per single momentum increase step");
po->Register("l2-penalty", &l2_penalty,
"L2 penalty (weight decay, increases mixing-rate)");
}
// print for debug purposes
friend std::ostream& operator<<(std::ostream& os,
const RbmTrainOptions& opts) {
os << "RbmTrainOptions : "
<< "learn_rate " << opts.learn_rate << ", "
<< "momentum " << opts.momentum << ", "
<< "momentum_max " << opts.momentum_max << ", "
<< "momentum_steps " << opts.momentum_steps << ", "
<< "momentum_step_period " << opts.momentum_step_period << ", "
<< "l2_penalty " << opts.l2_penalty;
return os;
}
};
} // namespace nnet1
} // namespace kaldi
#endif
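// A minimal sketch (not part of the library itself) of how these option
// structs are typically wired into a command-line tool via ParseOptions, the
// same pattern used by the binaries elsewhere in this suite.
// ExampleRegisterTrainOptions and the usage string are placeholders.
#include "util/parse-options.h"

namespace kaldi {
inline nnet1::NnetTrainOptions ExampleRegisterTrainOptions(int argc,
                                                           char *argv[]) {
  const char *usage = "Example trainer (placeholder usage string)\n";
  ParseOptions po(usage);
  nnet1::NnetTrainOptions trn_opts;
  trn_opts.Register(&po);  // adds --learn-rate, --momentum, --l2-penalty, --l1-penalty
  po.Read(argc, argv);
  KALDI_LOG << trn_opts;   // uses the operator<< defined above
  return trn_opts;
}
}  // namespace kaldi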
<|start_filename|>tonic-suite/asr/src/sgmm/am-sgmm.h<|end_filename|>
// sgmm/am-sgmm.h
// Copyright 2009-2011 Microsoft Corporation; <NAME>;
// Saarland University (Author: <NAME>);
// <NAME>; <NAME>;
// Copyright 2012-2013 Johns Hopkins University (author: <NAME>)
// <NAME>; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_SGMM_AM_SGMM_H_
#define KALDI_SGMM_AM_SGMM_H_
#include <vector>
#include "base/kaldi-common.h"
#include "matrix/matrix-lib.h"
#include "gmm/model-common.h"
#include "gmm/diag-gmm.h"
#include "gmm/full-gmm.h"
#include "itf/options-itf.h"
#include "util/table-types.h"
namespace kaldi {
struct SgmmGselectConfig {
/// Number of highest-scoring full-covariance Gaussians per frame.
int32 full_gmm_nbest;
/// Number of highest-scoring diagonal-covariance Gaussians per frame.
int32 diag_gmm_nbest;
SgmmGselectConfig() {
full_gmm_nbest = 15;
diag_gmm_nbest = 50;
}
void Register(OptionsItf *po) {
po->Register("full-gmm-nbest", &full_gmm_nbest,
"Number of highest-scoring"
" full-covariance Gaussians selected per frame.");
po->Register("diag-gmm-nbest", &diag_gmm_nbest,
"Number of highest-scoring"
" diagonal-covariance Gaussians selected per frame.");
}
};
/** \struct SgmmPerFrameDerivedVars
* Holds the per-frame precomputed quantities x(t), x_{i}(t), z_{i}(t), and
* n_{i}(t) (cf. Eq. (33)-(36)) for the SGMM, as well as the cached Gaussian
* selection records.
*/
struct SgmmPerFrameDerivedVars {
std::vector<int32> gselect;
Vector<BaseFloat> xt; ///< x'(t), FMLLR-adapted, dim = [D], eq.(33)
Matrix<BaseFloat> xti; ///< x_{i}(t) = x'(t) - o_i(s): dim = [I][D], eq.(34)
Matrix<BaseFloat> zti; ///< z_{i}(t), dim = [I][S], eq.(35)
Vector<BaseFloat> nti; ///< n_{i}(t), dim = [I], eq.(36)
SgmmPerFrameDerivedVars() : xt(0), xti(0, 0), zti(0, 0), nti(0) {}
void Resize(int32 ngauss, int32 feat_dim, int32 phn_dim) {
xt.Resize(feat_dim);
xti.Resize(ngauss, feat_dim);
zti.Resize(ngauss, phn_dim);
nti.Resize(ngauss);
}
bool IsEmpty() const {
return (xt.Dim() == 0 || xti.NumRows() == 0 || zti.NumRows() == 0 ||
nti.Dim() == 0);
}
bool NeedsResizing(int32 ngauss, int32 feat_dim, int32 phn_dim) const {
/* if (xt.Dim() != feat_dim)
KALDI_LOG << "xt dim = " << xt.Dim() << ", feat dim = " << feat_dim;
if (xti.NumRows() != ngauss || xti.NumCols() != feat_dim)
KALDI_LOG << "xti size = " << xti.NumRows() << ", " << xti.NumCols()
<< "; ngauss = " << ngauss << ", feat dim = " << feat_dim;
if (zti.NumRows() != ngauss || zti.NumCols() != phn_dim)
KALDI_LOG << "zti size = " << zti.NumRows() << ", " << zti.NumCols()
<< "; ngauss = " << ngauss << "; phn dim = " << phn_dim;
if (nti.Dim() != ngauss)
KALDI_LOG << "nti dim = " << nti.Dim() << ", ngauss = " << ngauss;
*/
return (xt.Dim() != feat_dim || xti.NumRows() != ngauss ||
xti.NumCols() != feat_dim || zti.NumRows() != ngauss ||
zti.NumCols() != phn_dim || nti.Dim() != ngauss);
}
};
struct SgmmPerSpkDerivedVars {
// To set this up, call ComputePerSpkDerivedVars from the sgmm object.
void Clear() {
v_s.Resize(0);
o_s.Resize(0, 0);
}
Vector<BaseFloat> v_s; ///< Speaker adaptation vector v_^{(s)}. Dim is [T]
Matrix<BaseFloat> o_s; ///< Per-speaker offsets o_{i}. Dimension is [I][D]
};
/** \class AmSgmm
* Class for definition of the subspace Gmm acoustic model
*/
class AmSgmm {
public:
AmSgmm() {}
void Read(std::istream &rIn, bool binary);
void Write(std::ostream &out, bool binary,
SgmmWriteFlagsType write_params) const;
/// Checks the various components for correct sizes. With wrong sizes,
/// assertion failure occurs. When the argument is set to true, dimensions of
/// the various components are printed.
void Check(bool show_properties = true);
/// Initializes the SGMM parameters from a full-covariance UBM.
void InitializeFromFullGmm(const FullGmm &gmm, int32 num_states,
int32 phn_subspace_dim, int32 spk_subspace_dim);
/// Used to copy models (useful in update)
void CopyFromSgmm(const AmSgmm &other, bool copy_normalizers);
/// Copies the global parameters from the supplied model, but sets
/// the state vectors to zero. Supports reducing the phonetic
/// and speaker subspace dimensions.
void CopyGlobalsInitVecs(const AmSgmm &other, int32 phn_subspace_dim,
int32 spk_subspace_dim, int32 num_pdfs);
/// Computes the top-scoring Gaussian indices (used for pruning of later
/// stages of computation). Returns frame log-likelihood given selected
/// Gaussians from full UBM.
BaseFloat GaussianSelection(const SgmmGselectConfig &config,
const VectorBase<BaseFloat> &data,
std::vector<int32> *gselect) const;
/// As GaussianSelection, but limiting it to a provided list of
/// preselected Gaussians (e.g. for gender dependency).
/// The list "preselect" must be sorted and uniq.
BaseFloat GaussianSelectionPreselect(const SgmmGselectConfig &config,
const VectorBase<BaseFloat> &data,
const std::vector<int32> &preselect,
std::vector<int32> *gselect) const;
/// This needs to be called with each new frame of data, prior to accumulation
/// or likelihood evaluation: it computes various pre-computed quantities. The
/// 'logdet_s' term is the log determinant of the FMLLR transform, or 0.0 if
/// no FMLLR is used or it's single-class fMLLR applied in the feature
/// extraction, and we're not keeping track of it here.
void ComputePerFrameVars(const VectorBase<BaseFloat> &data,
const std::vector<int32> &gselect,
const SgmmPerSpkDerivedVars &spk_vars,
BaseFloat logdet_s,
SgmmPerFrameDerivedVars *per_frame_vars) const;
/// Computes the per-speaker derived vars; assumes vars->v_s is already
/// set up.
void ComputePerSpkDerivedVars(SgmmPerSpkDerivedVars *vars) const;
/// This does a likelihood computation for a given state using the
/// top-scoring Gaussian components (in per_frame_vars). If the
/// log_prune parameter is nonzero (e.g. 5.0), the LogSumExp() stage is
/// pruned, which is a significant speedup... smaller values are faster.
BaseFloat LogLikelihood(const SgmmPerFrameDerivedVars &per_frame_vars,
int32 state_index, BaseFloat log_prune = 0.0) const;
/// Similar to LogLikelihood() function above, but also computes the posterior
/// probabilities for the top-scoring Gaussian components and all substates.
BaseFloat ComponentPosteriors(const SgmmPerFrameDerivedVars &per_frame_vars,
int32 state, Matrix<BaseFloat> *post) const;
/// Increases the total number of substates based on the state occupancies.
void SplitSubstates(const Vector<BaseFloat> &state_occupancies,
int32 target_nsubstates, BaseFloat perturb,
BaseFloat power, BaseFloat cond);
/// Functions for increasing the phonetic and speaker space dimensions.
/// The argument norm_xform is a LDA-like feature normalizing transform,
/// computed by the ComputeFeatureNormalizer function.
void IncreasePhoneSpaceDim(int32 target_dim,
const Matrix<BaseFloat> &norm_xform);
void IncreaseSpkSpaceDim(int32 target_dim,
const Matrix<BaseFloat> &norm_xform);
/// Computes (and initializes if necessary) derived vars...
/// for now this is just the normalizers "n" and the diagonal UBM.
void ComputeDerivedVars();
/// Computes the data-independent terms in the log-likelihood computation
/// for each Gaussian component and all substates. Eq. (31)
void ComputeNormalizers();
/// Computes the normalizers, while normalizing the weights to one
/// among each of the sets in "normalize_sets": these sets should
/// be disjoint and their union should be all the indices 0 ... I-1.
void ComputeNormalizersNormalized(
const std::vector<std::vector<int32> > &normalize_sets);
/// Computes the LDA-like pre-transform and its inverse as well as the
/// eigenvalues of the scatter of the means used in FMLLR estimation.
void ComputeFmllrPreXform(const Vector<BaseFloat> &state_occs,
Matrix<BaseFloat> *xform,
Matrix<BaseFloat> *inv_xform,
Vector<BaseFloat> *diag_mean_scatter) const;
/// Various model dimensions.
int32 NumPdfs() const { return c_.size(); }
int32 NumSubstates(int32 j) const { return c_[j].Dim(); }
int32 NumGauss() const { return M_.size(); }
int32 PhoneSpaceDim() const { return w_.NumCols(); }
int32 SpkSpaceDim() const { return (N_.size() > 0) ? N_[0].NumCols() : 0; }
int32 FeatureDim() const { return M_[0].NumRows(); }
void RemoveSpeakerSpace() { N_.clear(); }
/// Accessors
const FullGmm &full_ubm() const { return full_ubm_; }
const DiagGmm &diag_ubm() const { return diag_ubm_; }
const Matrix<BaseFloat> &StateVectors(int32 state_index) const {
return v_[state_index];
}
const SpMatrix<BaseFloat> &GetInvCovars(int32 gauss_index) const {
return SigmaInv_[gauss_index];
}
const Matrix<BaseFloat> &GetPhoneProjection(int32 gauss_index) const {
return M_[gauss_index];
}
/// Templated accessors (used to accumulate in different precision)
template <typename Real>
void GetInvCovars(int32 gauss_index, SpMatrix<Real> *out) const;
template <typename Real>
void GetSubstateMean(int32 j, int32 m, int32 i,
VectorBase<Real> *mean_out) const;
template <typename Real>
void GetSubstateSpeakerMean(int32 state, int32 substate, int32 gauss,
const SgmmPerSpkDerivedVars &spk,
VectorBase<Real> *mean_out) const;
template <typename Real>
void GetVarScaledSubstateSpeakerMean(int32 state, int32 substate, int32 gauss,
const SgmmPerSpkDerivedVars &spk,
VectorBase<Real> *mean_out) const;
template <typename Real>
void GetNtransSigmaInv(std::vector<Matrix<Real> > *out) const;
/// Computes quantities H = M_i Sigma_i^{-1} M_i^T.
template <class Real>
void ComputeH(std::vector<SpMatrix<Real> > *H_i) const;
protected:
friend class ComputeNormalizersClass;
private:
/// Compute a subset of normalizers; used in multi-threaded implementation.
void ComputeNormalizersInternal(int32 num_threads, int32 thread,
int32 *entropy_count, double *entropy_sum);
/// Initializes the matrices M_ and w_
void InitializeMw(int32 phn_subspace_dim,
const Matrix<BaseFloat> &norm_xform);
/// Initializes the matrices N_
void InitializeN(int32 spk_subspace_dim, const Matrix<BaseFloat> &norm_xform);
void InitializeVecs(int32 num_states); ///< Initializes the state-vectors.
void InitializeCovars(); ///< initializes the within-class covariances.
void ComputeSmoothingTermsFromModel(
const std::vector<SpMatrix<BaseFloat> > &H,
const Vector<BaseFloat> &state_occupancies, SpMatrix<BaseFloat> *H_sm,
BaseFloat max_cond) const;
private:
/// These contain the "background" model associated with the subspace GMM.
DiagGmm diag_ubm_;
FullGmm full_ubm_;
/// Globally shared parameters of the subspace GMM.
/// The various quantities are: I = number of Gaussians, D = data dimension,
/// S = phonetic subspace dimension, T = speaker subspace dimension,
/// J = number of states, M_{j} = number of substates of state j.
/// Inverse within-class (full) covariances; dim is [I][D][D].
std::vector<SpMatrix<BaseFloat> > SigmaInv_;
/// Phonetic-subspace projections. Dimension is [I][D][S]
std::vector<Matrix<BaseFloat> > M_;
/// Speaker-subspace projections. Dimension is [I][D][T]
std::vector<Matrix<BaseFloat> > N_;
/// Weight projection vectors. Dimension is [I][S]
Matrix<BaseFloat> w_;
/// The parameters in a particular SGMM state.
/// v_{jm}, per-state phonetic-subspace vectors. Dimension is [J][M_{j}][S].
std::vector<Matrix<BaseFloat> > v_;
/// c_{jm}, mixture weights. Dimension is [J][M_{j}]
std::vector<Vector<BaseFloat> > c_;
/// n_{jim}, per-Gaussian normalizer. Dimension is [J][I][M_{j}]
std::vector<Matrix<BaseFloat> > n_;
// Priors for MAP adaptation of M -- keeping them here for now but they may
// be moved somewhere else eventually
// These are parameters of a matrix-variate normal distribution. The means are
// the unadapted M_i, and we have 2 separate covariance matrices for the rows
// and columns of M.
std::vector<Matrix<BaseFloat> > M_prior_; // Matrix-variate Gaussian mean
SpMatrix<BaseFloat> row_cov_inv_;
SpMatrix<BaseFloat> col_cov_inv_;
KALDI_DISALLOW_COPY_AND_ASSIGN(AmSgmm);
friend class EbwAmSgmmUpdater;
friend class MleAmSgmmUpdater;
friend class MleSgmmSpeakerAccs;
friend class AmSgmmFunctions; // misc functions that need access.
friend class MleAmSgmmUpdaterMulti;
};
template <typename Real>
inline void AmSgmm::GetInvCovars(int32 gauss_index, SpMatrix<Real> *out) const {
out->Resize(SigmaInv_[gauss_index].NumRows(), kUndefined);
out->CopyFromSp(SigmaInv_[gauss_index]);
}
template <typename Real>
inline void AmSgmm::GetSubstateMean(int32 j, int32 m, int32 i,
VectorBase<Real> *mean_out) const {
KALDI_ASSERT(mean_out != NULL);
KALDI_ASSERT(j < NumPdfs() && m < NumSubstates(j) && i < NumGauss());
KALDI_ASSERT(mean_out->Dim() == FeatureDim());
Vector<BaseFloat> mean_tmp(FeatureDim());
mean_tmp.AddMatVec(1.0, M_[i], kNoTrans, v_[j].Row(m), 0.0);
mean_out->CopyFromVec(mean_tmp);
}
template <typename Real>
inline void AmSgmm::GetSubstateSpeakerMean(int32 j, int32 m, int32 i,
const SgmmPerSpkDerivedVars &spk,
VectorBase<Real> *mean_out) const {
GetSubstateMean(j, m, i, mean_out);
if (spk.v_s.Dim() != 0) // have speaker adaptation...
mean_out->AddVec(1.0, spk.o_s.Row(i));
}
template <typename Real>
void AmSgmm::GetVarScaledSubstateSpeakerMean(int32 j, int32 m, int32 i,
const SgmmPerSpkDerivedVars &spk,
VectorBase<Real> *mean_out) const {
Vector<BaseFloat> tmp_mean(mean_out->Dim()), tmp_mean2(mean_out->Dim());
GetSubstateSpeakerMean(j, m, i, spk, &tmp_mean);
tmp_mean2.AddSpVec(1.0, SigmaInv_[i], tmp_mean, 0.0);
mean_out->CopyFromVec(tmp_mean2);
}
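// Summarizing the accessors above in the notation of the class comments (a
// sketch, not additional functionality): GetSubstateMean() computes
// mu_{jmi} = M_i v_{jm}; GetSubstateSpeakerMean() adds the per-speaker offset
// spk.o_s.Row(i), which is assumed to be precomputed elsewhere from the
// speaker subspace as N_i v^{(s)}; and GetVarScaledSubstateSpeakerMean()
// returns SigmaInv_[i] times that speaker-adapted mean.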
/// Computes the inverse of an LDA transform (without dimensionality reduction)
/// The computed transform is used in initializing the phonetic and speaker
/// subspaces, as well as while increasing the dimensions of those spaces.
void ComputeFeatureNormalizer(const FullGmm &gmm, Matrix<BaseFloat> *xform);
/// This is the entry for a single time.
struct SgmmGauPostElement {
// Need gselect info here, since "posteriors" is relative to this set of
// selected Gaussians.
std::vector<int32> gselect;
std::vector<int32> tids; // transition-ids for each entry in "posteriors"
std::vector<Matrix<BaseFloat> > posteriors;
};
/// indexed by time.
class SgmmGauPost : public std::vector<SgmmGauPostElement> {
public:
// Add the standard Kaldi Read and Write routines so
// we can use KaldiObjectHolder with this type.
explicit SgmmGauPost(size_t i) : std::vector<SgmmGauPostElement>(i) {}
SgmmGauPost() {}
void Write(std::ostream &os, bool binary) const;
void Read(std::istream &is, bool binary);
};
typedef KaldiObjectHolder<SgmmGauPost> SgmmGauPostHolder;
typedef RandomAccessTableReader<SgmmGauPostHolder>
RandomAccessSgmmGauPostReader;
typedef SequentialTableReader<SgmmGauPostHolder> SequentialSgmmGauPostReader;
typedef TableWriter<SgmmGauPostHolder> SgmmGauPostWriter;
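// Example (sketch): iterating over an archive of SGMM Gaussian-level
// posteriors with the sequential reader typedef above. The rspecifier
// "ark:gpost.ark" is purely illustrative, not a path used elsewhere here.
//
// SequentialSgmmGauPostReader reader("ark:gpost.ark");
// for (; !reader.Done(); reader.Next()) {
// const std::string &utt = reader.Key();
// const SgmmGauPost &gpost = reader.Value(); // one element per frame
// for (size_t t = 0; t < gpost.size(); t++) {
// const SgmmGauPostElement &e = gpost[t];
// // e.gselect, e.tids and e.posteriors all refer to frame t.
// }
// }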
/// Class for misc functions that need access to SGMM private variables.
class AmSgmmFunctions {
public:
/// Computes matrix of approximated K-L divergences,
/// of size [#states x #states], as described in
/// "State-Level Data Borrowing for Low-Resource Speech Recognition based on
/// Subspace GMMs", by <NAME> et. al, Interspeech 2011.
/// Model must have one substate per state.
static void ComputeDistances(const AmSgmm &model,
const Vector<BaseFloat> &state_occs,
MatrixBase<BaseFloat> *dists);
};
} // namespace kaldi
#endif // KALDI_SGMM_AM_SGMM_H_
<|start_filename|>tonic-suite/asr/src/fstext/fst-test-utils.h<|end_filename|>
// fstext/fst-test-utils.h
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_FSTEXT_FST_TEST_UTILS_H_
#define KALDI_FSTEXT_FST_TEST_UTILS_H_
#include <sstream>
#include <string>
// Just some #includes.
#include "fst/script/print-impl.h"
#include "fstext/rand-fst.h"
#endif
<|start_filename|>tonic-suite/asr/src/sgmm/fmllr-sgmm-test.cc<|end_filename|>
// sgmm/fmllr-sgmm-test.cc
// Copyright 2009-2011 Saarland University
// Author: <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <vector>
#include "gmm/model-test-common.h"
#include "sgmm/am-sgmm.h"
#include "sgmm/fmllr-sgmm.h"
#include "util/kaldi-io.h"
using kaldi::AmSgmm;
using kaldi::int32;
using kaldi::BaseFloat;
using kaldi::Vector;
using kaldi::Matrix;
namespace ut = kaldi::unittest;
void ApplyFmllrXform(const kaldi::VectorBase<BaseFloat> &in,
const Matrix<BaseFloat> &xf, Vector<BaseFloat> *out) {
int32 dim = in.Dim();
KALDI_ASSERT(xf.NumRows() == dim && xf.NumCols() == dim + 1);
Vector<BaseFloat> tmp(dim + 1);
tmp.Range(0, dim).CopyFromVec(in);
tmp(dim) = 1.0;
out->Resize(dim, kaldi::kSetZero);
out->AddMatVec(1.0, xf, kaldi::kNoTrans, tmp, 0.0);
}
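// i.e. with xf = [A b] (an affine transform in the d x (d+1) layout used
// above), this computes out = A * in + b for a single feature vector.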
// Tests the Read() and Write() methods for the accumulators, in both binary
// and ASCII mode, as well as Check().
void TestSgmmFmllrAccsIO(const AmSgmm &sgmm,
const kaldi::Matrix<BaseFloat> &feats) {
KALDI_LOG << "Test IO start.";
using namespace kaldi;
int32 dim = sgmm.FeatureDim();
kaldi::SgmmPerFrameDerivedVars frame_vars;
kaldi::SgmmPerSpkDerivedVars empty;
kaldi::SgmmFmllrGlobalParams fmllr_globals;
kaldi::SgmmGselectConfig sgmm_config;
frame_vars.Resize(sgmm.NumGauss(), dim, sgmm.PhoneSpaceDim());
sgmm_config.full_gmm_nbest =
std::min(sgmm_config.full_gmm_nbest, sgmm.NumGauss());
kaldi::Vector<BaseFloat> occs(sgmm.NumPdfs());
occs.Set(feats.NumRows());
sgmm.ComputeFmllrPreXform(occs, &fmllr_globals.pre_xform_,
&fmllr_globals.inv_xform_,
&fmllr_globals.mean_scatter_);
if (fmllr_globals.mean_scatter_.Min() == 0.0) {
KALDI_WARN << "Global covariances low rank!";
KALDI_WARN << "Diag-scatter = " << fmllr_globals.mean_scatter_;
return;
}
// std::cout << "Pre-Xform = " << fmllr_globals.pre_xform_;
// std::cout << "Inv-Xform = " << fmllr_globals.inv_xform_;
FmllrSgmmAccs accs;
accs.Init(sgmm.FeatureDim(), sgmm.NumGauss());
BaseFloat loglike = 0.0;
Vector<BaseFloat> empty_spk;
std::vector<int32> gselect;
for (int32 i = 0; i < feats.NumRows(); i++) {
sgmm.GaussianSelection(sgmm_config, feats.Row(i), &gselect);
sgmm.ComputePerFrameVars(feats.Row(i), gselect, empty, 0.0, &frame_vars);
loglike += accs.Accumulate(sgmm, empty, feats.Row(i), frame_vars, 0, 1.0);
}
kaldi::SgmmFmllrConfig update_opts;
// update_opts.fmllr_min_count = 100;
kaldi::Matrix<BaseFloat> xform_mat(dim, dim + 1);
xform_mat.SetUnit();
BaseFloat frames, impr;
accs.Update(sgmm, fmllr_globals, update_opts, &xform_mat, &frames, &impr);
Vector<BaseFloat> xformed_feat(dim);
ApplyFmllrXform(feats.Row(0), xform_mat, &xformed_feat);
sgmm.GaussianSelection(sgmm_config, xformed_feat, &gselect);
sgmm.ComputePerFrameVars(xformed_feat, gselect, empty, 0.0, &frame_vars);
BaseFloat loglike1 = sgmm.LogLikelihood(frame_vars, 0);
bool binary_in;
// First, non-binary write
KALDI_LOG << "Test ASCII IO.";
accs.Write(kaldi::Output("tmpf", false).Stream(), false);
FmllrSgmmAccs *accs1 = new FmllrSgmmAccs();
// Non-binary read
kaldi::Input ki1("tmpf", &binary_in);
accs1->Read(ki1.Stream(), binary_in, false);
xform_mat.SetUnit();
accs1->Update(sgmm, fmllr_globals, update_opts, &xform_mat, NULL, NULL);
ApplyFmllrXform(feats.Row(0), xform_mat, &xformed_feat);
sgmm.GaussianSelection(sgmm_config, xformed_feat, &gselect);
sgmm.ComputePerFrameVars(xformed_feat, gselect, empty, 0.0, &frame_vars);
BaseFloat loglike2 = sgmm.LogLikelihood(frame_vars, 0);
std::cout << "LL1 = " << loglike1 << ", LL2 = " << loglike2 << std::endl;
kaldi::AssertEqual(loglike1, loglike2, 1e-2);
delete accs1;
// Next, binary write
KALDI_LOG << "Test Binary IO.";
accs.Write(kaldi::Output("tmpfb", true).Stream(), true);
FmllrSgmmAccs *accs2 = new FmllrSgmmAccs();
// Binary read
kaldi::Input ki2("tmpfb", &binary_in);
accs2->Read(ki2.Stream(), binary_in, false);
xform_mat.SetUnit();
accs2->Update(sgmm, fmllr_globals, update_opts, &xform_mat, NULL, NULL);
ApplyFmllrXform(feats.Row(0), xform_mat, &xformed_feat);
sgmm.GaussianSelection(sgmm_config, xformed_feat, &gselect);
sgmm.ComputePerFrameVars(xformed_feat, gselect, empty, 0.0, &frame_vars);
BaseFloat loglike3 = sgmm.LogLikelihood(frame_vars, 0);
std::cout << "LL1 = " << loglike1 << ", LL3 = " << loglike3 << std::endl;
kaldi::AssertEqual(loglike1, loglike3, 1e-4);
delete accs2;
KALDI_LOG << "Test IO end.";
unlink("tmpf");
unlink("tmpfb");
}
void TestSgmmFmllrSubspace(const AmSgmm &sgmm,
const kaldi::Matrix<BaseFloat> &feats) {
KALDI_LOG << "Test Subspace start.";
using namespace kaldi;
int32 dim = sgmm.FeatureDim();
kaldi::SgmmPerFrameDerivedVars frame_vars;
kaldi::SgmmPerSpkDerivedVars empty;
kaldi::SgmmFmllrGlobalParams fmllr_globals;
kaldi::SgmmGselectConfig sgmm_config;
frame_vars.Resize(sgmm.NumGauss(), dim, sgmm.PhoneSpaceDim());
sgmm_config.full_gmm_nbest =
std::min(sgmm_config.full_gmm_nbest, sgmm.NumGauss());
kaldi::Vector<BaseFloat> occs(sgmm.NumPdfs());
occs.Set(feats.NumRows());
sgmm.ComputeFmllrPreXform(occs, &fmllr_globals.pre_xform_,
&fmllr_globals.inv_xform_,
&fmllr_globals.mean_scatter_);
if (fmllr_globals.mean_scatter_.Min() == 0.0) {
KALDI_WARN << "Global covariances low rank!";
KALDI_WARN << "Diag-scatter = " << fmllr_globals.mean_scatter_;
return;
}
FmllrSgmmAccs accs;
accs.Init(sgmm.FeatureDim(), sgmm.NumGauss());
BaseFloat loglike = 0.0;
Vector<BaseFloat> empty_spk;
std::vector<int32> gselect;
for (int32 i = 0; i < feats.NumRows(); i++) {
sgmm.GaussianSelection(sgmm_config, feats.Row(i), &gselect);
sgmm.ComputePerFrameVars(feats.Row(i), gselect, empty, 0.0, &frame_vars);
loglike += accs.Accumulate(sgmm, empty, feats.Row(i), frame_vars, 0, 1.0);
}
SpMatrix<double> grad_scatter(dim * (dim + 1));
accs.AccumulateForFmllrSubspace(sgmm, fmllr_globals, &grad_scatter);
kaldi::SgmmFmllrConfig update_opts;
EstimateSgmmFmllrSubspace(grad_scatter, update_opts.num_fmllr_bases, dim,
&fmllr_globals);
// update_opts.fmllr_min_count = 100;
kaldi::Matrix<BaseFloat> xform_mat(dim, dim + 1);
xform_mat.SetUnit();
accs.Update(sgmm, fmllr_globals, update_opts, &xform_mat, NULL, NULL);
KALDI_LOG << "Test Subspace end.";
}
void TestSgmmFmllr() {
// srand(time(NULL));
int32 dim = 1 + kaldi::RandInt(0, 9); // random dimension of the gmm
int32 num_comp = 2 + kaldi::RandInt(0, 9); // random number of mixtures
kaldi::FullGmm full_gmm;
ut::InitRandFullGmm(dim, num_comp, &full_gmm);
int32 num_states = 1;
AmSgmm sgmm;
kaldi::SgmmGselectConfig config;
sgmm.InitializeFromFullGmm(full_gmm, num_states, dim + 1, dim);
sgmm.ComputeNormalizers();
kaldi::Matrix<BaseFloat> feats;
{ // First, generate random means and variances
int32 num_feat_comp =
num_comp + kaldi::RandInt(-num_comp / 2, num_comp / 2);
kaldi::Matrix<BaseFloat> means(num_feat_comp, dim),
vars(num_feat_comp, dim);
for (int32 m = 0; m < num_feat_comp; m++) {
for (int32 d = 0; d < dim; d++) {
means(m, d) = kaldi::RandGauss();
vars(m, d) = exp(kaldi::RandGauss()) + 1e-2;
}
}
// Now generate random features with those means and variances.
feats.Resize(num_feat_comp * 200, dim);
for (int32 m = 0; m < num_feat_comp; m++) {
kaldi::SubMatrix<BaseFloat> tmp(feats, m * 200, 200, 0, dim);
ut::RandDiagGaussFeatures(200, means.Row(m), vars.Row(m), &tmp);
}
}
TestSgmmFmllrAccsIO(sgmm, feats);
TestSgmmFmllrSubspace(sgmm, feats);
}
int main() {
std::srand(1000);
kaldi::g_kaldi_verbose_level = 5;
for (int i = 0; i < 10; i++) TestSgmmFmllr();
std::cout << "Test OK.\n";
return 0;
}
<|start_filename|>tonic-suite/asr/src/sgmm/estimate-am-sgmm-multi.h<|end_filename|>
// sgmm/estimate-am-sgmm-multi.h
// Copyright 2012 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_SGMM_ESTIMATE_AM_SGMM_MULTI_H_
#define KALDI_SGMM_ESTIMATE_AM_SGMM_MULTI_H_ 1
#include <string>
#include <vector>
#include "sgmm/am-sgmm.h"
#include "sgmm/estimate-am-sgmm.h"
#include "gmm/model-common.h"
namespace kaldi {
/** \class MleAmSgmmGlobalAccs
* Class for the accumulators associated with SGMM global parameters (e.g.
* phonetic-, weight- and speaker-projections; and covariances). This is
* used when the global parameters are updated using stats from multiple
* models.
*/
class MleAmSgmmGlobalAccs {
public:
explicit MleAmSgmmGlobalAccs()
: feature_dim_(0),
phn_space_dim_(0),
spk_space_dim_(0),
num_gaussians_(0),
total_frames_(0.0),
total_like_(0.0) {}
/// Resizes the accumulators to the correct sizes given the model. The flags
/// argument controls which accumulators to resize.
void ResizeAccumulators(const AmSgmm &model, SgmmUpdateFlagsType flags);
/// Set the accumulators specified by the flags argument to zero.
void ZeroAccumulators(SgmmUpdateFlagsType flags);
/// Add another accumulator object
void AddAccumulators(const AmSgmm &model, const MleAmSgmmAccs &acc,
SgmmUpdateFlagsType flags);
int32 FeatureDim() const { return feature_dim_; }
int32 PhoneSpaceDim() const { return phn_space_dim_; }
int32 NumGauss() const { return num_gaussians_; }
private:
/// The stats which are not tied to any state.
/// Stats Y_{i} for phonetic-subspace projections M; Dim is [I][D][S].
std::vector<Matrix<double> > Y_;
/// Stats Z_{i} for speaker-subspace projections N. Dim is [I][D][T].
std::vector<Matrix<double> > Z_;
/// R_{i}, quadratic term for speaker subspace estimation. Dim is [I][T][T]
std::vector<SpMatrix<double> > R_;
/// S_{i}^{-}, scatter of adapted feature vectors x_{i}(t). Dim is [I][D][D].
std::vector<SpMatrix<double> > S_;
/// Total occupancies gamma_i for each Gaussian. Dim is [I]
Vector<double> gamma_i_;
/// Q_{i}, quadratic term for phonetic subspace estimation. Dim is [I][S][S]
std::vector<SpMatrix<double> > Q_;
/// Eq (74): S_{i}^{(means)}, scatter of substate mean vectors for estimating
/// the shared covariance matrices. Dimension is [I][D][D].
std::vector<SpMatrix<double> > S_means_;
/// Dimensionality of various subspaces
int32 feature_dim_, phn_space_dim_, spk_space_dim_;
int32 num_gaussians_; ///< Other model specifications
double total_frames_, total_like_;
KALDI_DISALLOW_COPY_AND_ASSIGN(MleAmSgmmGlobalAccs);
friend class MleAmSgmmUpdaterMulti;
};
/** \class MleAmSgmmUpdaterMulti
* Contains the functions needed to update the parameters for multiple SGMMs
* whose global parameters are tied.
*/
class MleAmSgmmUpdaterMulti {
public:
explicit MleAmSgmmUpdaterMulti(const AmSgmm &model,
const MleAmSgmmOptions &options)
: update_options_(options),
global_SigmaInv_(model.SigmaInv_),
global_M_(model.M_),
global_N_(model.N_),
global_w_(model.w_) {}
void Update(const std::vector<MleAmSgmmAccs *> &accs,
const std::vector<AmSgmm *> &models, SgmmUpdateFlagsType flags);
/// Various model dimensions.
int32 NumGauss() const { return global_M_.size(); }
int32 PhoneSpaceDim() const { return global_w_.NumCols(); }
int32 SpkSpaceDim() const {
return (global_N_.size() > 0) ? global_N_[0].NumCols() : 0;
}
int32 FeatureDim() const { return global_M_[0].NumRows(); }
private:
MleAmSgmmOptions update_options_;
/// SGMM global parameters that will be updated together and copied to the
/// different models:
std::vector<SpMatrix<BaseFloat> > global_SigmaInv_;
std::vector<Matrix<BaseFloat> > global_M_;
std::vector<Matrix<BaseFloat> > global_N_;
Matrix<BaseFloat> global_w_;
BaseFloat UpdateGlobals(const MleAmSgmmGlobalAccs &glob_accs,
SgmmUpdateFlagsType flags);
double UpdateM(const MleAmSgmmGlobalAccs &accs);
double UpdateN(const MleAmSgmmGlobalAccs &accs);
double UpdateVars(const MleAmSgmmGlobalAccs &accs);
double UpdateWParallel(const std::vector<MleAmSgmmAccs *> &accs,
const std::vector<AmSgmm *> &models);
// double UpdateWSequential(const std::vector<MleAmSgmmAccs*> &accs,
// const std::vector<AmSgmm*> &models);
void ComputeSmoothingTerms(const MleAmSgmmGlobalAccs &accs,
const std::vector<SpMatrix<double> > &H,
SpMatrix<double> *H_sm) const;
void RenormalizeV(const SpMatrix<double> &H_sm,
const std::vector<AmSgmm *> &models);
KALDI_DISALLOW_COPY_AND_ASSIGN(MleAmSgmmUpdaterMulti);
MleAmSgmmUpdaterMulti() {} // Prevent unconfigured updater.
};
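// Example (sketch, hypothetical variable names): updating the tied global
// parameters of several SGMMs from their per-model accumulators. The flags
// value controls which parameter types are updated and is assumed to be set
// up by the caller.
//
// std::vector<MleAmSgmmAccs*> accs; // one accumulator per model
// std::vector<AmSgmm*> models; // the models sharing global parameters
// MleAmSgmmOptions opts;
// SgmmUpdateFlagsType flags = ...; // which parameters to update
// MleAmSgmmUpdaterMulti updater(*models[0], opts);
// updater.Update(accs, models, flags); // writes updated globals back to models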
} // namespace kaldi
#endif // KALDI_SGMM_ESTIMATE_AM_SGMM_MULTI_H_
<|start_filename|>tonic-suite/nlp/src/SENNA_PT0.cpp<|end_filename|>
#include <sys/time.h>
#include "SENNA_PT0.h"
#include "SENNA_utils.h"
#include "SENNA_nn.h"
#include "socket.h"
int *SENNA_PT0_forward(SENNA_PT0 *pt0, const int *sentence_words,
const int *sentence_caps, const int *sentence_posl,
int sentence_size, int socketfd) {
int idx;
struct timeval tv1, tv2;
gettimeofday(&tv1, NULL);
pt0->input_state = SENNA_realloc(
pt0->input_state, sizeof(float),
(sentence_size + pt0->window_size - 1) *
(pt0->ll_word_size + pt0->ll_caps_size + pt0->ll_posl_size));
pt0->output_state = SENNA_realloc(pt0->output_state, sizeof(float),
sentence_size * pt0->output_state_size);
SENNA_nn_lookup(pt0->input_state,
pt0->ll_word_size + pt0->ll_caps_size + pt0->ll_posl_size,
pt0->ll_word_weight, pt0->ll_word_size, pt0->ll_word_max_idx,
sentence_words, sentence_size, pt0->ll_word_padding_idx,
(pt0->window_size - 1) / 2);
SENNA_nn_lookup(pt0->input_state + pt0->ll_word_size,
pt0->ll_word_size + pt0->ll_caps_size + pt0->ll_posl_size,
pt0->ll_caps_weight, pt0->ll_caps_size, pt0->ll_caps_max_idx,
sentence_caps, sentence_size, pt0->ll_caps_padding_idx,
(pt0->window_size - 1) / 2);
SENNA_nn_lookup(pt0->input_state + pt0->ll_word_size + pt0->ll_caps_size,
pt0->ll_word_size + pt0->ll_caps_size + pt0->ll_posl_size,
pt0->ll_posl_weight, pt0->ll_posl_size, pt0->ll_posl_max_idx,
sentence_posl, sentence_size, pt0->ll_posl_padding_idx,
(pt0->window_size - 1) / 2);
gettimeofday(&tv2, NULL);
pt0->apptime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
int input_size = pt0->ll_word_size + pt0->ll_caps_size + pt0->ll_posl_size;
char *input_data = (char *)malloc(
sentence_size * (pt0->window_size * (input_size)) * sizeof(float));
for (idx = 0; idx < sentence_size; idx++) {
memcpy((char *)(input_data +
idx * (pt0->window_size) * (input_size) * sizeof(float)),
(char *)(pt0->input_state + idx * input_size),
pt0->window_size * input_size * sizeof(float));
}
if (pt0->service) {
SOCKET_send(socketfd, input_data,
pt0->window_size * (input_size) * sizeof(float) * sentence_size,
pt0->debug);
SOCKET_receive(socketfd, (char *)(pt0->output_state),
pt0->output_state_size * sizeof(float) * sentence_size,
pt0->debug);
}
/*
for(idx = 0; idx < sentence_size; idx++)
{
gettimeofday(&tv1,NULL);
if(pt0->service) {
SOCKET_send(socketfd,
(char*)(pt0->input_state+idx*(pt0->ll_word_size+pt0->ll_caps_size+pt0->ll_posl_size)),
pt0->window_size*(pt0->ll_word_size+pt0->ll_caps_size+pt0->ll_posl_size)*sizeof(float),
pt0->debug
);
SOCKET_receive(socketfd,
(char*)(pt0->output_state+idx*pt0->output_state_size),
pt0->output_state_size*sizeof(float),
pt0->debug
);
} else {
SENNA_nn_linear(pt0->hidden_state,
pt0->hidden_state_size,
pt0->l1_weight,
pt0->l1_bias,
pt0->input_state+idx*(pt0->ll_word_size+pt0->ll_caps_size+pt0->ll_posl_size),
pt0->window_size*(pt0->ll_word_size+pt0->ll_caps_size+pt0->ll_posl_size));
SENNA_nn_hardtanh(pt0->hidden_state,
pt0->hidden_state,
pt0->hidden_state_size);
SENNA_nn_linear(pt0->output_state+idx*pt0->output_state_size,
pt0->output_state_size,
pt0->l2_weight,
pt0->l2_bias,
pt0->hidden_state,
pt0->hidden_state_size);
}
gettimeofday(&tv2,NULL);
pt0->dnntime += (tv2.tv_sec-tv1.tv_sec)*1000000 +
(tv2.tv_usec-tv1.tv_usec);
pt0->calls++;
gettimeofday(&tv1,NULL);
pt0->labels = SENNA_realloc(pt0->labels, sizeof(int), sentence_size);
SENNA_nn_viterbi(pt0->labels, pt0->viterbi_score_init,
pt0->viterbi_score_trans, pt0->output_state, pt0->output_state_size,
sentence_size);
gettimeofday(&tv2,NULL);
pt0->apptime += (tv2.tv_sec-tv1.tv_sec)*1000000 +
(tv2.tv_usec-tv1.tv_usec);
}
*/
gettimeofday(&tv2, NULL);
pt0->dnntime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
pt0->calls++;
gettimeofday(&tv1, NULL);
pt0->labels = SENNA_realloc(pt0->labels, sizeof(int), sentence_size);
SENNA_nn_viterbi(pt0->labels, pt0->viterbi_score_init,
pt0->viterbi_score_trans, pt0->output_state,
pt0->output_state_size, sentence_size);
gettimeofday(&tv2, NULL);
pt0->apptime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
return pt0->labels;
}
SENNA_PT0 *SENNA_PT0_new(const char *path, const char *subpath) {
SENNA_PT0 *pt0 = SENNA_malloc(sizeof(SENNA_PT0), 1);
FILE *f;
float dummy;
memset(pt0, 0, sizeof(SENNA_PT0));
f = SENNA_fopen(path, subpath, "rb");
SENNA_fread(&pt0->window_size, sizeof(int), 1, f);
SENNA_fread_tensor_2d(&pt0->ll_word_weight, &pt0->ll_word_size,
&pt0->ll_word_max_idx, f);
SENNA_fread_tensor_2d(&pt0->ll_caps_weight, &pt0->ll_caps_size,
&pt0->ll_caps_max_idx, f);
SENNA_fread_tensor_2d(&pt0->ll_posl_weight, &pt0->ll_posl_size,
&pt0->ll_posl_max_idx, f);
SENNA_fread_tensor_2d(&pt0->l1_weight, &pt0->input_state_size,
&pt0->hidden_state_size, f);
SENNA_fread_tensor_1d(&pt0->l1_bias, &pt0->hidden_state_size, f);
SENNA_fread_tensor_2d(&pt0->l2_weight, &pt0->hidden_state_size,
&pt0->output_state_size, f);
SENNA_fread_tensor_1d(&pt0->l2_bias, &pt0->output_state_size, f);
SENNA_fread_tensor_1d(&pt0->viterbi_score_init, &pt0->output_state_size, f);
SENNA_fread_tensor_2d(&pt0->viterbi_score_trans, &pt0->output_state_size,
&pt0->output_state_size, f);
SENNA_fread(&pt0->ll_word_padding_idx, sizeof(int), 1, f);
SENNA_fread(&pt0->ll_caps_padding_idx, sizeof(int), 1, f);
SENNA_fread(&pt0->ll_posl_padding_idx, sizeof(int), 1, f);
SENNA_fread(&dummy, sizeof(float), 1, f);
SENNA_fclose(f);
if ((int)dummy != 777)
SENNA_error("pt0: data corrupted (or not IEEE floating computer)");
pt0->input_state = NULL;
pt0->hidden_state = SENNA_malloc(sizeof(float), pt0->hidden_state_size);
pt0->output_state = NULL;
pt0->labels = NULL;
/* some info if you want verbose */
SENNA_message("pt0: window size: %d", pt0->window_size);
SENNA_message("pt0: vector size in word lookup table: %d", pt0->ll_word_size);
SENNA_message("pt0: word lookup table size: %d", pt0->ll_word_max_idx);
SENNA_message("pt0: vector size in caps lookup table: %d", pt0->ll_caps_size);
SENNA_message("pt0: caps lookup table size: %d", pt0->ll_caps_max_idx);
SENNA_message("pt0: vector size in pos lookup table: %d", pt0->ll_posl_size);
SENNA_message("pt0: pos lookup table size: %d", pt0->ll_posl_max_idx);
SENNA_message("pt0: number of hidden units: %d", pt0->hidden_state_size);
SENNA_message("pt0: number of classes: %d", pt0->output_state_size);
pt0->service = false;
pt0->debug = false;
pt0->socketfd = -1;
pt0->calls = 0;
pt0->dnntime = 0;
pt0->apptime = 0;
return pt0;
}
void SENNA_PT0_free(SENNA_PT0 *pt0) {
SENNA_free(pt0->ll_word_weight);
SENNA_free(pt0->ll_caps_weight);
SENNA_free(pt0->ll_posl_weight);
SENNA_free(pt0->l1_weight);
SENNA_free(pt0->l1_bias);
SENNA_free(pt0->l2_weight);
SENNA_free(pt0->l2_bias);
SENNA_free(pt0->viterbi_score_init);
SENNA_free(pt0->viterbi_score_trans);
SENNA_free(pt0->input_state);
SENNA_free(pt0->hidden_state);
SENNA_free(pt0->output_state);
SENNA_free(pt0->labels);
SENNA_free(pt0);
}
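/* Example (sketch): typical lifecycle of the PT0 module. The model path,
subpath and input arrays below are placeholders, not values used elsewhere in
this file.

SENNA_PT0 *pt0 = SENNA_PT0_new("data/", "pt0.dat");
int *labels = SENNA_PT0_forward(pt0, words, caps, posl, n_tokens, socketfd);
// labels[i] is the Viterbi-decoded tag index for token i; the array is owned
// by pt0 and released by SENNA_PT0_free().
SENNA_PT0_free(pt0);
*/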
<|start_filename|>tonic-suite/asr/src/lat/determinize-lattice-pruned.h<|end_filename|>
// lat/determinize-lattice-pruned.h
// Copyright 2009-2012 Microsoft Corporation
// 2012-2013 Johns Hopkins University (Author: <NAME>)
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_LAT_DETERMINIZE_LATTICE_PRUNED_H_
#define KALDI_LAT_DETERMINIZE_LATTICE_PRUNED_H_
#include <fst/fstlib.h>
#include <fst/fst-decl.h>
#include <algorithm>
#include <map>
#include <set>
#include <vector>
#include "fstext/lattice-weight.h"
#include "hmm/transition-model.h"
#include "itf/options-itf.h"
#include "lat/kaldi-lattice.h"
namespace fst {
/// \addtogroup fst_extensions
/// @{
// For example of usage, see test-determinize-lattice-pruned.cc
/*
DeterminizeLatticePruned implements a special form of determinization with
epsilon removal, optimized for a phase of lattice generation. This algorithm
also does pruning at the same time-- the combination is more efficient as it
sometimes prevents us from creating a lot of states that would later be pruned
away. This allows us to increase the lattice-beam and not have the algorithm
blow up. Also, because our algorithm processes states in order from those
that appear on high-scoring paths down to those that appear on low-scoring
paths, we can easily terminate the algorithm after a certain specified number
of states or arcs.
The input is an FST with weight-type BaseWeightType (usually a pair of
floats,
with a lexicographical type of order, such as LatticeWeightTpl<float>).
Typically this would be a state-level lattice, with input symbols equal to
words, and output-symbols equal to p.d.f's (so like the inverse of HCLG).
Imagine representing this as an
acceptor of type CompactLatticeWeightTpl<float>, in which the input/output
symbols are words, and the weights contain the original weights together with
strings (with zero or one symbol in them) containing the original output
labels
(the p.d.f.'s). We determinize this using acceptor determinization with
epsilon removal. Remember (from lattice-weight.h) that
CompactLatticeWeightTpl has a special kind of semiring where we always take
the string corresponding to the best cost (of type BaseWeightType), and
discard the other. This corresponds to taking the best output-label sequence
(of p.d.f.'s) for each input-label sequence (of words). We couldn't use the
Gallic weight for this, or it would die as soon as it detected that the input
FST was non-functional. In our case, any acyclic FST (and many cyclic ones)
can be determinized.
We assume that there is a function
Compare(const BaseWeightType &a, const BaseWeightType &b)
that returns (-1, 0, 1) according to whether (a < b, a == b, a > b) in the
total order on the BaseWeightType... this information should be the
same as NaturalLess would give, but it's more efficient to do it this way.
You can define this for things like TropicalWeight if you need to instantiate
this class for that weight type.
We implement this determinization in a special way to make it efficient for
the types of FSTs that we will apply it to. One issue is that if we
explicitly represent the strings (in CompactLatticeWeightTpl) as vectors of
type vector<IntType>, the algorithm takes time quadratic in the length of
words (in states), because propagating each arc involves copying a whole
vector (of integers representing p.d.f.'s). Instead we use a hash structure
where each string is a pointer (Entry*), and uses a hash from (Entry*,
IntType), to the successor string (and a way to get the latest IntType and
the
ancestor Entry*). [this is the class LatticeStringRepository].
Another issue is that rather than representing a determinized-state as a
collection of (state, weight), we represent it in a couple of reduced forms.
Suppose a determinized-state is a collection of (state, weight) pairs; call
this the "canonical representation". Note: these collections are always
normalized to remove any common weight and string part. Define end-states as
the subset of states that have an arc out of them with a label on, or are
final. If we represent a determinized-state as the set of just its
(end-state,
weight) pairs, this will be a valid and more compact representation, and will
lead to a smaller set of determinized states (like early minimization). Call
this collection of (end-state, weight) pairs the "minimal representation". As
a mechanism to reduce compute, we can also consider another representation.
In the determinization algorithm, we start off with a set of (begin-state,
weight) pairs (where the "begin-states" are initial or have a label on the
transition into them), and the "canonical representation" consists of the
epsilon-closure of this set (i.e. follow epsilons). Call this set of
(begin-state, weight) pairs, appropriately normalized, the "initial
representation". If two initial representations are the same, the "canonical
representation" and hence the "minimal representation" will be the same. We
can use this to reduce compute. Note that if two initial representations are
different, this does not preclude the other representations from being the
same.
*/
struct DeterminizeLatticePrunedOptions {
float delta; // A small offset used to measure equality of weights.
int max_mem; // If >0, determinization will fail and return false
// when the algorithm's (approximate) memory consumption crosses this
// threshold.
int max_loop; // If >0, can be used to detect non-determinizable input
// (a case that wouldn't be caught by max_mem).
int max_states;
int max_arcs;
float retry_cutoff;
DeterminizeLatticePrunedOptions()
: delta(kDelta),
max_mem(-1),
max_loop(-1),
max_states(-1),
max_arcs(-1),
retry_cutoff(0.5) {}
void Register(kaldi::OptionsItf *po) {
po->Register("delta", &delta, "Tolerance used in determinization");
po->Register("max-mem", &max_mem,
"Maximum approximate memory usage in "
"determinization (real usage might be many times this)");
po->Register("max-arcs", &max_arcs,
"Maximum number of arcs in "
"output FST (total, not per state");
po->Register("max-states", &max_states,
"Maximum number of arcs in output "
"FST (total, not per state");
po->Register(
"max-loop", &max_loop,
"Option used to detect a particular "
"type of determinization failure, typically due to invalid input "
"(e.g., negative-cost loops)");
po->Register(
"retry-cutoff", &retry_cutoff,
"Controls pruning un-determinized "
"lattice and retrying determinization: if effective-beam < "
"retry-cutoff * beam, we prune the raw lattice and retry. Avoids "
"ever getting empty output for long segments.");
}
};
struct DeterminizeLatticePhonePrunedOptions {
// delta: a small offset used to measure equality of weights.
float delta;
// max_mem: if > 0, determinization will fail and return false when the
// algorithm's (approximate) memory consumption crosses this threshold.
int max_mem;
// phone_determinize: if true, do a first pass determinization on both phones
// and words.
bool phone_determinize;
// word_determinize: if true, do a second pass determinization on words only.
bool word_determinize;
// minimize: if true, push and minimize after determinization.
bool minimize;
DeterminizeLatticePhonePrunedOptions()
: delta(kDelta),
max_mem(50000000),
phone_determinize(true),
word_determinize(true),
minimize(false) {}
void Register(kaldi::OptionsItf *po) {
po->Register("delta", &delta, "Tolerance used in determinization");
po->Register("max-mem", &max_mem,
"Maximum approximate memory usage in "
"determinization (real usage might be many times this).");
po->Register("phone-determinize", &phone_determinize,
"If true, do an "
"initial pass of determinization on both phones and words (see"
" also --word-determinize)");
po->Register("word-determinize", &word_determinize,
"If true, do a second "
"pass of determinization on words only (see also "
"--phone-determinize)");
po->Register("minimize", &minimize,
"If true, push and minimize after "
"determinization.");
}
};
/**
This function implements the normal version of DeterminizeLattice, in which
the output strings are represented using sequences of arcs, where all but
the first one has an epsilon on the input side. It also prunes using the
beam in the "prune" parameter. The input FST must be topologically sorted
in order for the algorithm to work. For efficiency it is recommended to
sort ilabel as well.
Returns true on success, and false if it had to terminate the
determinization earlier than specified by the "prune" beam -- that is, if it
terminated because of the max_mem, max_loop or max_arcs constraints in the
options.
CAUTION: you may want to use the version below which outputs to
CompactLattice.
*/
template <class Weight>
bool DeterminizeLatticePruned(
const ExpandedFst<ArcTpl<Weight> > &ifst, double prune,
MutableFst<ArcTpl<Weight> > *ofst,
DeterminizeLatticePrunedOptions opts = DeterminizeLatticePrunedOptions());
/* This is a version of DeterminizeLattice with a slightly more "natural"
output format, where the output sequences are encoded using the
CompactLatticeArcTpl template (i.e. the sequences of output symbols are
represented directly as strings). The input FST must be topologically
sorted in order for the algorithm to work. For efficiency it is recommended
to sort the ilabel for the input FST as well.
Returns true on success, and false if it had to terminate the
determinization earlier than specified by the "prune" beam -- that is, if it
terminated because of the max_mem, max_loop or max_arcs constraints in the
options.
CAUTION: if Lattice is the input, you need to Invert() before calling this,
so words are on the input side.
*/
template <class Weight, class IntType>
bool DeterminizeLatticePruned(
const ExpandedFst<ArcTpl<Weight> > &ifst, double prune,
MutableFst<ArcTpl<CompactLatticeWeightTpl<Weight, IntType> > > *ofst,
DeterminizeLatticePrunedOptions opts = DeterminizeLatticePrunedOptions());
/** This function takes in lattices and inserts phones at phone boundaries. It
uses the transition model to work out the transition_id to phone map. The
returning value is the starting index of the phone label. Typically we pick
(maximum_output_label_index + 1) as this value. The inserted phones are then
mapped to (returning_value + original_phone_label) in the new lattice. The
returning value will be used by DeterminizeLatticeDeletePhones() where it
works out the phones according to this value.
*/
template <class Weight>
typename ArcTpl<Weight>::Label DeterminizeLatticeInsertPhones(
const kaldi::TransitionModel &trans_model,
MutableFst<ArcTpl<Weight> > *fst);
/** This function takes in lattices and deletes "phones" from them. The "phones"
here are actually any label that is larger than first_phone_label because
when we insert phones into the lattice, we map the original phone label to
(first_phone_label + original_phone_label). It is supposed to be used
together with DeterminizeLatticeInsertPhones()
*/
template <class Weight>
void DeterminizeLatticeDeletePhones(
typename ArcTpl<Weight>::Label first_phone_label,
MutableFst<ArcTpl<Weight> > *fst);
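/* Worked example (sketch): if the largest word label in the lattice is 99999,
DeterminizeLatticeInsertPhones() would typically return first_phone_label =
100000, so phone 7 appears in the lattice as label 100007; a later call to
DeterminizeLatticeDeletePhones(100000, fst) strips all such labels again. */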
/** This function is a wrapper of DeterminizeLatticePhonePrunedFirstPass() and
DeterminizeLatticePruned(). If --phone-determinize is set to true, it first
calls DeterminizeLatticePhonePrunedFirstPass() to do the initial pass of
determinization on the phone + word lattices. If --word-determinize is set
true, it then does a second pass of determinization on the word lattices by
calling DeterminizeLatticePruned(). If both are set to false, then it gives
a warning and copies the lattices without determinization.
Note: the point of doing first a phone-level determinization pass and then
a word-level determinization pass is that it allows us to determinize
deeper lattices without "failing early" and returning a too-small lattice
due to the max-mem constraint. The result should be the same as word-level
determinization in general, but for deeper lattices it is a bit faster,
despite the fact that we now have two passes of determinization by default.
*/
template <class Weight, class IntType>
bool DeterminizeLatticePhonePruned(
const kaldi::TransitionModel &trans_model,
const ExpandedFst<ArcTpl<Weight> > &ifst, double prune,
MutableFst<ArcTpl<CompactLatticeWeightTpl<Weight, IntType> > > *ofst,
DeterminizeLatticePhonePrunedOptions opts =
DeterminizeLatticePhonePrunedOptions());
/** "Destructive" version of DeterminizeLatticePhonePruned() where the input
lattice might be changed.
*/
template <class Weight, class IntType>
bool DeterminizeLatticePhonePruned(
const kaldi::TransitionModel &trans_model,
MutableFst<ArcTpl<Weight> > *ifst, double prune,
MutableFst<ArcTpl<CompactLatticeWeightTpl<Weight, IntType> > > *ofst,
DeterminizeLatticePhonePrunedOptions opts =
DeterminizeLatticePhonePrunedOptions());
/** This function is a wrapper of DeterminizeLatticePhonePruned() that works for
Lattice type FSTs. It simplifies the calling process by calling
TopSort(), Invert() and ArcSort() for you.
Unlike other determinization routines, the function
requires "ifst" to have transition-id's on the input side and words on the
output side.
*/
bool DeterminizeLatticePhonePrunedWrapper(
const kaldi::TransitionModel &trans_model,
MutableFst<kaldi::LatticeArc> *ifst, double prune,
MutableFst<kaldi::CompactLatticeArc> *ofst,
DeterminizeLatticePhonePrunedOptions opts =
DeterminizeLatticePhonePrunedOptions());
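/* Example (sketch): typical invocation on a raw state-level lattice with
transition-ids on the input side and words on the output side; "trans_model"
and "beam" are assumed to come from the caller.

kaldi::Lattice lat; // raw lattice, e.g. from the decoder
kaldi::CompactLattice clat;
DeterminizeLatticePhonePrunedOptions opts;
bool ok = DeterminizeLatticePhonePrunedWrapper(trans_model, &lat, beam,
&clat, opts);
if (!ok) {
// determinization was cut short (e.g. by the max_mem constraint)
}
*/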
/// @} end "addtogroup fst_extensions"
} // end namespace fst
#endif
<|start_filename|>tonic-suite/asr/src/fstext/rescale.h<|end_filename|>
// fstext/rescale.h
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_FSTEXT_RESCALE_H_
#define KALDI_FSTEXT_RESCALE_H_
#include <algorithm>
#include <map>
#include <set>
#include <vector>
#include <fst/fstlib.h>
#include <fst/fst-decl.h>
namespace fst {
/// ComputeTotalWeight computes (approximately) the total weight of the FST,
/// i.e. the sum of all paths. It will only work for arcs of StdArc/LogArc type
/// whose weights we can compare using Value(). If the total weight is greater
/// than max_weight, we just return max_weight (this enables us to avoid
/// pathological cases that would not terminate).
template <class Arc>
inline typename Arc::Weight ComputeTotalWeight(ExpandedFst<Arc> &fst,
typename Arc::Weight max_weight,
float delta = kDelta);
/// Rescale multiplies (in the semiring) all weights and final probabilities in
/// the FST by this weight. Does not preserve equivalence.
template <class Arc>
inline void Rescale(MutableFst<Arc> *fst, typename Arc::Weight rescale);
/// RescaleToStochastic uses a form of line search to compute the weight we must
/// apply to the FST using Rescale to make it so that the "total weight" of the
/// FST is unity, and applies this weight. The projected use-case is that
/// you want to do push-weights but you're scared this might blow up, so you
/// do RescaleToStochastic, push-weights, and then Rescale with the inverse
/// (in the semiring) of that weight, so that you are equivalent to the
/// original FST and the "non-stochasticity" is distributed equally among
/// all states.
inline LogWeight RescaleToStochastic(MutableFst<LogArc> *fst,
float approx_delta = 0.001,
float delta = kDelta);
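/* Example (sketch) of the projected use-case described above, for a
MutableFst<LogArc> *fst; the weight-pushing step is only indicated, not
spelled out.

LogWeight w = RescaleToStochastic(fst); // total weight becomes (close to) One()
// ... push weights here (e.g. with OpenFst's Push) ...
Rescale(fst, Divide(LogWeight::One(), w)); // undo, restoring equivalence
*/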
} // end namespace fst
#include "fstext/rescale-inl.h"
#endif
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-stats.h<|end_filename|>
// nnet2/nnet-stats.h
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_NNET2_NNET_STATS_H_
#define KALDI_NNET2_NNET_STATS_H_
#include "nnet2/nnet-nnet.h"
namespace kaldi {
namespace nnet2 {
/* This program computes various statistics from a neural net. These are
summaries of certain quantities already present in the network as
stored on disk, especially regarding certain average values and
derivatives of the sigmoids.
*/
struct NnetStatsConfig {
BaseFloat bucket_width;
NnetStatsConfig() : bucket_width(0.025) {}
void Register(OptionsItf *po) {
po->Register("bucket-width", &bucket_width,
"Width of bucket in average-derivative "
"stats for analysis.");
}
};
class NnetStats {
public:
NnetStats(int32 affine_component_index, BaseFloat bucket_width)
: affine_component_index_(affine_component_index),
bucket_width_(bucket_width),
global_(0, -1) {}
// Use default copy constructor and assignment operator.
void AddStats(BaseFloat avg_deriv, BaseFloat avg_value);
void AddStatsFromNnet(const Nnet &nnet);
void PrintStats(std::ostream &os);
private:
struct StatsElement {
BaseFloat deriv_begin; // avg-deriv, beginning of bucket.
BaseFloat deriv_end; // avg-deriv, end of bucket.
BaseFloat deriv_sum; // sum of avg-deriv within bucket.
BaseFloat deriv_sumsq; // Sum-squared of avg-deriv within bucket.
BaseFloat abs_value_sum; // Sum of abs(avg-value). Tells us whether it's
// saturating at one or both ends.
BaseFloat abs_value_sumsq; // Sum-squared of abs(avg-value).
int32 count; // Number of nonlinearities in this bucket.
StatsElement(BaseFloat deriv_begin, BaseFloat deriv_end)
: deriv_begin(deriv_begin),
deriv_end(deriv_end),
deriv_sum(0.0),
deriv_sumsq(0.0),
abs_value_sum(0.0),
abs_value_sumsq(0.0),
count(0) {}
void AddStats(BaseFloat avg_deriv, BaseFloat avg_value);
// Outputs stats for this bucket; no newline
void PrintStats(std::ostream &os);
};
int32 BucketFor(BaseFloat avg_deriv); // returns the bucket
// for this avg-derivative value, and makes sure it is allocated.
int32 affine_component_index_; // Component index of the affine component
// associated with this nonlinearity.
BaseFloat bucket_width_; // width of buckets of stats we store (in derivative
// values).
std::vector<StatsElement>
buckets_; // Stats divided into buckets by avg_deriv.
StatsElement global_; // All the stats.
};
void GetNnetStats(const NnetStatsConfig &config, const Nnet &nnet,
std::vector<NnetStats> *stats);
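// Example (sketch): collecting and printing the per-nonlinearity statistics
// for a network "nnet" that has already been read from disk.
//
// NnetStatsConfig config; // default bucket_width = 0.025
// std::vector<NnetStats> stats;
// GetNnetStats(config, nnet, &stats);
// for (size_t i = 0; i < stats.size(); i++)
// stats[i].PrintStats(std::cerr);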
} // namespace nnet2
} // namespace kaldi
#endif // KALDI_NNET2_NNET_STATS_H_
<|start_filename|>tonic-suite/asr/src/bin/make-pdf-to-tid-transducer.cc<|end_filename|>
// bin/make-pdf-to-tid-transducer.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "hmm/transition-model.h"
#include "hmm/hmm-utils.h"
#include "util/common-utils.h"
#include "fst/fstlib.h"
int main(int argc, char *argv[]) {
#ifdef _MSC_VER
if (0) {
fst::VectorFst<fst::StdArc> *fst = NULL;
fst->Write("");
}
#endif
try {
using namespace kaldi;
typedef kaldi::int32 int32;
using fst::SymbolTable;
using fst::VectorFst;
using fst::StdArc;
const char *usage =
"Make transducer from pdfs to transition-ids\n"
"Usage: make-pdf-to-tid-transducer model-filename [fst-out]\n"
"e.g.: \n"
" make-pdf-to-tid-transducer 1.mdl > pdf2tid.fst\n";
ParseOptions po(usage);
po.Read(argc, argv);
if (po.NumArgs() < 1 || po.NumArgs() > 2) {
po.PrintUsage();
exit(1);
}
std::string trans_model_filename = po.GetArg(1);
std::string fst_out_filename = po.GetOptArg(2);
TransitionModel trans_model;
ReadKaldiObject(trans_model_filename, &trans_model);
fst::VectorFst<fst::StdArc> *fst =
GetPdfToTransitionIdTransducer(trans_model);
if (!fst->Write(fst_out_filename))
KALDI_ERR << "Error writing fst to "
<< (fst_out_filename == "" ? "standard output"
: fst_out_filename);
delete fst;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/cudamatrix/cu-device.cc<|end_filename|>
// cudamatrix/cu-device.cc
// Copyright 2009-2012 <NAME>
// 2013 <NAME>
// 2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#if HAVE_CUDA == 1
#include <cublas.h>
#include <cuda.h>
#include <cuda_runtime_api.h>
#include <string>
#include <vector>
#include <algorithm>
#include <dlfcn.h>
#include <unistd.h> // for sleep
#include "cudamatrix/cu-common.h"
#include "cudamatrix/cu-device.h"
#include "cudamatrix/cu-matrix.h"
#include "base/kaldi-error.h"
#include "util/common-utils.h"
namespace kaldi {
/**
* SelectGpuId(use_gpu)
*
* There are 3 'use_gpu' modes for GPU selection:
* "yes" -- Select GPU automatically (or get one by exclusive mode)
* and die if this fails.
* "optional" -- Do as above, but if it fails, back off to CPU.
* "no" -- Run on CPU.
*
 * In the case of compute-exclusive mode, the GPU is selected by the OS.
*
* Otherwise GPU selection is based on largest proportion of free memory.
* This can eventually lead to multiple processes computing on single GPU,
* which is slow. More practical is to use "compute exclusive mode".
*
* This method is to be called at the very beginning of the program
* (before first allocation in cudamatrix), or not at all (default to CPU).
*
*/
void CuDevice::SelectGpuId(std::string use_gpu) {
// Possible modes
if (use_gpu != "yes" && use_gpu != "no" && use_gpu != "optional") {
KALDI_ERR << "Please choose : --use-gpu=yes|no|optional, passed '"
<< use_gpu << "'";
}
// Make sure this function is not called twice!
if (Enabled()) {
KALDI_ERR << "There is already an active GPU " << active_gpu_id_
<< ", cannot change it on the fly!";
}
// Allow the GPU to stay disabled
if (!Enabled() && use_gpu == "no") {
KALDI_LOG << "Manually selected to compute on CPU.";
return;
}
// Check that we have a gpu available
int32 n_gpu = 0;
cudaGetDeviceCount(&n_gpu);
if (n_gpu == 0) {
if (use_gpu == "yes") {
KALDI_ERR << "No CUDA GPU detected!";
}
if (use_gpu == "optional") {
KALDI_WARN << "Running on CPU!!! No CUDA GPU detected...";
return;
}
}
//
// Create a CUDA context: in compute-exclusive mode the OS selects the
// gpu_id, otherwise the default gpu_id=0 is used. With no free GPUs a
// context cannot be created (compute-exclusive mode).
//
cudaError_t e;
e = cudaThreadSynchronize(); //<< CUDA context gets created here.
if (e != cudaSuccess) {
// So far we don't have a context; sleep a bit and retry.
int32 sec_sleep = (use_gpu == "yes" ? 20 : 2);
KALDI_WARN << "Will try again to get a GPU after " << sec_sleep
<< " seconds.";
sleep(sec_sleep);
cudaGetLastError(); // reset the error state
e = cudaThreadSynchronize(); //<< 2nd trial to get CUDA context.
if (e != cudaSuccess) {
if (use_gpu == "yes") {
KALDI_ERR << "Failed to create CUDA context, no more unused GPUs?";
}
if (use_gpu == "optional") {
KALDI_WARN << "Running on CPU!!! No more unused CUDA GPUs?";
return;
}
}
}
// Re-assure we have the context
KALDI_ASSERT(cudaSuccess == cudaThreadSynchronize());
// Check if the machine use compute exclusive mode
if (IsComputeExclusive()) {
FinalizeActiveGpu();
return;
} else {
// Or suggest to use compute exclusive mode
if (n_gpu > 1) {
KALDI_WARN
<< "Suggestion: use 'nvidia-smi -c 1' to set compute exclusive mode";
}
// And select the GPU according to proportion of free memory
if (SelectGpuIdAuto()) {
FinalizeActiveGpu();
return;
} else {
// Could not get a GPU, after previously having the CUDA context?
// Strange but not impossible...
if (use_gpu == "yes") {
KALDI_ERR << "Error acquiring GPU.";
}
if (use_gpu == "optional") {
KALDI_WARN << "Running on CPU!!! Error acquiring GPU.";
return;
}
}
}
}
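// Example (sketch): typical call at the very beginning of a CUDA-enabled
// binary, where "use_gpu" usually comes from a --use-gpu command-line option:
//
// #if HAVE_CUDA == 1
// CuDevice::Instantiate().SelectGpuId(use_gpu); // "yes", "no" or "optional"
// #endif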
void CuDevice::FinalizeActiveGpu() {
// The device at this point should have active GPU, so we can query its name
// and memory stats and notify user which GPU is finally used.
// Get the device-id of active device:
{
int32 act_gpu_id;
cudaError_t e;
e = cudaGetDevice(&act_gpu_id);
if (e != cudaSuccess) {
KALDI_ERR << "Failed to get device-id of active device.";
}
// Remember the id of active GPU
active_gpu_id_ = act_gpu_id; // CuDevice::Enabled() is true from now on
// Initialize the CUBLAS
CU_SAFE_CALL(cublasInit());
// Notify user which GPU is finally used
char name[128];
DeviceGetName(name, 128, act_gpu_id);
CU_SAFE_CALL(cudaGetDeviceProperties(&properties_, act_gpu_id));
KALDI_LOG << "The active GPU is [" << act_gpu_id << "]: " << name << "\t"
<< GetFreeMemory(&free_memory_at_startup_, NULL) << " version "
<< properties_.major << "." << properties_.minor;
if (verbose_) PrintMemoryUsage();
}
return;
}
bool CuDevice::DoublePrecisionSupported() {
if (!Enabled()) return true;
return properties_.major > 1 ||
(properties_.major == 1 && properties_.minor >= 3);
// Double precision is supported from version 1.3
}
bool CuDevice::IsComputeExclusive() {
// assume we already have a CUDA context created
KALDI_ASSERT(cudaSuccess == cudaThreadSynchronize());
// get the device-id and its device-properties
int32 gpu_id = -1;
cudaError_t e = cudaGetDevice(&gpu_id);
if (e != cudaSuccess) {
KALDI_ERR << "Failed to get current device";
}
struct cudaDeviceProp gpu_prop;
e = cudaGetDeviceProperties(&gpu_prop, gpu_id);
if (e != cudaSuccess) {
KALDI_ERR << "Failed to get device properties";
}
// find out whether compute exclusive mode is used
switch (gpu_prop.computeMode) {
case cudaComputeModeExclusive:
KALDI_LOG << "CUDA setup operating under Compute Exclusive Mode.";
return true;
break;
#if (CUDA_VERSION >= 4000)
case cudaComputeModeExclusiveProcess:
KALDI_LOG << "CUDA setup operating under Compute Exclusive Process Mode.";
return true;
break;
#endif
default:
// The computation mode is not compute-exclusive,
// in this case we release the GPU context...
e = cudaThreadExit(); // deprecated, but for legacy reason not
// cudaDeviceReset
if (e != cudaSuccess) {
KALDI_ERR << "Failed to release CUDA context on a GPU";
}
return false;
}
}
bool CuDevice::SelectGpuIdAuto() {
// Check that we have at least one gpu
int32 n_gpu = 0;
cudaGetDeviceCount(&n_gpu);
if (n_gpu == 0) {
KALDI_WARN << "No CUDA devices found";
return false;
}
// The GPU is selected according to maximal free memory ratio
std::vector<float> free_mem_ratio(n_gpu + 1, 0.0);
// Get ratios of memory use, if possible
KALDI_LOG << "Selecting from " << n_gpu << " GPUs";
for (int32 n = 0; n < n_gpu; n++) {
int32 ret = cudaSetDevice(n);
switch (ret) {
case cudaSuccess: {
// create the CUDA context for the thread
cudaThreadSynchronize(); // deprecated, but for legacy not
// cudaDeviceSynchronize
// get GPU name
char name[128];
DeviceGetName(name, 128, n);
// get GPU memory stats
int64 free, total;
std::string mem_stats;
mem_stats = GetFreeMemory(&free, &total);
// log
KALDI_LOG << "cudaSetDevice(" << n << "): " << name << "\t"
<< mem_stats;
// store the free/total ratio
free_mem_ratio[n] = free / (float)total;
// destroy the CUDA context for the thread
cudaThreadExit(); // deprecated, but for legacy reason not
// cudaDeviceReset
} break;
#if (CUDA_VERSION > 3020)
case cudaErrorDeviceAlreadyInUse:
KALDI_LOG << "cudaSetDevice(" << n << "): "
<< "Device cannot be accessed, used EXCLUSIVE-THREAD mode...";
break;
#endif
case cudaErrorInvalidDevice:
KALDI_LOG << "cudaSetDevice(" << n << "): "
<< "Device cannot be accessed, not a VALID CUDA device!";
break;
default:
KALDI_LOG << "cudaSetDevice(" << n << "): "
<< "returned " << ret << ", "
<< cudaGetErrorString((cudaError_t)ret);
}
}
// find GPU with max free memory
int32 max_id = 0;
for (int32 n = 1; n < free_mem_ratio.size(); n++) {
if (free_mem_ratio[n] > free_mem_ratio[max_id]) max_id = n;
}
// the free_mem_ratio should be bigger than zero
KALDI_ASSERT(free_mem_ratio[max_id] > 0.0);
// finally select the GPU
KALDI_LOG << "Selected device: " << max_id << " (automatically)";
CU_SAFE_CALL(cudaSetDevice(max_id));
// create the context
cudaError_t e;
e = cudaThreadSynchronize(); // deprecated, but for legacy not
// cudaDeviceSynchronize
if (e != cudaSuccess) {
KALDI_WARN << "Failed to create CUDA context on a GPU.";
return false;
}
return true;
}
void CuDevice::AccuProfile(const std::string &key, double time) {
if (profile_map_.find(key) == profile_map_.end()) {
profile_map_[key] = 0.0;
}
profile_map_[key] += time;
}
void CuDevice::PrintMemoryUsage() const {
if (Enabled()) {
int64 free_memory_now;
GetFreeMemory(&free_memory_now, NULL);
KALDI_LOG << "Memory used: " << (free_memory_at_startup_ - free_memory_now)
<< " bytes.";
}
}
void CuDevice::PrintProfile() {
if (verbose_ && Enabled()) {
std::ostringstream os;
os << "-----\n[cudevice profile]\n";
std::map<std::string, double>::iterator it;
std::vector<std::pair<double, std::string> > pairs;
for (it = profile_map_.begin(); it != profile_map_.end(); ++it)
pairs.push_back(std::make_pair(it->second, it->first));
std::sort(pairs.begin(), pairs.end());
size_t max_print = 15,
start_pos =
(pairs.size() <= max_print ? 0 : pairs.size() - max_print);
for (size_t i = start_pos; i < pairs.size(); i++)
os << pairs[i].second << "\t" << pairs[i].first << "s\n";
os << "-----";
KALDI_LOG << os.str();
PrintMemoryUsage();
}
}
std::string CuDevice::GetFreeMemory(int64 *free, int64 *total) const {
// WARNING! the CUDA API is inconsistent across versions!
#if (CUDA_VERSION >= 3020)
// define the function signature type
size_t mem_free, mem_total;
#else
unsigned int mem_free, mem_total;
#endif
{
// we will load the cuMemGetInfo dynamically from libcuda.so
// cuMemGetInfo(&mem_free, &mem_total);
// pre-fill ``safe'' values that will not cause problems
mem_free = 1;
mem_total = 1;
// open libcuda.so
void *libcuda = dlopen("libcuda.so", RTLD_LAZY);
if (NULL == libcuda) {
KALDI_WARN << "cannot open libcuda.so";
} else {
// define the function signature type
// and get the symbol
#if (CUDA_VERSION >= 3020)
typedef CUresult (*cu_fun_ptr)(size_t *, size_t *);
cu_fun_ptr dl_cuMemGetInfo =
(cu_fun_ptr)dlsym(libcuda, "cuMemGetInfo_v2");
#else
typedef CUresult (*cu_fun_ptr)(int *, int *);
cu_fun_ptr dl_cuMemGetInfo = (cu_fun_ptr)dlsym(libcuda, "cuMemGetInfo");
#endif
if (NULL == dl_cuMemGetInfo) {
KALDI_WARN << "cannot load cuMemGetInfo from libcuda.so";
} else {
// call the function
dl_cuMemGetInfo(&mem_free, &mem_total);
}
// close the library
dlclose(libcuda);
}
}
// copy the output values outside
if (NULL != free) *free = mem_free;
if (NULL != total) *total = mem_total;
// prepare the text output
std::ostringstream os;
os << "free:" << mem_free / (1024 * 1024) << "M, "
<< "used:" << (mem_total - mem_free) / (1024 * 1024) << "M, "
<< "total:" << mem_total / (1024 * 1024) << "M, "
<< "free/total:" << mem_free / (float)mem_total;
return os.str();
}
void CuDevice::DeviceGetName(char *name, int32 len, int32 dev) {
// prefill with something reasonable
strncpy(name, "Unknown GPU", len);
// open libcuda.so
void *libcuda = dlopen("libcuda.so", RTLD_LAZY);
if (NULL == libcuda) {
KALDI_WARN << "cannot open libcuda.so";
} else {
// define the function signature type
typedef CUresult (*cu_fun_ptr)(char *, int, CUdevice);
// get the symbol
cu_fun_ptr cuDeviceGetName_ptr =
(cu_fun_ptr)dlsym(libcuda, "cuDeviceGetName");
if (NULL == cuDeviceGetName_ptr) {
KALDI_WARN << "cannot load cuDeviceGetName from libcuda.so";
} else {
// call the function
cuDeviceGetName_ptr(name, len, dev);
}
// close the library
dlclose(libcuda);
}
}
void CuDevice::CheckGpuHealth() {
if (!Enabled()) return;
Timer t;
// prepare small matrices for a quick test
Matrix<BaseFloat> a(50, 100);
Matrix<BaseFloat> b(100, 50);
a.SetRandn();
b.SetRandUniform();
// multiply 2 small matrices in CPU:
Matrix<BaseFloat> c(50, 50);
c.AddMatMat(1.0, a, kNoTrans, b, kNoTrans, 0.0);
// multiply same matrices in GPU:
CuMatrix<BaseFloat> c1(50, 50);
c1.AddMatMat(1.0, CuMatrix<BaseFloat>(a), kNoTrans, CuMatrix<BaseFloat>(b),
kNoTrans, 0.0);
// check that the relative difference is < 1%
AssertEqual(c, Matrix<BaseFloat>(c1), 0.01);
// measure time spent in this check
AccuProfile(__func__, t.Elapsed());
}
struct CuAllocatorOptions {
bool cache_memory; // Enable GPU memory caching (false = disable).
int32 count; // Number of times we free and delete a particular size before we
// start to cache it.
int32 cleanup_interval_bytes;
CuAllocatorOptions()
: cache_memory(true), count(1), cleanup_interval_bytes(1000000) {}
};
/// We define class CuAllocator inside the .cc file, because we don't want to
/// expose it in the header. Its purpose is to hang on to memory that we have
/// freed, so that we don't waste time in cudaMalloc and cudaMallocPitch().
/// For some reason, they are sometimes very slow.
class CuAllocator {
public:
CuAllocator(const CuAllocatorOptions &opts, CuDevice *device)
: device_(device),
opts_(opts),
cleanup_countdown_bytes_(opts.cleanup_interval_bytes) {}
inline void *Malloc(size_t size);
inline void *MallocPitch(size_t row_bytes, size_t num_rows, size_t *pitch);
inline void Free(void *ptr);
inline void DisableCaching();
~CuAllocator();
private:
inline void *MallocInternal(size_t row_bytes, size_t num_rows, size_t *pitch);
// struct MemInfoForSize stores information associated with a particular size
// of allocated memory. The row_bytes and num_rows refer to the arguments of
// a cudaMallocPitch call; for regular, non-pitch allocations with cudaMalloc,
// we make "row_bytes" zero and the size in bytes is "num_rows"... there is a
// reason why we do it this way round (make num_rows contain the size in
// bytes); it relates to the ordering of the map, and the behavior when
// we don't find the exact size and want to find a larger match.
struct MemInfoForSize {
size_t row_bytes; // or zero, if a regular CudaMalloc, not
// CudaMallocPitch.
size_t num_rows; // or the number of rows, if it's a regular CudaMalloc
// call, not CudaMallocPitch.
size_t pitch; // If CudaMallocPitch, the pitch returned by CudaMallocPitch;
// this code assumes (and checks) that it's a deterministic
// function of row_bytes and num_rows.
size_t countdown; // number of frees of this size remaining before we start caching them.
size_t currently_used; // number that are "in the wild".. kept for
// diagnostics and error detection.
std::vector<void *> freed; // freed and cached...
MemInfoForSize(size_t row_bytes, size_t num_rows, int32 count)
: row_bytes(row_bytes),
num_rows(num_rows),
pitch(0),
countdown(count),
currently_used(0) {}
};
// FindMemInfo returns the MemInfoForSize object for this (row_bytes,
// num_rows) combination if it exists; otherwise:
// if there is a MemInfoForSize object with the same row_bytes and a larger
// (but not more than twice larger) num_rows that has freed memory waiting,
// it returns that; otherwise, it returns a new MemInfoForSize object for the
// requested size.
inline MemInfoForSize *FindMemInfo(size_t row_bytes, size_t num_rows) {
if (row_bytes >= size_to_list_.size())
size_to_list_.resize(row_bytes + 1, NULL);
// note: we set row_bytes to 0 for regular, linear allocation.
KALDI_ASSERT(num_rows != 0);
if (size_to_list_[row_bytes] == NULL)
size_to_list_[row_bytes] = new std::map<size_t, MemInfoForSize *>;
std::map<size_t, MemInfoForSize *> &size_to_list =
*(size_to_list_[row_bytes]);
typedef std::map<size_t, MemInfoForSize *>::iterator IterType;
// get an iterator to the requested object or the next-larger one.
// Here, upper_bound(num_rows - 1) returns an object strictly greater
// than num_rows - 1, which could be num_rows itself. We need to
// treat num_rows == 0 as a special case because of size_t being
// unsigned.
IterType iter = (num_rows == 0 ? size_to_list.begin()
: size_to_list.upper_bound(num_rows - 1));
if (iter != size_to_list.end() && iter->first == num_rows) {
// Found a MemInfoForSize object
// with the requested size -> return it.
KALDI_ASSERT(iter->second->row_bytes == row_bytes &&
iter->second->num_rows == num_rows);
return iter->second;
} else if (iter != size_to_list.end() &&
iter->second->num_rows <= 2 * num_rows &&
!iter->second->freed.empty()) {
// Return the non-matching one with freed memory, which is larger than
// this one but not more than twice larger.
KALDI_ASSERT(iter->second->row_bytes == row_bytes &&
iter->second->num_rows > num_rows); // confirm expectations.
return iter->second;
} else {
// There was no such object, and the next-larger object either did not
// exist, had more than twice the num-rows requested, or had no free
// memory -> create an object with the requested size.
return (size_to_list[num_rows] =
new MemInfoForSize(row_bytes, num_rows, opts_.count));
}
}
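// Worked example of the lookup policy above (illustrative only): suppose a
// caller asks for (row_bytes = 0, num_rows = 1000), i.e. a plain 1000-byte
// cudaMalloc:
//   - if an entry for exactly 1000 bytes exists, it is returned;
//   - otherwise, if the next-larger entry is e.g. 1500 bytes (<= 2 * 1000)
//     and has cached pointers in 'freed', that entry is reused, trading a
//     little slack memory for skipping a cudaMalloc call;
//   - otherwise (next-larger entry is > 2000 bytes, has nothing cached, or
//     does not exist), a fresh MemInfoForSize for 1000 bytes is created.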
void PossiblyCleanup(size_t num_bytes);
// A periodic housekeeping task.
void Cleanup();
// Frees all memory in the "freed" vectors; memory that the
// user freed but we held on to. If destroy == true, also
// clean up all memory held in the size_to_list_ object (i.e.
// allocated maps and MemInfoForSize objects).
void ReleaseAllCachedMemory(bool destroy = false);
CuDevice *device_; // device this is attached to...
CuAllocatorOptions opts_;
unordered_map<void *, MemInfoForSize *> addr_to_list_;
// size_to_list_ is indexed first by row_bytes (which is zero for linear
// mallocs) and then by num_rows (which for linear mallocs, is the actual size
// in bytes).
std::vector<std::map<size_t, MemInfoForSize *> *> size_to_list_;
int32 cleanup_countdown_bytes_; // countdown in bytes, until the next time we
// check whether we should do cleanup
};
void *CuAllocator::Malloc(size_t size) {
KALDI_ASSERT(size > 0);
return MallocInternal(0, size, NULL);
}
void *CuAllocator::MallocPitch(size_t row_bytes, size_t num_rows,
size_t *pitch) {
KALDI_ASSERT(row_bytes > 0 && num_rows > 0 && pitch != NULL);
return MallocInternal(row_bytes, num_rows, pitch);
}
void *CuAllocator::MallocInternal(size_t row_bytes, size_t num_rows,
size_t *pitch_out) {
// we share the code for standard cudaMalloc and cudaMallocPitch
// because most of it is the same. for cudaMalloc, we'll have
// row_bytes == 0, and num_rows is just the size to be allocated.
KALDI_ASSERT(num_rows != 0 && (row_bytes != 0) == (pitch_out != NULL));
MemInfoForSize *info = FindMemInfo(row_bytes, num_rows);
if (!info->freed.empty()) { // We can satisfy the request with cached,
// previously-allocated memory.
void *ans = info->freed.back();
info->freed.pop_back();
info->currently_used++;
addr_to_list_[ans] = info;
if (pitch_out) *pitch_out = info->pitch;
return ans;
} else {
PossiblyCleanup(row_bytes == 0 ? num_rows : row_bytes * num_rows);
void *ans;
if (row_bytes == 0) { // Simple malloc request, not "MallocPitch".
size_t size = num_rows;
int32 ret = cudaMalloc(&ans, size);
if (ret != 0) {
KALDI_WARN << "Allocation of memory block of " << size << " bytes "
<< "failed, releasing cached memory and retrying.";
cudaGetLastError(); // reset the error state
ReleaseAllCachedMemory();
ret = cudaMalloc(&ans, size);
if (ret != 0) {
KALDI_WARN << "Allocation failed for the second time. Printing "
<< "device memory usage and exiting";
device_->PrintMemoryUsage();
KALDI_ERR << "Memory allocation failure";
}
}
} else {
size_t pitch;
int32 ret = cudaMallocPitch(&ans, &pitch, row_bytes, num_rows);
if (ret != 0) { // allocation failed...
KALDI_WARN << "Allocation of " << num_rows << " rows, each of size "
<< row_bytes << " bytes failed, releasing cached "
<< "memory and retrying.";
cudaGetLastError(); // reset the error state
ReleaseAllCachedMemory();
ret = cudaMallocPitch(&ans, &pitch, row_bytes, num_rows);
if (ret != 0) {
KALDI_WARN << "Allocation failed for the second time. Printing "
<< "device memory usage and exiting";
device_->PrintMemoryUsage();
KALDI_ERR << "Memory allocation failure";
}
}
KALDI_ASSERT(pitch > 0);
if (info->pitch ==
0) { // First allocation; have not set info->pitch yet.
info->pitch = pitch;
} else if (pitch != info->pitch) {
KALDI_ERR << "Pitch differs between multiple calls with the same "
<< "parameters: " << pitch << " vs. " << info->pitch;
}
*pitch_out = info->pitch;
}
addr_to_list_[ans] = info;
info->currently_used++;
return ans;
}
}
void CuAllocator::Free(void *addr) {
unordered_map<void *, MemInfoForSize *>::iterator iter =
addr_to_list_.find(addr);
if (iter == addr_to_list_.end()) {
KALDI_ERR << "Attempt to free address " << addr
<< " that was not allocated "
<< "by CuDevice::Malloc() (or was previously freed);";
}
MemInfoForSize *info = iter->second;
addr_to_list_.erase(addr); // Erase this element in the addr_to_list_ map.
info->currently_used--;
if (info->countdown == 0 && opts_.cache_memory) {
// We have freed [i.e. actually freed with
// CudaFree()] enough of these that we think
// we're wasting too much time this way and
// need to start caching them.
info->freed.push_back(addr);
} else { // Actually free the address, and decrease "countdown".
info->countdown--;
CU_SAFE_CALL(
cudaFree(addr)); // This is how we free, even if allocated with
// cudaMallocPitch().
}
}
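// Illustrative trace of the caching policy above (not from the original
// source; assumes the default CuAllocatorOptions, i.e. count == 1 and
// cache_memory == true), for repeated allocations of one size:
//
//   void *p1 = allocator.Malloc(4096); // cudaMalloc (nothing cached yet)
//   allocator.Free(p1);                // countdown 1 -> 0, real cudaFree
//   void *p2 = allocator.Malloc(4096); // cudaMalloc again
//   allocator.Free(p2);                // countdown == 0 -> pointer is cached
//   void *p3 = allocator.Malloc(4096); // served from the cache, no cudaMalloc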
inline void CuAllocator::DisableCaching() {
KALDI_LOG << "Disabling caching of GPU memory.";
KALDI_ASSERT(size_to_list_.empty()); // No memory allocated yet!
opts_.cache_memory = false;
}
void CuAllocator::ReleaseAllCachedMemory(bool destroy) {
KALDI_VLOG(2) << "Releasing all cached memory.";
for (size_t i = 0; i < size_to_list_.size(); i++) {
if (size_to_list_[i] == NULL) continue;
typedef std::map<size_t, MemInfoForSize *>::iterator IterType;
for (IterType iter = size_to_list_[i]->begin();
iter != size_to_list_[i]->end(); ++iter) {
MemInfoForSize *info = iter->second;
if (destroy && !info->freed.empty()) {
// When called from the destructor at program end, if verbose level is
// high, say the sizes we had.
if (info->row_bytes == 0) {
KALDI_VLOG(3) << "Releasing " << info->freed.size() << " blocks of "
<< info->num_rows << " bytes.";
} else {
KALDI_VLOG(3) << "Releasing " << info->freed.size()
<< " 2-d blocks of " << info->num_rows << " rows of "
<< info->row_bytes << " bytes each.";
}
}
if (!destroy) {
// We only do this freeing part when we're *not* called from the
// destructor (destroy = false). This leads to a crash when called from
// the destructor, with cudaFree returning "unload of CUDA runtime
// failed". Presumably this has to do with the destruction order of
// C++, which we can't really control.
while (!info->freed.empty()) {
CU_SAFE_CALL(cudaFree(info->freed.back()));
info->freed.pop_back();
}
}
if (destroy) delete info;
}
if (destroy) {
delete size_to_list_[i];
size_to_list_[i] = NULL;
}
}
}
void CuAllocator::Cleanup() {
// TODO: implement this or remove it (and also PossiblyCleanup).
// Actually we may never implement this, as just calling
// ReleaseAllCachedMemory whenever an allocation fails is probably
// sufficient.
}
void CuAllocator::PossiblyCleanup(size_t num_bytes) {
if (static_cast<size_t>(cleanup_countdown_bytes_) <= num_bytes) {
Cleanup();
cleanup_countdown_bytes_ = opts_.cleanup_interval_bytes;
} else {
cleanup_countdown_bytes_ -= static_cast<int32>(num_bytes);
}
}
CuAllocator::~CuAllocator() {
// Check that nothing was allocated by the user and not freed.
std::set<MemInfoForSize *> unfreed_set;
typedef unordered_map<void *, MemInfoForSize *>::iterator IterType;
for (IterType iter = addr_to_list_.begin(); iter != addr_to_list_.end();
++iter)
unfreed_set.insert(iter->second);
for (std::set<MemInfoForSize *>::iterator iter = unfreed_set.begin();
iter != unfreed_set.end(); ++iter) {
MemInfoForSize *info = *iter;
KALDI_ASSERT(info->currently_used > 0); // Or should not be in this set
// (code error or memory corruption)
if (info->num_rows == 0) {
KALDI_WARN << info->currently_used << " memory chunks of size "
<< info->row_bytes << " were allocated and not freed.";
} else {
KALDI_WARN << info->currently_used << " memory chunks of size "
<< info->row_bytes << " per row, and " << info->num_rows
<< " rows, were allocated and not freed.";
}
}
bool destroy = true;
ReleaseAllCachedMemory(destroy);
}
void CuDevice::Free(void *ptr) { allocator_->Free(ptr); }
void *CuDevice::MallocPitch(size_t row_bytes, size_t num_rows, size_t *pitch) {
return allocator_->MallocPitch(row_bytes, num_rows, pitch);
}
void *CuDevice::Malloc(size_t size) { return allocator_->Malloc(size); }
void CuDevice::DisableCaching() { allocator_->DisableCaching(); }
CuDevice::CuDevice()
: active_gpu_id_(-1),
verbose_(true),
allocator_(new CuAllocator(CuAllocatorOptions(), this)) {}
CuDevice::~CuDevice() {
if (allocator_ != NULL) delete allocator_;
if (Enabled()) CU_SAFE_CALL(cublasShutdown());
}
// The instance of the static singleton
CuDevice CuDevice::global_device_;
} // namespace kaldi
#endif // HAVE_CUDA
<|start_filename|>tonic-suite/nlp/src/socket.h<|end_filename|>
#ifndef SOCKET_H
#define SOCKET_H
// returns socket to tx data
int CLIENT_init(char* hostname, int portno, bool debug);
// returns socket where to rx data
int SERVER_init(int portno);
// tx len of data
void SOCKET_txsize(int socket, int len);
// receive len of data
int SOCKET_rxsize(int socket);
// send data over socket
int SOCKET_send(int socket, char* data, int size, bool debug);
// receive data over socket
int SOCKET_receive(int socket, char* data, int size, bool debug);
// close the socket
int SOCKET_close(int socket, bool debug);
#endif
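// Illustrative client-side sequence (not part of the original header; based
// only on the declarations above, with host/port/buffers as placeholders and
// assuming a negative return from CLIENT_init signals failure):
//
//   int sock = CLIENT_init(host, 8080, false);
//   if (sock >= 0) {
//     SOCKET_txsize(sock, input_len);  // announce payload length
//     SOCKET_send(sock, input_buf, input_len, false);
//     SOCKET_receive(sock, output_buf, output_len, false);
//     SOCKET_close(sock, false);
//   }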
<|start_filename|>tonic-suite/asr/src/sgmm2bin/sgmm2-post-to-gpost.cc<|end_filename|>
// sgmm2bin/sgmm2-post-to-gpost.cc
// Copyright 2009-2012 Saarland University Microsoft Corporation
// Johns Hopkins University (Author: <NAME>)
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "sgmm2/am-sgmm2.h"
#include "hmm/transition-model.h"
#include "sgmm2/estimate-am-sgmm2.h"
#include "hmm/posterior.h"
int main(int argc, char *argv[]) {
using namespace kaldi;
try {
const char *usage =
"Convert posteriors to Gaussian-level posteriors for SGMM training.\n"
"Usage: sgmm2-post-to-gpost [options] <model-in> <feature-rspecifier> "
"<posteriors-rspecifier> <gpost-wspecifier>\n"
"e.g.: sgmm2-post-to-gpost 1.mdl 1.ali scp:train.scp 'ark:ali-to-post "
"ark:1.ali ark:-|' ark:-";
ParseOptions po(usage);
std::string gselect_rspecifier, spkvecs_rspecifier, utt2spk_rspecifier;
po.Register("gselect", &gselect_rspecifier,
"Precomputed Gaussian indices (rspecifier)");
po.Register("spk-vecs", &spkvecs_rspecifier,
"Speaker vectors (rspecifier)");
po.Register("utt2spk", &utt2spk_rspecifier,
"rspecifier for utterance to speaker map");
po.Read(argc, argv);
if (po.NumArgs() != 4) {
po.PrintUsage();
exit(1);
}
if (gselect_rspecifier == "") KALDI_ERR << "--gselect option is required";
std::string model_filename = po.GetArg(1),
feature_rspecifier = po.GetArg(2),
posteriors_rspecifier = po.GetArg(3),
gpost_wspecifier = po.GetArg(4);
using namespace kaldi;
typedef kaldi::int32 int32;
AmSgmm2 am_sgmm;
TransitionModel trans_model;
{
bool binary;
Input ki(model_filename, &binary);
trans_model.Read(ki.Stream(), binary);
am_sgmm.Read(ki.Stream(), binary);
}
double tot_like = 0.0;
kaldi::int64 tot_t = 0;
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
RandomAccessPosteriorReader posteriors_reader(posteriors_rspecifier);
RandomAccessInt32VectorVectorReader gselect_reader(gselect_rspecifier);
RandomAccessBaseFloatVectorReaderMapped spkvecs_reader(spkvecs_rspecifier,
utt2spk_rspecifier);
Sgmm2PerFrameDerivedVars per_frame_vars;
Sgmm2GauPostWriter gpost_writer(gpost_wspecifier);
int32 num_done = 0, num_err = 0;
for (; !feature_reader.Done(); feature_reader.Next()) {
const Matrix<BaseFloat> &mat = feature_reader.Value();
std::string utt = feature_reader.Key();
if (!posteriors_reader.HasKey(utt) ||
posteriors_reader.Value(utt).size() != mat.NumRows()) {
KALDI_WARN << "No posteriors available for utterance " << utt
<< " (or wrong size)";
num_err++;
continue;
}
Posterior posterior = posteriors_reader.Value(utt);
if (!gselect_reader.HasKey(utt) ||
gselect_reader.Value(utt).size() != mat.NumRows()) {
KALDI_WARN << "No Gaussian-selection info available for utterance "
<< utt << " (or wrong size)";
num_err++;
continue;
}
const std::vector<std::vector<int32> > &gselect =
gselect_reader.Value(utt);
Sgmm2PerSpkDerivedVars spk_vars;
if (spkvecs_reader.IsOpen()) {
if (spkvecs_reader.HasKey(utt)) {
spk_vars.SetSpeakerVector(spkvecs_reader.Value(utt));
am_sgmm.ComputePerSpkDerivedVars(&spk_vars);
} else {
KALDI_WARN << "Cannot find speaker vector for " << utt;
num_err++;
continue;
}
} // else spk_vars is "empty"
num_done++;
BaseFloat tot_like_this_file = 0.0, tot_weight = 0.0;
Sgmm2GauPost gpost(posterior.size()); // posterior.size() == T.
SortPosteriorByPdfs(trans_model, &posterior);
int32 prev_pdf_id = -1;
BaseFloat prev_like = 0;
Matrix<BaseFloat> prev_posterior;
for (size_t i = 0; i < posterior.size(); i++) {
am_sgmm.ComputePerFrameVars(mat.Row(i), gselect[i], spk_vars,
&per_frame_vars);
gpost[i].gselect = gselect[i];
gpost[i].tids.resize(posterior[i].size());
gpost[i].posteriors.resize(posterior[i].size());
prev_pdf_id = -1; // Only cache for the same frame.
for (size_t j = 0; j < posterior[i].size(); j++) {
int32 tid = posterior[i][j].first, // transition identifier.
pdf_id = trans_model.TransitionIdToPdf(tid);
BaseFloat weight = posterior[i][j].second;
gpost[i].tids[j] = tid;
if (pdf_id != prev_pdf_id) {
// First time see this pdf-id for this frame, update the cached
// variables.
prev_pdf_id = pdf_id;
prev_like = am_sgmm.ComponentPosteriors(per_frame_vars, pdf_id,
&spk_vars, &prev_posterior);
}
gpost[i].posteriors[j] = prev_posterior;
tot_like_this_file += prev_like * weight;
tot_weight += weight;
gpost[i].posteriors[j].Scale(weight);
}
}
KALDI_VLOG(2) << "Average like for this file is "
<< (tot_like_this_file / posterior.size()) << " over "
<< posterior.size() << " frames.";
tot_like += tot_like_this_file;
tot_t += posterior.size();
if (num_done % 10 == 0)
KALDI_LOG << "Avg like per frame so far is " << (tot_like / tot_t);
gpost_writer.Write(utt, gpost);
}
KALDI_LOG << "Overall like per frame (Gaussian only) = "
<< (tot_like / tot_t) << " over " << tot_t << " frames.";
KALDI_LOG << "Done " << num_done << " files, " << num_err
<< " with errors.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2/train-nnet.cc<|end_filename|>
// nnet2/train-nnet.cc
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet2/train-nnet.h"
namespace kaldi {
namespace nnet2 {
NnetSimpleTrainer::NnetSimpleTrainer(const NnetSimpleTrainerConfig &config,
Nnet *nnet)
: config_(config),
nnet_(nnet),
logprob_this_phase_(0.0),
weight_this_phase_(0.0),
logprob_total_(0.0),
weight_total_(0.0) {
num_phases_ = 0;
bool first_time = true;
BeginNewPhase(first_time);
}
void NnetSimpleTrainer::TrainOnExample(const NnetExample &value) {
buffer_.push_back(value);
if (static_cast<int32>(buffer_.size()) == config_.minibatch_size)
TrainOneMinibatch();
}
void NnetSimpleTrainer::TrainOneMinibatch() {
KALDI_ASSERT(!buffer_.empty());
// The following function is declared in nnet-update.h.
logprob_this_phase_ += DoBackprop(*nnet_, buffer_, nnet_);
weight_this_phase_ += TotalNnetTrainingWeight(buffer_);
buffer_.clear();
minibatches_seen_this_phase_++;
if (minibatches_seen_this_phase_ == config_.minibatches_per_phase) {
bool first_time = false;
BeginNewPhase(first_time);
}
}
void NnetSimpleTrainer::BeginNewPhase(bool first_time) {
if (!first_time)
KALDI_LOG << "Training objective function (this phase) is "
<< (logprob_this_phase_ / weight_this_phase_) << " over "
<< weight_this_phase_ << " frames.";
logprob_total_ += logprob_this_phase_;
weight_total_ += weight_this_phase_;
logprob_this_phase_ = 0.0;
weight_this_phase_ = 0.0;
minibatches_seen_this_phase_ = 0;
num_phases_++;
}
NnetSimpleTrainer::~NnetSimpleTrainer() {
if (!buffer_.empty()) {
KALDI_LOG << "Doing partial minibatch of size " << buffer_.size();
TrainOneMinibatch();
if (minibatches_seen_this_phase_ != 0) {
bool first_time = false;
BeginNewPhase(first_time);
}
}
if (weight_total_ == 0.0) {
KALDI_WARN << "No data seen.";
} else {
KALDI_LOG << "Did backprop on " << weight_total_
<< " examples, average log-prob per frame is "
<< (logprob_total_ / weight_total_);
KALDI_LOG << "[this line is to be parsed by a script:] log-prob-per-frame="
<< (logprob_total_ / weight_total_);
}
}
} // namespace nnet2
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-compute.cc<|end_filename|>
// nnet2/nnet-compute.cc
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet2/nnet-compute.h"
#include "hmm/posterior.h"
namespace kaldi {
namespace nnet2 {
/*
This class does the forward and possibly backward computation for (typically)
a whole utterance of contiguous features. You'll instantiate one of
these classes each time you want to do this computation.
*/
class NnetComputer {
public:
/* Initializer. If pad == true, pad input with nnet.LeftContext() frames on
the left and nnet.RightContext() frames on the right (duplicate the first
and last frames.) */
NnetComputer(const Nnet &nnet, const CuMatrixBase<BaseFloat> &input_feats,
bool pad, Nnet *nnet_to_update = NULL);
/// The forward-through-the-layers part of the computation.
void Propagate();
void Backprop(CuMatrix<BaseFloat> *tmp_deriv);
/// Computes objf derivative at last layer, and returns objective
/// function summed over labels and multiplied by utterance_weight.
/// [Note: utterance_weight will normally be 1.0].
BaseFloat ComputeLastLayerDeriv(const Posterior &pdf_post,
CuMatrix<BaseFloat> *deriv) const;
CuMatrixBase<BaseFloat> &GetOutput() { return forward_data_.back(); }
private:
const Nnet &nnet_;
std::vector<CuMatrix<BaseFloat> > forward_data_;
Nnet *nnet_to_update_; // May be NULL, if just want objective function
// but no gradient info or SGD.
};
NnetComputer::NnetComputer(const Nnet &nnet,
const CuMatrixBase<BaseFloat> &input_feats, bool pad,
Nnet *nnet_to_update)
: nnet_(nnet), nnet_to_update_(nnet_to_update) {
int32 dim = input_feats.NumCols();
KALDI_ASSERT(dim == nnet.InputDim());
forward_data_.resize(nnet.NumComponents() + 1);
int32 left_context = (pad ? nnet_.LeftContext() : 0),
right_context = (pad ? nnet_.RightContext() : 0);
int32 num_rows = left_context + input_feats.NumRows() + right_context;
CuMatrix<BaseFloat> &input(forward_data_[0]);
input.Resize(num_rows, dim);
input.Range(left_context, input_feats.NumRows(), 0, dim)
.CopyFromMat(input_feats);
for (int32 i = 0; i < left_context; i++)
input.Row(i).CopyFromVec(input_feats.Row(0));
int32 last_row = input_feats.NumRows() - 1;
for (int32 i = 0; i < right_context; i++)
input.Row(num_rows - i - 1).CopyFromVec(input_feats.Row(last_row));
}
/// This is the forward part of the computation.
void NnetComputer::Propagate() {
for (int32 c = 0; c < nnet_.NumComponents(); c++) {
const Component &component = nnet_.GetComponent(c);
CuMatrix<BaseFloat> &input = forward_data_[c],
&output = forward_data_[c + 1];
component.Propagate(input, 1, &output);
const Component *prev_component =
(c == 0 ? NULL : &(nnet_.GetComponent(c - 1)));
bool will_do_backprop = (nnet_to_update_ != NULL),
keep_last_output = will_do_backprop &&
((c > 0 && prev_component->BackpropNeedsOutput()) ||
component.BackpropNeedsInput());
if (!keep_last_output)
forward_data_[c].Resize(0, 0); // We won't need this data; save memory.
}
}
BaseFloat NnetComputer::ComputeLastLayerDeriv(
const Posterior &pdf_post, CuMatrix<BaseFloat> *deriv) const {
// TODO: convert this to proper CUDA code, c.f. ComputeObjfAndDeriv
// in nnet-update.cc (I'm not sure, though, that this code is ever reached.)
int32 num_components = nnet_.NumComponents();
double tot_objf = 0.0, tot_weight = 0.0;
const CuMatrix<BaseFloat> &last_layer_output = forward_data_[num_components];
int32 num_frames = last_layer_output.NumRows(),
num_pdfs = last_layer_output.NumCols();
KALDI_ASSERT(pdf_post.size() == static_cast<size_t>(num_frames));
deriv->Resize(num_frames, num_pdfs); // will zero it.
for (int32 i = 0; i < deriv->NumRows(); i++) {
for (size_t j = 0; j < pdf_post[i].size(); j++) {
int32 label = pdf_post[i][j].first;
BaseFloat weight = pdf_post[i][j].second;
KALDI_ASSERT(label >= 0 && label < num_pdfs);
BaseFloat this_prob = last_layer_output(i, label);
KALDI_ASSERT(this_prob >
0.99e-20); // We floored to 1.0e-20 in SoftmaxLayer.
tot_objf += weight * log(this_prob);
tot_weight += weight;
(*deriv)(i, label) += weight / this_prob; // could be "=", assuming the
// labels are all distinct.
}
}
KALDI_VLOG(4) << "Objective function is " << (tot_objf / tot_weight)
<< " per frame over " << tot_weight << " samples.";
return tot_objf;
}
void NnetComputer::Backprop(CuMatrix<BaseFloat> *tmp_deriv) {
KALDI_ASSERT(nnet_to_update_ != NULL); // Or why do backprop?
// If later this reasoning changes, we can change this
// statement and add logic to make component_to_update, below,
// NULL if necessary.
int32 num_chunks = 1;
for (int32 c = nnet_.NumComponents() - 1; c >= 0; c--) {
const Component &component = nnet_.GetComponent(c);
Component *component_to_update = &(nnet_to_update_->GetComponent(c));
const CuMatrix<BaseFloat> &input = forward_data_[c],
&output = forward_data_[c + 1],
&output_deriv = *tmp_deriv;
CuMatrix<BaseFloat> input_deriv;
component.Backprop(input, output, output_deriv, num_chunks,
component_to_update, &input_deriv);
*tmp_deriv = input_deriv;
}
}
void NnetComputation(const Nnet &nnet,
const CuMatrixBase<BaseFloat> &input, // features
bool pad_input, CuMatrixBase<BaseFloat> *output) {
NnetComputer nnet_computer(nnet, input, pad_input, NULL);
nnet_computer.Propagate();
output->CopyFromMat(nnet_computer.GetOutput());
}
BaseFloat NnetGradientComputation(const Nnet &nnet,
const CuMatrixBase<BaseFloat> &input,
bool pad_input, const Posterior &pdf_post,
Nnet *nnet_to_update) {
NnetComputer nnet_computer(nnet, input, pad_input, nnet_to_update);
nnet_computer.Propagate();
CuMatrix<BaseFloat> deriv;
BaseFloat ans;
ans = nnet_computer.ComputeLastLayerDeriv(pdf_post, &deriv);
nnet_computer.Backprop(&deriv);
return ans;
}
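// Illustrative forward-pass sketch (not part of the original file; 'nnet' and
// 'host_feats' are placeholders). With pad_input == true the input is padded
// by the network's left/right context, so the output is expected to have one
// row per input frame and must be sized by the caller:
//
//   CuMatrix<BaseFloat> feats(host_feats.NumRows(), nnet.InputDim());
//   feats.CopyFromMat(host_feats); // host -> device
//   CuMatrix<BaseFloat> output(host_feats.NumRows(), nnet.OutputDim());
//   NnetComputation(nnet, feats, true /* pad_input */, &output);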
} // namespace nnet2
} // namespace kaldi
<|start_filename|>tonic-suite/nlp/src/SENNA_PT0.h<|end_filename|>
#ifndef SENNA_PT0_H
#define SENNA_PT0_H
typedef struct SENNA_PT0_ {
/* sizes */
int window_size;
int ll_word_size;
int ll_word_max_idx;
int ll_caps_size;
int ll_caps_max_idx;
int ll_posl_size;
int ll_posl_max_idx;
int input_state_size;
int hidden_state_size;
int output_state_size;
/* weights */
float *ll_word_weight;
float *ll_caps_weight;
float *ll_posl_weight;
float *l1_weight;
float *l1_bias;
float *l2_weight;
float *l2_bias;
float *viterbi_score_init;
float *viterbi_score_trans;
/* states */
float *input_state;
float *hidden_state;
float *output_state;
int *labels;
/* padding indices */
int ll_word_padding_idx;
int ll_caps_padding_idx;
int ll_posl_padding_idx;
bool service;
bool debug;
/* internal socket */
int socketfd;
/* profiling */
int calls;
unsigned int apptime;
unsigned int dnntime;
} SENNA_PT0;
SENNA_PT0 *SENNA_PT0_new(const char *path, const char *subpath);
int *SENNA_PT0_forward(SENNA_PT0 *pt0, const int *sentence_words,
const int *sentence_caps, const int *sentence_posl,
int sentence_size, int socketfd);
void SENNA_PT0_free(SENNA_PT0 *pt0);
#endif
<|start_filename|>tonic-suite/asr/src/featbin/extend-transform-dim.cc<|end_filename|>
// featbin/extend-transform-dim.cc
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/kaldi-matrix.h"
#include "transform/transform-common.h"
namespace kaldi {
void IncreaseTransformDimension(int32 new_dimension, Matrix<BaseFloat> *mat) {
int32 d = mat->NumRows();
if (new_dimension < d)
KALDI_ERR << "--new-dimension argument invalid or not specified: "
<< new_dimension << " < " << d;
if (mat->NumCols() == d) { // linear transform d->d
mat->Resize(new_dimension, new_dimension, kCopyData);
for (int32 i = d; i < new_dimension; i++)
(*mat)(i, i) = 1.0; // set new dims to unit matrix.
} else if (mat->NumCols() == d + 1) { // affine transform d->d.
Vector<BaseFloat> offset(mat->NumRows());
offset.CopyColFromMat(*mat, d);
mat->Resize(d, d, kCopyData); // remove the offset column from *mat.
mat->Resize(new_dimension, new_dimension + 1,
kCopyData); // extend with zeros.
for (int32 i = d; i < new_dimension; i++)
(*mat)(i, i) = 1.0; // set new dims to unit matrix.
for (int32 i = 0; i < d; i++) // and set offset [last column]
(*mat)(i, new_dimension) = offset(i);
} else {
KALDI_ERR << "Input matrix has unexpected dimension " << d << " x "
<< mat->NumCols();
}
}
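// Worked example (illustrative): extending an affine 2->2 transform, stored as
// the 2x3 matrix [A | b], to new_dimension = 3 yields a 3x4 matrix in which A
// stays in the top-left block, the new dimension gets a unit row, and the
// offset b is moved to the last column:
//
//   [ a11 a12 | b1 ]        [ a11 a12  0 | b1 ]
//   [ a21 a22 | b2 ]  --->  [ a21 a22  0 | b2 ]
//                           [  0   0   1 |  0 ]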
} // end namespace kaldi
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Read in transform from dimension d -> d (affine or linear), and "
"output a transform\n"
"from dimension e -> e (with e >= d, and e controlled by option "
"--new-dimension).\n"
"This new transform will leave the extra dimension unaffected, and "
"transform the old\n"
"dimensions in the same way.\n"
"Usage: extend-transform-dim [options] "
"(transform-A-rspecifier|transform-A-rxfilename) "
"(transform-out-wspecifier|transform-out-wxfilename)\n"
"E.g.: extend-transform-dim --new-dimension=117 in.mat big.mat\n";
bool binary = true;
int32 new_dimension = -1;
ParseOptions po(usage);
po.Register(
"binary", &binary,
"Write in binary mode (only relevant if output is a wxfilename)");
po.Register("new-dimension", &new_dimension,
"Larger dimension we are changing matrix to");
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
std::string transform_in_fn = po.GetArg(1);
std::string transform_out_fn = po.GetArg(2);
// all these "fn"'s are either rspecifiers or filenames.
bool in_is_rspecifier =
(ClassifyRspecifier(transform_in_fn, NULL, NULL) != kNoRspecifier),
out_is_wspecifier = (ClassifyWspecifier(transform_out_fn, NULL, NULL,
NULL) != kNoWspecifier);
if (in_is_rspecifier != out_is_wspecifier)
KALDI_ERR
<< "Either none or both of the (input, output) must be a Table.";
if (in_is_rspecifier) {
SequentialBaseFloatMatrixReader reader(transform_in_fn);
BaseFloatMatrixWriter writer(transform_out_fn);
int32 num_done = 0;
for (; !reader.Done(); reader.Next()) {
std::string key = reader.Key();
Matrix<BaseFloat> mat(reader.Value());
IncreaseTransformDimension(new_dimension, &mat);
writer.Write(key, mat);
num_done++;
}
KALDI_LOG << "Increased transform dim to " << new_dimension << " for "
<< num_done << " matrices.";
return (num_done != 0 ? 0 : 1);
} else {
Matrix<BaseFloat> mat;
ReadKaldiObject(transform_in_fn, &mat);
int32 old_dim = mat.NumRows();
IncreaseTransformDimension(new_dimension, &mat);
WriteKaldiObject(mat, transform_out_fn, binary);
KALDI_LOG << "Increased transform dim from " << old_dim << " to "
<< mat.NumRows() << " and wrote to " << transform_out_fn;
return 0;
}
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/nlp/src/SENNA_POS.h<|end_filename|>
#ifndef SENNA_POS_H
#define SENNA_POS_H
#include "tonic.h"
typedef struct SENNA_POS_ {
/* sizes */
int window_size;
int ll_word_size;
int ll_word_max_idx;
int ll_caps_size;
int ll_caps_max_idx;
int ll_suff_size;
int ll_suff_max_idx;
int input_state_size;
int hidden_state_size;
int output_state_size;
/* weights */
float *ll_word_weight;
float *ll_caps_weight;
float *ll_suff_weight;
float *l1_weight;
float *l1_bias;
float *l2_weight;
float *l2_bias;
float *viterbi_score_init;
float *viterbi_score_trans;
/* states */
float *input_state;
float *hidden_state;
float *output_state;
int *labels;
/* padding indices */
int ll_word_padding_idx;
int ll_caps_padding_idx;
int ll_suff_padding_idx;
} SENNA_POS;
SENNA_POS *SENNA_POS_new(const char *path, const char *subpath);
int *SENNA_POS_forward(SENNA_POS *pos, const int *sentence_words,
const int *sentence_caps, const int *sentence_suff,
TonicSuiteApp app);
void SENNA_POS_free(SENNA_POS *pos);
#endif
<|start_filename|>tonic-suite/asr/src/makefiles/linux_atlas_64bit.mk<|end_filename|>
# This version is specialized for 64-bit cross-compilation on BUT machines.
# The configure script will not pick it up automatically.
#
# To use it, from src/ run in bash:
#
# cat makefiles/common.mk makefiles/linux_atlas_64bit.mk > kaldi.mk
# echo "CUDA = true" >> kaldi.mk
# echo "CUDATKDIR = /usr/local/share/cuda" >> kaldi.mk
# cat makefiles/linux_x86_64_cuda.mk >> kaldi.mk
#
# Note that for 64bit compilation of kaldi,
# you need to compile 64bit OpenFST first.
#
### You need to set KALDI_ROOT manually
KALDI_ROOT=/mnt/matylda5/iveselyk/DEVEL/kaldi/trunk
###
FSTROOT = $(KALDI_ROOT)/tools/openfst
ATLASINC = $(KALDI_ROOT)/tools/ATLAS/include
ATLASLIBS = /usr/local/lib64/liblapack.a /usr/local/lib64/libcblas.a /usr/local/lib64/libatlas.a /usr/local/lib64/libf77blas.a
# You have to make sure ATLASLIBS is set...
ifndef FSTROOT
$(error FSTROOT not defined.)
endif
ifndef ATLASINC
$(error ATLASINC not defined.)
endif
ifndef ATLASLIBS
$(error ATLASLIBS not defined.)
endif
CXXFLAGS = -msse -msse2 -Wall -I.. -pthread \
-DKALDI_DOUBLEPRECISION=0 -DHAVE_POSIX_MEMALIGN \
-Wno-sign-compare -Winit-self \
-DHAVE_EXECINFO_H=1 -rdynamic -DHAVE_CXXABI_H \
-DHAVE_ATLAS -I$(ATLASINC) \
-I$(FSTROOT)/include \
$(EXTRA_CXXFLAGS) \
-g # -O0 -DKALDI_PARANOID
ifeq ($(KALDI_FLAVOR), dynamic)
CXXFLAGS += -fPIC
endif
LDFLAGS = -rdynamic $(OPENFSTLDFLAGS)
LDLIBS = $(EXTRA_LDLIBS) $(OPENFSTLIBS) $(ATLASLIBS) -lm -lpthread -ldl
CC = x86_64-linux-g++
CXX = x86_64-linux-g++
AR = x86_64-linux-ar
AS = x86_64-linux-as
RANLIB = x86_64-linux-ranlib
<|start_filename|>tonic-suite/asr/src/gmm/model-common.cc<|end_filename|>
// gmm/model-common.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "matrix/matrix-lib.h"
#include "gmm/model-common.h"
#include <queue>
#include <numeric>
namespace kaldi {
GmmFlagsType StringToGmmFlags(std::string str) {
GmmFlagsType flags = 0;
for (const char *c = str.c_str(); *c != '\0'; c++) {
switch (*c) {
case 'm':
flags |= kGmmMeans;
break;
case 'v':
flags |= kGmmVariances;
break;
case 'w':
flags |= kGmmWeights;
break;
case 't':
flags |= kGmmTransitions;
break;
case 'a':
flags |= kGmmAll;
break;
default:
KALDI_ERR << "Invalid element " << CharToString(*c)
<< " of GmmFlagsType option string " << str;
}
}
return flags;
}
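// For example (illustrative), StringToGmmFlags("mvw") returns
// kGmmMeans | kGmmVariances | kGmmWeights, StringToGmmFlags("a") returns
// kGmmAll, and any other character triggers KALDI_ERR.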
std::string GmmFlagsToString(GmmFlagsType flags) {
std::string ans;
if (flags & kGmmMeans) ans += "m";
if (flags & kGmmVariances) ans += "v";
if (flags & kGmmWeights) ans += "w";
if (flags & kGmmTransitions) ans += "t";
return ans;
}
GmmFlagsType AugmentGmmFlags(GmmFlagsType flags) {
KALDI_ASSERT((flags & ~kGmmAll) ==
0); // make sure only valid flags are present.
if (flags & kGmmVariances) flags |= kGmmMeans;
if (flags & kGmmMeans) flags |= kGmmWeights;
if (!(flags & kGmmWeights)) {
KALDI_WARN << "Adding in kGmmWeights (\"w\") to empty flags.";
flags |= kGmmWeights; // Just add this in regardless:
// if user wants no stats, this will stop programs from crashing due to dim
// mismatches.
}
return flags;
}
SgmmUpdateFlagsType StringToSgmmUpdateFlags(std::string str) {
SgmmUpdateFlagsType flags = 0;
for (const char *c = str.c_str(); *c != '\0'; c++) {
switch (*c) {
case 'v':
flags |= kSgmmPhoneVectors;
break;
case 'M':
flags |= kSgmmPhoneProjections;
break;
case 'w':
flags |= kSgmmPhoneWeightProjections;
break;
case 'S':
flags |= kSgmmCovarianceMatrix;
break;
case 'c':
flags |= kSgmmSubstateWeights;
break;
case 'N':
flags |= kSgmmSpeakerProjections;
break;
case 't':
flags |= kSgmmTransitions;
break;
case 'u':
flags |= kSgmmSpeakerWeightProjections;
break;
case 'a':
flags |= kSgmmAll;
break;
default:
KALDI_ERR << "Invalid element " << CharToString(*c)
<< " of SgmmUpdateFlagsType option string " << str;
}
}
return flags;
}
SgmmUpdateFlagsType StringToSgmmWriteFlags(std::string str) {
SgmmWriteFlagsType flags = 0;
for (const char *c = str.c_str(); *c != '\0'; c++) {
switch (*c) {
case 'g':
flags |= kSgmmGlobalParams;
break;
case 's':
flags |= kSgmmStateParams;
break;
case 'n':
flags |= kSgmmNormalizers;
break;
case 'u':
flags |= kSgmmBackgroundGmms;
break;
case 'a':
flags |= kSgmmAll;
break;
default:
KALDI_ERR << "Invalid element " << CharToString(*c)
<< " of SgmmWriteFlagsType option string " << str;
}
}
return flags;
}
struct CountStats {
CountStats(int32 p, int32 n, BaseFloat occ)
: pdf_index(p), num_components(n), occupancy(occ) {}
int32 pdf_index;
int32 num_components;
BaseFloat occupancy;
bool operator<(const CountStats &other) const {
return occupancy / (num_components + 1.0e-10) <
other.occupancy / (other.num_components + 1.0e-10);
}
};
void GetSplitTargets(const Vector<BaseFloat> &state_occs,
int32 target_components, BaseFloat power,
BaseFloat min_count, std::vector<int32> *targets) {
std::priority_queue<CountStats> split_queue;
int32 num_pdfs = state_occs.Dim();
for (int32 pdf_index = 0; pdf_index < num_pdfs; pdf_index++) {
BaseFloat occ = pow(state_occs(pdf_index), power);
// initialize with one Gaussian per PDF, to put a floor
// of 1 on the #Gauss
split_queue.push(CountStats(pdf_index, 1, occ));
}
for (int32 num_gauss = num_pdfs; num_gauss < target_components;) {
CountStats state_to_split = split_queue.top();
if (state_to_split.occupancy == 0) {
KALDI_WARN << "Could not split up to " << target_components
<< " due to min-count = " << min_count
<< " (or no counts at all)\n";
break;
}
split_queue.pop();
BaseFloat orig_occ = state_occs(state_to_split.pdf_index);
if ((state_to_split.num_components + 1) * min_count >= orig_occ) {
state_to_split.occupancy = 0; // min-count active -> disallow splitting
// this state any more by setting occupancy = 0.
} else {
state_to_split.num_components++;
num_gauss++;
}
split_queue.push(state_to_split);
}
targets->resize(num_pdfs);
while (!split_queue.empty()) {
int32 pdf_index = split_queue.top().pdf_index;
int32 pdf_tgt_comp = split_queue.top().num_components;
(*targets)[pdf_index] = pdf_tgt_comp;
split_queue.pop();
}
}
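// Worked example (illustrative) of the greedy splitting above: with
// state_occs = [100, 10], target_components = 5, power = 1.0 and
// min_count = 10, pdf 0 always has the larger occupancy-per-component ratio
// (100, then 50, then ~33), and the min-count test
// (num_components + 1) * min_count >= 100 never triggers, so the result is
// targets = [4, 1].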
} // End namespace kaldi
<|start_filename|>tonic-suite/asr/src/sgmm2/am-sgmm2.cc<|end_filename|>
// sgmm2/am-sgmm2.cc
// Copyright 2009-2011 Microsoft Corporation; <NAME>;
// Saarland University (Author: <NAME>);
// <NAME>; <NAME>;
// Copyright 2012-2013 Johns Hopkins University (Author: <NAME>)
// <NAME>; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <functional>
#include "sgmm2/am-sgmm2.h"
#include "thread/kaldi-thread.h"
namespace kaldi {
using std::vector;
// This function needs to be added because std::generate is complaining
// about RandGauss(), which takes an optional argument.
static inline float _RandGauss() { return RandGauss(); }
void Sgmm2LikelihoodCache::NextFrame() {
t++;
if (t == 0) {
t++; // skip over zero; zero is used to invalidate frames.
for (size_t i = 0; i < substate_cache.size(); i++) substate_cache[i].t = 0;
for (size_t i = 0; i < pdf_cache.size(); i++) pdf_cache[i].t = 0;
}
}
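// Illustrative usage of the cache (not from the original source; it assumes
// Sgmm2LikelihoodCache provides a (num_groups, num_pdfs) constructor).
// Decoding code is expected to call NextFrame() once per acoustic frame so
// that likelihoods cached for the previous frame are not reused:
//
//   Sgmm2LikelihoodCache cache(am_sgmm.NumGroups(), am_sgmm.NumPdfs());
//   for (int32 frame = 0; frame < num_frames; frame++) {
//     cache.NextFrame();
//     // ... am_sgmm.LogLikelihood(per_frame_vars, j2, &cache, &spk_vars) ...
//   }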
void AmSgmm2::ComputeGammaI(const Vector<BaseFloat> &state_occupancies,
Vector<BaseFloat> *gamma_i) const {
KALDI_ASSERT(state_occupancies.Dim() == NumPdfs());
Vector<BaseFloat> w_jm(NumGauss());
gamma_i->Resize(NumGauss());
for (int32 j1 = 0; j1 < NumGroups(); j1++) {
int32 M = NumSubstatesForGroup(j1);
const std::vector<int32> &pdfs = group2pdf_[j1];
Vector<BaseFloat> substate_weight(M); // total weight for each substate.
for (size_t i = 0; i < pdfs.size(); i++) {
int32 j2 = pdfs[i];
substate_weight.AddVec(state_occupancies(j2), c_[j2]);
}
for (int32 m = 0; m < M; m++) {
w_jm.AddMatVec(1.0, w_, kNoTrans, v_[j1].Row(m), 0.0);
w_jm.ApplySoftMax();
gamma_i->AddVec(substate_weight(m), w_jm);
}
}
}
void AmSgmm2::ComputePdfMappings() {
if (pdf2group_.empty()) {
KALDI_WARN << "ComputePdfMappings(): no pdf2group_ map, assuming you "
"are reading in old model.";
KALDI_ASSERT(v_.size() != 0);
pdf2group_.resize(v_.size());
for (int32 j2 = 0; j2 < static_cast<int32>(pdf2group_.size()); j2++)
pdf2group_[j2] = j2;
}
group2pdf_.clear();
for (int32 j2 = 0; j2 < static_cast<int32>(pdf2group_.size()); j2++) {
int32 j1 = pdf2group_[j2];
if (group2pdf_.size() <= j1) group2pdf_.resize(j1 + 1);
group2pdf_[j1].push_back(j2);
}
}
void AmSgmm2::Read(std::istream &in_stream, bool binary) {
{ // We want this to work even if the object was previously
// populated, so we clear the items that are more likely
// to cause problems.
pdf2group_.clear();
group2pdf_.clear();
u_.Resize(0, 0);
w_jmi_.clear();
v_.clear();
}
// removing anything that was in the object before.
int32 num_pdfs = -1, feat_dim, num_gauss;
std::string token;
ExpectToken(in_stream, binary, "<SGMM>");
ExpectToken(in_stream, binary, "<NUMSTATES>");
ReadBasicType(in_stream, binary, &num_pdfs);
ExpectToken(in_stream, binary, "<DIMENSION>");
ReadBasicType(in_stream, binary, &feat_dim);
ExpectToken(in_stream, binary, "<NUMGAUSS>");
ReadBasicType(in_stream, binary, &num_gauss);
KALDI_ASSERT(num_pdfs > 0 && feat_dim > 0);
ReadToken(in_stream, binary, &token);
while (token != "</SGMM>") {
if (token == "<PDF2GROUP>") {
ReadIntegerVector(in_stream, binary, &pdf2group_);
ComputePdfMappings();
} else if (token == "<WEIGHTIDX2GAUSS>") { // TEMP! Will remove.
std::vector<int32> garbage;
ReadIntegerVector(in_stream, binary, &garbage);
} else if (token == "<DIAG_UBM>") {
diag_ubm_.Read(in_stream, binary);
} else if (token == "<FULL_UBM>") {
full_ubm_.Read(in_stream, binary);
} else if (token == "<SigmaInv>") {
SigmaInv_.resize(num_gauss);
for (int32 i = 0; i < num_gauss; i++) {
SigmaInv_[i].Read(in_stream, binary);
}
} else if (token == "<M>") {
M_.resize(num_gauss);
for (int32 i = 0; i < num_gauss; i++) {
M_[i].Read(in_stream, binary);
}
} else if (token == "<N>") {
N_.resize(num_gauss);
for (int32 i = 0; i < num_gauss; i++) {
N_[i].Read(in_stream, binary);
}
} else if (token == "<w>") {
w_.Read(in_stream, binary);
} else if (token == "<u>") {
u_.Read(in_stream, binary);
} else if (token == "<v>") {
int32 num_groups = group2pdf_.size();
if (num_groups == 0) {
KALDI_WARN << "Reading old model with new code (should still work)";
num_groups = num_pdfs;
}
v_.resize(num_groups);
for (int32 j1 = 0; j1 < num_groups; j1++) {
v_[j1].Read(in_stream, binary);
}
} else if (token == "<c>") {
c_.resize(num_pdfs);
for (int32 j2 = 0; j2 < num_pdfs; j2++) {
c_[j2].Read(in_stream, binary);
}
} else if (token == "<n>") {
int32 num_groups = group2pdf_.size();
if (num_groups == 0) num_groups = num_pdfs;
n_.resize(num_groups);
for (int32 j1 = 0; j1 < num_groups; j1++) {
n_[j1].Read(in_stream, binary);
}
// The following are the Gaussian prior parameters for MAP adaptation of M
// They may be moved to somewhere else eventually.
} else if (token == "<M_Prior>") {
ExpectToken(in_stream, binary, "<NUMGaussians>");
ReadBasicType(in_stream, binary, &num_gauss);
M_prior_.resize(num_gauss);
for (int32 i = 0; i < num_gauss; i++) {
M_prior_[i].Read(in_stream, binary);
}
} else if (token == "<Row_Cov_Inv>") {
row_cov_inv_.Read(in_stream, binary);
} else if (token == "<Col_Cov_Inv>") {
col_cov_inv_.Read(in_stream, binary);
} else {
KALDI_ERR << "Unexpected token '" << token << "' in model file ";
}
ReadToken(in_stream, binary, &token);
}
if (pdf2group_.empty())
ComputePdfMappings(); // sets up group2pdf_, and pdf2group_ if reading
// old model.
if (n_.empty()) ComputeNormalizers();
if (HasSpeakerDependentWeights()) ComputeWeights();
}
int32 AmSgmm2::Pdf2Group(int32 j2) const {
KALDI_ASSERT(static_cast<size_t>(j2) < pdf2group_.size());
int32 j1 = pdf2group_[j2];
return j1;
}
void AmSgmm2::Write(std::ostream &out_stream, bool binary,
SgmmWriteFlagsType write_params) const {
int32 num_pdfs = NumPdfs(), feat_dim = FeatureDim(), num_gauss = NumGauss();
WriteToken(out_stream, binary, "<SGMM>");
if (!binary) out_stream << "\n";
WriteToken(out_stream, binary, "<NUMSTATES>");
WriteBasicType(out_stream, binary, num_pdfs);
WriteToken(out_stream, binary, "<DIMENSION>");
WriteBasicType(out_stream, binary, feat_dim);
WriteToken(out_stream, binary, "<NUMGAUSS>");
WriteBasicType(out_stream, binary, num_gauss);
if (!binary) out_stream << "\n";
if (write_params & kSgmmBackgroundGmms) {
WriteToken(out_stream, binary, "<DIAG_UBM>");
diag_ubm_.Write(out_stream, binary);
WriteToken(out_stream, binary, "<FULL_UBM>");
full_ubm_.Write(out_stream, binary);
}
if (write_params & kSgmmGlobalParams) {
WriteToken(out_stream, binary, "<SigmaInv>");
if (!binary) out_stream << "\n";
for (int32 i = 0; i < num_gauss; i++) {
SigmaInv_[i].Write(out_stream, binary);
}
WriteToken(out_stream, binary, "<M>");
if (!binary) out_stream << "\n";
for (int32 i = 0; i < num_gauss; i++) {
M_[i].Write(out_stream, binary);
}
if (N_.size() != 0) {
WriteToken(out_stream, binary, "<N>");
if (!binary) out_stream << "\n";
for (int32 i = 0; i < num_gauss; i++) {
N_[i].Write(out_stream, binary);
}
}
WriteToken(out_stream, binary, "<w>");
w_.Write(out_stream, binary);
WriteToken(out_stream, binary, "<u>");
u_.Write(out_stream, binary);
}
if (write_params & kSgmmStateParams) {
WriteToken(out_stream, binary, "<PDF2GROUP>");
WriteIntegerVector(out_stream, binary, pdf2group_);
WriteToken(out_stream, binary, "<v>");
for (int32 j1 = 0; j1 < NumGroups(); j1++) {
v_[j1].Write(out_stream, binary);
}
WriteToken(out_stream, binary, "<c>");
for (int32 j2 = 0; j2 < num_pdfs; j2++) {
c_[j2].Write(out_stream, binary);
}
}
if (write_params & kSgmmNormalizers) {
WriteToken(out_stream, binary, "<n>");
if (n_.empty())
KALDI_WARN << "Not writing normalizers since they are not present.";
else
for (int32 j1 = 0; j1 < NumGroups(); j1++)
n_[j1].Write(out_stream, binary);
}
WriteToken(out_stream, binary, "</SGMM>");
}
void AmSgmm2::Check(bool show_properties) {
int32 J1 = NumGroups(), J2 = NumPdfs(), num_gauss = NumGauss(),
feat_dim = FeatureDim(), phn_dim = PhoneSpaceDim(),
spk_dim = SpkSpaceDim();
if (show_properties)
KALDI_LOG << "AmSgmm2: #pdfs = " << J2 << ", #pdf-groups = " << J1
<< ", #Gaussians = " << num_gauss
<< ", feature dim = " << feat_dim
<< ", phone-space dim =" << phn_dim
<< ", speaker-space dim =" << spk_dim;
KALDI_ASSERT(J1 > 0 && num_gauss > 0 && feat_dim > 0 && phn_dim > 0 &&
J2 > 0 && J2 >= J1);
std::ostringstream debug_str;
// First check the diagonal-covariance UBM.
KALDI_ASSERT(diag_ubm_.NumGauss() == num_gauss);
KALDI_ASSERT(diag_ubm_.Dim() == feat_dim);
// Check the full-covariance UBM.
KALDI_ASSERT(full_ubm_.NumGauss() == num_gauss);
KALDI_ASSERT(full_ubm_.Dim() == feat_dim);
// Check the globally-shared covariance matrices.
KALDI_ASSERT(SigmaInv_.size() == static_cast<size_t>(num_gauss));
for (int32 i = 0; i < num_gauss; i++) {
KALDI_ASSERT(SigmaInv_[i].NumRows() == feat_dim &&
SigmaInv_[i](0, 0) > 0.0); // or it wouldn't be +ve definite.
}
if (spk_dim != 0) {
KALDI_ASSERT(N_.size() == static_cast<size_t>(num_gauss));
for (int32 i = 0; i < num_gauss; i++)
KALDI_ASSERT(N_[i].NumRows() == feat_dim && N_[i].NumCols() == spk_dim);
if (u_.NumRows() == 0) {
debug_str << "Speaker-weight projections: no.";
} else {
KALDI_ASSERT(u_.NumRows() == num_gauss && u_.NumCols() == spk_dim);
debug_str << "Speaker-weight projections: yes.";
}
} else {
KALDI_ASSERT(N_.size() == 0 && u_.NumRows() == 0);
}
KALDI_ASSERT(M_.size() == static_cast<size_t>(num_gauss));
for (int32 i = 0; i < num_gauss; i++) {
KALDI_ASSERT(M_[i].NumRows() == feat_dim && M_[i].NumCols() == phn_dim);
}
KALDI_ASSERT(w_.NumRows() == num_gauss && w_.NumCols() == phn_dim);
{ // check v, c.
KALDI_ASSERT(v_.size() == static_cast<size_t>(J1) &&
c_.size() == static_cast<size_t>(J2));
int32 nSubstatesTot = 0;
for (int32 j1 = 0; j1 < J1; j1++) {
int32 M_j = NumSubstatesForGroup(j1);
nSubstatesTot += M_j;
KALDI_ASSERT(M_j > 0 && v_[j1].NumRows() == M_j &&
v_[j1].NumCols() == phn_dim);
}
debug_str << "Substates: " << (nSubstatesTot) << ". ";
int32 nSubstateWeights = 0;
for (int32 j2 = 0; j2 < J2; j2++) {
int32 j1 = Pdf2Group(j2);
int32 M = NumSubstatesForPdf(j2);
KALDI_ASSERT(M == NumSubstatesForGroup(j1));
nSubstateWeights += M;
}
KALDI_ASSERT(nSubstateWeights >= nSubstatesTot);
debug_str << "SubstateWeights: " << (nSubstateWeights) << ". ";
}
// check normalizers.
if (n_.size() == 0) {
debug_str << "Normalizers: no. ";
} else {
debug_str << "Normalizers: yes. ";
KALDI_ASSERT(n_.size() == static_cast<size_t>(J1));
for (int32 j1 = 0; j1 < J1; j1++) {
KALDI_ASSERT(n_[j1].NumRows() == num_gauss &&
n_[j1].NumCols() == NumSubstatesForGroup(j1));
}
}
// check w_jmi_.
if (w_jmi_.size() == 0) {
debug_str << "Computed weights: no. ";
} else {
debug_str << "Computed weights: yes. ";
KALDI_ASSERT(w_jmi_.size() == static_cast<size_t>(J1));
for (int32 j1 = 0; j1 < J1; j1++) {
KALDI_ASSERT(w_jmi_[j1].NumRows() == NumSubstatesForGroup(j1) &&
w_jmi_[j1].NumCols() == num_gauss);
}
}
if (show_properties)
KALDI_LOG << "Subspace GMM model properties: " << debug_str.str();
}
void AmSgmm2::InitializeFromFullGmm(const FullGmm &full_gmm,
const std::vector<int32> &pdf2group,
int32 phn_subspace_dim,
int32 spk_subspace_dim,
bool speaker_dependent_weights,
BaseFloat self_weight) {
pdf2group_ = pdf2group;
ComputePdfMappings();
full_ubm_.CopyFromFullGmm(full_gmm);
diag_ubm_.CopyFromFullGmm(full_gmm);
if (phn_subspace_dim < 1 || phn_subspace_dim > full_gmm.Dim() + 1) {
KALDI_WARN << "Initial phone-subspace dimension must be >= 1, value is "
<< phn_subspace_dim << "; setting to " << full_gmm.Dim() + 1;
phn_subspace_dim = full_gmm.Dim() + 1;
}
KALDI_ASSERT(spk_subspace_dim >= 0);
w_.Resize(0, 0);
N_.clear();
c_.clear();
v_.clear();
SigmaInv_.clear();
KALDI_LOG << "Initializing model";
Matrix<BaseFloat> norm_xform;
ComputeFeatureNormalizingTransform(full_gmm, &norm_xform);
InitializeMw(phn_subspace_dim, norm_xform);
if (spk_subspace_dim > 0)
InitializeNu(spk_subspace_dim, norm_xform, speaker_dependent_weights);
InitializeVecsAndSubstateWeights(self_weight);
KALDI_LOG << "Initializing variances";
InitializeCovars();
}
void AmSgmm2::CopyFromSgmm2(const AmSgmm2 &other, bool copy_normalizers,
bool copy_weights) {
KALDI_LOG << "Copying AmSgmm2";
pdf2group_ = other.pdf2group_;
group2pdf_ = other.group2pdf_;
// Copy background GMMs
diag_ubm_.CopyFromDiagGmm(other.diag_ubm_);
full_ubm_.CopyFromFullGmm(other.full_ubm_);
// Copy global params
SigmaInv_ = other.SigmaInv_;
M_ = other.M_;
w_ = other.w_;
N_ = other.N_;
u_ = other.u_;
// Copy state-specific params, but only copy normalizers if requested.
v_ = other.v_;
c_ = other.c_;
if (copy_normalizers) n_ = other.n_;
if (copy_weights) w_jmi_ = other.w_jmi_;
KALDI_LOG << "Done.";
}
void AmSgmm2::ComputePerFrameVars(
const VectorBase<BaseFloat> &data, const std::vector<int32> &gselect,
const Sgmm2PerSpkDerivedVars &spk_vars,
Sgmm2PerFrameDerivedVars *per_frame_vars) const {
KALDI_ASSERT(!n_.empty() && "ComputeNormalizers() must be called.");
per_frame_vars->Resize(gselect.size(), FeatureDim(), PhoneSpaceDim());
per_frame_vars->gselect = gselect;
per_frame_vars->xt.CopyFromVec(data);
for (int32 ki = 0, last = gselect.size(); ki < last; ki++) {
int32 i = gselect[ki];
per_frame_vars->xti.Row(ki).CopyFromVec(per_frame_vars->xt);
if (spk_vars.v_s.Dim() != 0)
per_frame_vars->xti.Row(ki).AddVec(-1.0, spk_vars.o_s.Row(i));
}
Vector<BaseFloat> SigmaInv_xt(FeatureDim());
bool speaker_dep_weights =
(spk_vars.v_s.Dim() != 0 && HasSpeakerDependentWeights());
for (int32 ki = 0, last = gselect.size(); ki < last; ki++) {
int32 i = gselect[ki];
BaseFloat ssgmm_term = (speaker_dep_weights ? spk_vars.log_b_is(i) : 0.0);
SigmaInv_xt.AddSpVec(1.0, SigmaInv_[i], per_frame_vars->xti.Row(ki), 0.0);
// Eq (35): z_{i}(t) = M_{i}^{T} \Sigma_{i}^{-1} x_{i}(t)
per_frame_vars->zti.Row(ki).AddMatVec(1.0, M_[i], kTrans, SigmaInv_xt, 0.0);
// Eq.(36): n_{i}(t) = -0.5 x_{i}^{T} \Sigma_{i}^{-1} x_{i}(t)
per_frame_vars->nti(ki) =
-0.5 * VecVec(per_frame_vars->xti.Row(ki), SigmaInv_xt) + ssgmm_term;
}
}
// inline
void AmSgmm2::ComponentLogLikes(const Sgmm2PerFrameDerivedVars &per_frame_vars,
int32 j1, Sgmm2PerSpkDerivedVars *spk_vars,
Matrix<BaseFloat> *loglikes) const {
const vector<int32> &gselect = per_frame_vars.gselect;
int32 num_gselect = gselect.size(), num_substates = v_[j1].NumRows();
// Eq.(37): log p(x(t), m, i|j) [indexed by j, ki]
// Although the extra memory allocation of storing this as a
// matrix might seem unnecessary, we save time in the LogSumExp()
// via more effective pruning.
loglikes->Resize(num_gselect, num_substates);
bool speaker_dep_weights =
(spk_vars->v_s.Dim() != 0 && HasSpeakerDependentWeights());
if (speaker_dep_weights) {
KALDI_ASSERT(static_cast<int32>(spk_vars->log_d_jms.size()) == NumGroups());
KALDI_ASSERT(static_cast<int32>(w_jmi_.size()) == NumGroups() ||
"You need to call ComputeWeights().");
}
for (int32 ki = 0; ki < num_gselect; ki++) {
SubVector<BaseFloat> logp_xi(*loglikes, ki);
int32 i = gselect[ki];
// for all substates, compute z_{i}^T v_{jm}
logp_xi.AddMatVec(1.0, v_[j1], kNoTrans, per_frame_vars.zti.Row(ki), 0.0);
logp_xi.AddVec(1.0, n_[j1].Row(i)); // for all substates, add n_{jim}
logp_xi.Add(per_frame_vars.nti(ki)); // for all substates, add n_{i}(t)
}
if (speaker_dep_weights) { // [SSGMM]
Vector<BaseFloat> &log_d = spk_vars->log_d_jms[j1];
if (log_d.Dim() == 0) { // have not yet cached this quantity.
log_d.Resize(num_substates);
log_d.AddMatVec(1.0, w_jmi_[j1], kNoTrans, spk_vars->b_is, 0.0);
log_d.ApplyLog();
}
loglikes->AddVecToRows(-1.0, log_d); // [SSGMM] this is the term
// - log d_{jm}^{(s)} in the likelihood function [eq. 25 in
// the techreport]
}
}
BaseFloat AmSgmm2::LogLikelihood(const Sgmm2PerFrameDerivedVars &per_frame_vars,
int32 j2, Sgmm2LikelihoodCache *cache,
Sgmm2PerSpkDerivedVars *spk_vars,
BaseFloat log_prune) const {
int32 t = cache->t; // not a real time; used to uniquely identify frames.
// Forgo asserts here, as this is frequently called.
// We'll probably get a segfault if an error is made.
Sgmm2LikelihoodCache::PdfCacheElement &pdf_cache = cache->pdf_cache[j2];
#ifdef KALDI_PARANOID
bool random_test = (Rand() % 1000 == 1); // to check that the user is
// calling Next() on the cache, as they should.
#else
bool random_test = false; // compiler will ignore test branches.
#endif
if (pdf_cache.t == t) {
if (!random_test) return pdf_cache.log_like;
} else {
random_test = false;
}
// if random_test == true at this point, it was already cached, and we will
// verify that we return the same value as the cached one.
pdf_cache.t = t;
int32 j1 = pdf2group_[j2];
Sgmm2LikelihoodCache::SubstateCacheElement &substate_cache =
cache->substate_cache[j1];
if (substate_cache.t != t) { // Need to compute sub-state likelihoods.
substate_cache.t = t;
Matrix<BaseFloat> loglikes; // indexed [gselect-index][substate-index]
ComponentLogLikes(per_frame_vars, j1, spk_vars, &loglikes);
BaseFloat max =
loglikes.Max(); // use this to keep things in good numerical range.
loglikes.Add(-max);
loglikes.ApplyExp();
substate_cache.remaining_log_like = max;
int32 num_substates = loglikes.NumCols();
substate_cache.likes.Resize(num_substates); // zeroes it.
substate_cache.likes.AddRowSumMat(
1.0, loglikes); // add likelihoods [not in log!] for
// each column [i.e. summing over the rows], so we get the sum for
// each substate index. You have to multiply by exp(remaining_log_like)
// to get a real likelihood.
}
BaseFloat log_like = substate_cache.remaining_log_like +
log(VecVec(substate_cache.likes, c_[j2]));
if (random_test) KALDI_ASSERT(ApproxEqual(pdf_cache.log_like, log_like));
pdf_cache.log_like = log_like;
KALDI_ASSERT(log_like == log_like && log_like - log_like == 0); // check
// that it's not NaN or infinity.
return log_like;
}
BaseFloat AmSgmm2::ComponentPosteriors(
const Sgmm2PerFrameDerivedVars &per_frame_vars, int32 j2,
Sgmm2PerSpkDerivedVars *spk_vars, Matrix<BaseFloat> *post) const {
KALDI_ASSERT(j2 < NumPdfs() && post != NULL);
int32 j1 = pdf2group_[j2];
ComponentLogLikes(per_frame_vars, j1, spk_vars, post); // now
// post is a matrix of log-likelihoods indexed by [gaussian-selection index]
// [sub-state index]. It doesn't include the sub-state weights,
// though.
BaseFloat loglike = post->Max();
post->Add(-loglike); // get it to nicer numeric range.
post->ApplyExp(); // so we're dealing with likelihoods (with an arbitrary
// offset
// "loglike" removed to make it in a nice numeric range)
post->MulColsVec(c_[j2]); // include the sub-state weights.
BaseFloat tot_like = post->Sum();
KALDI_ASSERT(tot_like != 0.0); // note: not valid to have zero weights.
loglike += log(tot_like);
post->Scale(1.0 / tot_like); // so "post" now sums to one, and "loglike"
// contains the correct log-likelihood of the data given the pdf.
return loglike;
}
void AmSgmm2::SplitSubstatesInGroup(const Vector<BaseFloat> &pdf_occupancies,
const Sgmm2SplitSubstatesConfig &opts,
const SpMatrix<BaseFloat> &sqrt_H_sm,
int32 j1, int32 tgt_M) {
const std::vector<int32> &pdfs = group2pdf_[j1];
int32 phn_dim = PhoneSpaceDim(), cur_M = NumSubstatesForGroup(j1),
num_pdfs_for_group = pdfs.size();
Vector<BaseFloat> rand_vec(phn_dim), v_shift(phn_dim);
KALDI_ASSERT(tgt_M >= cur_M);
if (cur_M == tgt_M) return;
// Resize v[j1] to fit new substates
{
Matrix<BaseFloat> tmp_v_j(v_[j1]);
v_[j1].Resize(tgt_M, phn_dim);
v_[j1].Range(0, cur_M, 0, phn_dim).CopyFromMat(tmp_v_j);
}
// we'll use a temporary matrix for the c quantities.
Matrix<BaseFloat> c_j(num_pdfs_for_group, tgt_M);
for (int32 i = 0; i < num_pdfs_for_group; i++) {
int32 j2 = pdfs[i];
c_j.Row(i).Range(0, cur_M).CopyFromVec(c_[j2]);
}
// Keep splitting substates until obtaining the desired number
for (; cur_M < tgt_M; cur_M++) {
int32 split_m; // substate to split.
{
Vector<BaseFloat> substate_count(tgt_M);
substate_count.AddRowSumMat(1.0, c_j);
BaseFloat *data = substate_count.Data();
split_m = std::max_element(data, data + cur_M) - data;
}
for (int32 i = 0; i < num_pdfs_for_group; i++) { // divide count of split
// substate. [extended for SCTM]
// c_{jkm} := c_{jmk}' := c_{jkm} / 2
c_j(i, split_m) = c_j(i, cur_M) = c_j(i, split_m) / 2;
}
// v_{jkm} := +/- split_perturb * H_k^{(sm)}^{-0.5} * rand_vec
std::generate(rand_vec.Data(), rand_vec.Data() + rand_vec.Dim(),
_RandGauss);
v_shift.AddSpVec(opts.perturb_factor, sqrt_H_sm, rand_vec, 0.0);
v_[j1].Row(cur_M).CopyFromVec(v_[j1].Row(split_m));
v_[j1].Row(cur_M).AddVec(1.0, v_shift);
v_[j1].Row(split_m).AddVec(-1.0, v_shift);
}
// copy the temporary matrix for the c_ (sub-state weight)
// quantities back to the place it belongs.
for (int32 i = 0; i < num_pdfs_for_group; i++) {
int32 j2 = pdfs[i];
c_[j2].Resize(tgt_M);
c_[j2].CopyFromVec(c_j.Row(i));
}
}
void AmSgmm2::SplitSubstates(const Vector<BaseFloat> &pdf_occupancies,
const Sgmm2SplitSubstatesConfig &opts) {
KALDI_ASSERT(pdf_occupancies.Dim() == NumPdfs());
int32 J1 = NumGroups(), J2 = NumPdfs();
Vector<BaseFloat> group_occupancies(J1);
for (int32 j2 = 0; j2 < J2; j2++)
group_occupancies(Pdf2Group(j2)) += pdf_occupancies(j2);
vector<int32> tgt_num_substates;
GetSplitTargets(group_occupancies, opts.split_substates, opts.power,
opts.min_count, &tgt_num_substates);
int32 tot_num_substates_old = 0, tot_num_substates_new = 0;
vector<SpMatrix<BaseFloat> > H_i;
SpMatrix<BaseFloat> sqrt_H_sm;
ComputeH(&H_i); // set up that array.
ComputeHsmFromModel(H_i, pdf_occupancies, &sqrt_H_sm, opts.max_cond);
H_i.clear();
sqrt_H_sm.ApplyPow(-0.5);
for (int32 j1 = 0; j1 < J1; j1++) {
int32 cur_M = NumSubstatesForGroup(j1), tgt_M = tgt_num_substates[j1];
tot_num_substates_old += cur_M;
tot_num_substates_new += std::max(cur_M, tgt_M);
if (cur_M < tgt_M)
SplitSubstatesInGroup(pdf_occupancies, opts, sqrt_H_sm, j1, tgt_M);
}
if (tot_num_substates_old == tot_num_substates_new) {
KALDI_LOG << "Not splitting substates; current #substates is "
<< tot_num_substates_old << " and target is "
<< opts.split_substates;
} else {
KALDI_LOG << "Getting rid of normalizers as they will no longer be valid";
n_.clear();
KALDI_LOG << "Split " << tot_num_substates_old << " substates to "
<< tot_num_substates_new;
}
}
void AmSgmm2::IncreasePhoneSpaceDim(int32 target_dim,
const Matrix<BaseFloat> &norm_xform) {
KALDI_ASSERT(!M_.empty());
int32 initial_dim = PhoneSpaceDim(), feat_dim = FeatureDim();
KALDI_ASSERT(norm_xform.NumRows() == feat_dim);
if (target_dim < initial_dim)
KALDI_ERR << "You asked to increase phn dim to a value lower than the "
<< " current dimension, " << target_dim << " < " << initial_dim;
if (target_dim > initial_dim + feat_dim) {
KALDI_WARN << "Cannot increase phone subspace dimensionality from "
<< initial_dim << " to " << target_dim << ", increasing to "
<< initial_dim + feat_dim;
target_dim = initial_dim + feat_dim;
}
if (initial_dim < target_dim) {
Matrix<BaseFloat> tmp_M(feat_dim, initial_dim);
for (int32 i = 0; i < NumGauss(); i++) {
tmp_M.CopyFromMat(M_[i]);
M_[i].Resize(feat_dim, target_dim);
M_[i].Range(0, feat_dim, 0, tmp_M.NumCols()).CopyFromMat(tmp_M);
M_[i]
.Range(0, feat_dim, tmp_M.NumCols(), target_dim - tmp_M.NumCols())
.CopyFromMat(
norm_xform.Range(0, feat_dim, 0, target_dim - tmp_M.NumCols()));
}
Matrix<BaseFloat> tmp_w = w_;
w_.Resize(tmp_w.NumRows(), target_dim);
w_.Range(0, tmp_w.NumRows(), 0, tmp_w.NumCols()).CopyFromMat(tmp_w);
for (int32 j1 = 0; j1 < NumGroups(); j1++) {
      // Resize phonetic-subspace vectors.
Matrix<BaseFloat> tmp_v_j = v_[j1];
v_[j1].Resize(tmp_v_j.NumRows(), target_dim);
v_[j1]
.Range(0, tmp_v_j.NumRows(), 0, tmp_v_j.NumCols())
.CopyFromMat(tmp_v_j);
}
KALDI_LOG << "Phone subspace dimensionality increased from " << initial_dim
<< " to " << target_dim;
} else {
KALDI_LOG << "Phone subspace dimensionality unchanged, since target "
<< "dimension (" << target_dim << ") <= initial dimansion ("
<< initial_dim << ")";
}
}
void AmSgmm2::IncreaseSpkSpaceDim(int32 target_dim,
const Matrix<BaseFloat> &norm_xform,
bool speaker_dependent_weights) {
int32 initial_dim = SpkSpaceDim(), feat_dim = FeatureDim();
KALDI_ASSERT(norm_xform.NumRows() == feat_dim);
if (N_.size() == 0) N_.resize(NumGauss());
if (target_dim < initial_dim)
KALDI_ERR << "You asked to increase spk dim to a value lower than the "
<< " current dimension, " << target_dim << " < " << initial_dim;
if (target_dim > initial_dim + feat_dim) {
KALDI_WARN << "Cannot increase speaker subspace dimensionality from "
<< initial_dim << " to " << target_dim << ", increasing to "
<< initial_dim + feat_dim;
target_dim = initial_dim + feat_dim;
}
if (initial_dim < target_dim) {
int32 dim_change = target_dim - initial_dim;
Matrix<BaseFloat> tmp_N((initial_dim != 0) ? feat_dim : 0, initial_dim);
for (int32 i = 0; i < NumGauss(); i++) {
if (initial_dim != 0) tmp_N.CopyFromMat(N_[i]);
N_[i].Resize(feat_dim, target_dim);
if (initial_dim != 0) {
N_[i].Range(0, feat_dim, 0, tmp_N.NumCols()).CopyFromMat(tmp_N);
}
N_[i]
.Range(0, feat_dim, tmp_N.NumCols(), dim_change)
.CopyFromMat(norm_xform.Range(0, feat_dim, 0, dim_change));
}
// if we already have speaker-dependent weights or we are increasing
// spk-dim from zero and are asked to add them...
if (u_.NumRows() != 0 || (initial_dim == 0 && speaker_dependent_weights))
u_.Resize(NumGauss(), target_dim, kCopyData); // extend dim of u_i's
KALDI_LOG << "Speaker subspace dimensionality increased from "
<< initial_dim << " to " << target_dim;
if (initial_dim == 0 && speaker_dependent_weights)
KALDI_LOG << "Added parameters u for speaker-dependent weights.";
} else {
KALDI_LOG << "Speaker subspace dimensionality unchanged, since target "
<< "dimension (" << target_dim << ") <= initial dimansion ("
<< initial_dim << ")";
}
}
void AmSgmm2::ComputeWeights() {
int32 J1 = NumGroups();
w_jmi_.resize(J1);
int32 i = NumGauss();
for (int32 j1 = 0; j1 < J1; j1++) {
int32 M = NumSubstatesForGroup(j1);
w_jmi_[j1].Resize(M, i);
w_jmi_[j1].AddMatMat(1.0, v_[j1], kNoTrans, w_, kTrans, 0.0);
// now w_jmi_ contains un-normalized log weights.
for (int32 m = 0; m < M; m++)
w_jmi_[j1].Row(m).ApplySoftMax(); // get the actual weights.
}
}
void AmSgmm2::ComputeDerivedVars() {
if (n_.empty()) ComputeNormalizers();
if (diag_ubm_.NumGauss() != full_ubm_.NumGauss() ||
diag_ubm_.Dim() != full_ubm_.Dim()) {
diag_ubm_.CopyFromFullGmm(full_ubm_);
}
if (w_jmi_.empty() && HasSpeakerDependentWeights()) ComputeWeights();
}
class ComputeNormalizersClass : public MultiThreadable { // For multi-threaded.
public:
ComputeNormalizersClass(AmSgmm2 *am_sgmm, int32 *entropy_count_ptr,
double *entropy_sum_ptr)
: am_sgmm_(am_sgmm),
entropy_count_ptr_(entropy_count_ptr),
entropy_sum_ptr_(entropy_sum_ptr),
entropy_count_(0),
entropy_sum_(0.0) {}
~ComputeNormalizersClass() {
*entropy_count_ptr_ += entropy_count_;
*entropy_sum_ptr_ += entropy_sum_;
}
inline void operator()() {
// Note: give them local copy of the sums we're computing,
// which will be propagated to original pointer in the destructor.
am_sgmm_->ComputeNormalizersInternal(num_threads_, thread_id_,
&entropy_count_, &entropy_sum_);
}
private:
ComputeNormalizersClass() {} // Disallow empty constructor.
AmSgmm2 *am_sgmm_;
int32 *entropy_count_ptr_;
double *entropy_sum_ptr_;
int32 entropy_count_;
double entropy_sum_;
};
void AmSgmm2::ComputeNormalizers() {
KALDI_LOG << "Computing normalizers";
n_.resize(NumPdfs());
int32 entropy_count = 0;
double entropy_sum = 0.0;
ComputeNormalizersClass c(this, &entropy_count, &entropy_sum);
RunMultiThreaded(c);
KALDI_LOG << "Entropy of weights in substates is "
<< (entropy_sum / entropy_count) << " over " << entropy_count
<< " substates, equivalent to perplexity of "
<< (exp(entropy_sum / entropy_count));
KALDI_LOG << "Done computing normalizers";
}
void AmSgmm2::ComputeNormalizersInternal(int32 num_threads, int32 thread,
int32 *entropy_count,
double *entropy_sum) {
BaseFloat DLog2pi = FeatureDim() * log(2 * M_PI);
Vector<BaseFloat> log_det_Sigma(NumGauss());
for (int32 i = 0; i < NumGauss(); i++) {
try {
log_det_Sigma(i) = -SigmaInv_[i].LogPosDefDet();
} catch (...) {
if (thread == 0) // just for one thread, print errors [else, duplicates]
KALDI_WARN << "Covariance is not positive definite, setting to unit";
SigmaInv_[i].SetUnit();
log_det_Sigma(i) = 0.0;
}
}
int32 J1 = NumGroups();
int block_size = (NumPdfs() + num_threads - 1) / num_threads;
int j_start = thread * block_size, j_end = std::min(J1, j_start + block_size);
int32 I = NumGauss();
for (int32 j1 = j_start; j1 < j_end; j1++) {
int32 M = NumSubstatesForGroup(j1);
Matrix<BaseFloat> log_w_jm(M, I);
n_[j1].Resize(I, M);
Matrix<BaseFloat> mu_jmi(M, FeatureDim());
Matrix<BaseFloat> SigmaInv_mu(M, FeatureDim());
// (in logs): w_jm = softmax([w_{k1}^T ... w_{kD}^T] * v_{jkm}) eq.(7)
log_w_jm.AddMatMat(1.0, v_[j1], kNoTrans, w_, kTrans, 0.0);
for (int32 m = 0; m < M; m++) {
log_w_jm.Row(m).Add(-1.0 * log_w_jm.Row(m).LogSumExp());
{ // DIAGNOSTIC CODE
(*entropy_count)++;
for (int32 i = 0; i < NumGauss(); i++) {
(*entropy_sum) -= log_w_jm(m, i) * exp(log_w_jm(m, i));
}
}
}
for (int32 i = 0; i < I; i++) {
// mu_jmi = M_{i} * v_{jm}
mu_jmi.AddMatMat(1.0, v_[j1], kNoTrans, M_[i], kTrans, 0.0);
SigmaInv_mu.AddMatSp(1.0, mu_jmi, kNoTrans, SigmaInv_[i], 0.0);
for (int32 m = 0; m < M; m++) {
// mu_{jmi} * \Sigma_{i}^{-1} * mu_{jmi}
BaseFloat mu_SigmaInv_mu = VecVec(mu_jmi.Row(m), SigmaInv_mu.Row(m));
// Previously had:
// BaseFloat logc = log(c_[j](m));
// but because of STCM aspect, we can't include the sub-state mixture
// weights
// at this point [included later on.]
// eq.(31)
n_[j1](i, m) = log_w_jm(m, i) -
0.5 * (log_det_Sigma(i) + DLog2pi + mu_SigmaInv_mu);
{ // Mainly diagnostic code. Not necessary.
BaseFloat tmp = n_[j1](i, m);
if (!KALDI_ISFINITE(tmp)) { // NaN or inf
KALDI_LOG << "Warning: normalizer for j1 = " << j1 << ", m = " << m
<< ", i = " << i << " is infinite or NaN " << tmp << "= "
<< log_w_jm(m, i) << "+" << (-0.5 * log_det_Sigma(i))
<< "+" << (-0.5 * DLog2pi) << "+" << (mu_SigmaInv_mu)
<< ", setting to finite.";
n_[j1](i, m) =
-1.0e+40; // future work(arnab): get rid of magic number
}
}
}
}
}
}
BaseFloat AmSgmm2::GetDjms(int32 j1, int32 m,
Sgmm2PerSpkDerivedVars *spk_vars) const {
// This relates to SSGMMs (speaker-dependent weights).
if (spk_vars->log_d_jms.empty()) return -1; // this would be
// because we don't have speaker-dependent weights ("u" not set up).
KALDI_ASSERT(!w_jmi_.empty() && "You need to call ComputeWeights() on SGMM.");
Vector<BaseFloat> &log_d = spk_vars->log_d_jms[j1];
if (log_d.Dim() == 0) {
log_d.Resize(NumSubstatesForGroup(j1));
log_d.AddMatVec(1.0, w_jmi_[j1], kNoTrans, spk_vars->b_is, 0.0);
log_d.ApplyLog();
}
return exp(log_d(m));
}
void AmSgmm2::ComputeFmllrPreXform(const Vector<BaseFloat> &state_occs,
Matrix<BaseFloat> *xform,
Matrix<BaseFloat> *inv_xform,
Vector<BaseFloat> *diag_mean_scatter) const {
int32 num_pdfs = NumPdfs(), num_gauss = NumGauss(), dim = FeatureDim();
KALDI_ASSERT(state_occs.Dim() == num_pdfs);
BaseFloat total_occ = state_occs.Sum();
// Degenerate case: unlikely to ever happen.
if (total_occ == 0) {
KALDI_WARN << "Zero probability (computing transform). Using unit "
<< "pre-transform";
xform->Resize(dim, dim + 1, kUndefined);
xform->SetUnit();
inv_xform->Resize(dim, dim + 1, kUndefined);
inv_xform->SetUnit();
diag_mean_scatter->Resize(dim, kSetZero);
return;
}
// Convert state occupancies to posteriors; Eq. (B.1)
Vector<BaseFloat> state_posteriors(state_occs);
state_posteriors.Scale(1 / total_occ);
Vector<BaseFloat> mu_jmi(dim), global_mean(dim);
SpMatrix<BaseFloat> within_class_covar(dim), between_class_covar(dim);
Vector<BaseFloat> gauss_weight(num_gauss); // weights for within-class vars.
Vector<BaseFloat> w_jm(num_gauss);
for (int32 j1 = 0; j1 < NumGroups(); j1++) {
const std::vector<int32> &pdfs = group2pdf_[j1];
int32 M = NumSubstatesForGroup(j1);
Vector<BaseFloat> substate_weight(M); // total weight for each substate.
for (size_t i = 0; i < pdfs.size(); i++) {
int32 j2 = pdfs[i];
substate_weight.AddVec(state_posteriors(j2), c_[j2]);
}
for (int32 m = 0; m < M; m++) {
BaseFloat this_substate_weight = substate_weight(m);
// Eq. (7): w_jm = softmax([w_{1}^T ... w_{D}^T] * v_{jm})
w_jm.AddMatVec(1.0, w_, kNoTrans, v_[j1].Row(m), 0.0);
w_jm.ApplySoftMax();
for (int32 i = 0; i < num_gauss; i++) {
BaseFloat weight = this_substate_weight * w_jm(i);
mu_jmi.AddMatVec(1.0, M_[i], kNoTrans, v_[j1].Row(m), 0.0); // Eq. (6)
// Eq. (B.3): \mu_avg = \sum_{jmi} p(j) c_{jm} w_{jmi} \mu_{jmi}
global_mean.AddVec(weight, mu_jmi);
// \Sigma_B = \sum_{jmi} p(j) c_{jm} w_{jmi} \mu_{jmi} \mu_{jmi}^T
between_class_covar.AddVec2(weight, mu_jmi); // Eq. (B.4)
gauss_weight(i) += weight;
}
}
}
between_class_covar.AddVec2(-1.0, global_mean); // Eq. (B.4)
for (int32 i = 0; i < num_gauss; i++) {
SpMatrix<BaseFloat> Sigma(SigmaInv_[i]);
Sigma.InvertDouble();
// Eq. (B.2): \Sigma_W = \sum_{jmi} p(j) c_{jm} w_{jmi} \Sigma_i
within_class_covar.AddSp(gauss_weight(i), Sigma);
}
TpMatrix<BaseFloat> tmpL(dim);
Matrix<BaseFloat> tmpLInvFull(dim, dim);
tmpL.Cholesky(within_class_covar); // \Sigma_W = L L^T
tmpL.InvertDouble(); // L^{-1}
tmpLInvFull.CopyFromTp(tmpL); // get as full matrix.
// B := L^{-1} * \Sigma_B * L^{-T}
SpMatrix<BaseFloat> tmpB(dim);
tmpB.AddMat2Sp(1.0, tmpLInvFull, kNoTrans, between_class_covar, 0.0);
Matrix<BaseFloat> U(dim, dim);
diag_mean_scatter->Resize(dim);
xform->Resize(dim, dim + 1);
inv_xform->Resize(dim, dim + 1);
tmpB.Eig(diag_mean_scatter, &U); // Eq. (B.5): B = U D V^T
int32 n;
if ((n = diag_mean_scatter->ApplyFloor(1.0e-04)) != 0)
KALDI_WARN << "Floored " << n << " elements of the mean-scatter matrix.";
// Eq. (B.6): A_{pre} = U^T * L^{-1}
SubMatrix<BaseFloat> Apre(*xform, 0, dim, 0, dim);
Apre.AddMatMat(1.0, U, kTrans, tmpLInvFull, kNoTrans, 0.0);
#ifdef KALDI_PARANOID
{
SpMatrix<BaseFloat> tmp(dim);
tmp.AddMat2Sp(1.0, Apre, kNoTrans, within_class_covar, 0.0);
KALDI_ASSERT(tmp.IsUnit(0.01));
}
{
SpMatrix<BaseFloat> tmp(dim);
tmp.AddMat2Sp(1.0, Apre, kNoTrans, between_class_covar, 0.0);
KALDI_ASSERT(tmp.IsDiagonal(0.01));
}
#endif
// Eq. (B.7): b_{pre} = - A_{pre} \mu_{avg}
Vector<BaseFloat> b_pre(dim);
b_pre.AddMatVec(-1.0, Apre, kNoTrans, global_mean, 0.0);
for (int32 r = 0; r < dim; r++) {
xform->Row(r)(dim) = b_pre(r); // W_{pre} = [ A_{pre}, b_{pre} ]
}
// Eq. (B.8) & (B.9): W_{inv} = [ A_{pre}^{-1}, \mu_{avg} ]
inv_xform->CopyFromMat(*xform);
inv_xform->Range(0, dim, 0, dim).InvertDouble();
for (int32 r = 0; r < dim; r++) inv_xform->Row(r)(dim) = global_mean(r);
} // End of ComputePreXform()
template <typename Real>
void AmSgmm2::GetNtransSigmaInv(vector<Matrix<Real> > *out) const {
KALDI_ASSERT(
SpkSpaceDim() > 0 &&
"Cannot compute N^{T} \\Sigma_{i}^{-1} without speaker projections.");
out->resize(NumGauss());
Matrix<Real> tmpcov(FeatureDim(), FeatureDim());
Matrix<Real> tmp_n(FeatureDim(), SpkSpaceDim());
for (int32 i = 0; i < NumGauss(); i++) {
tmpcov.CopyFromSp(SigmaInv_[i]);
tmp_n.CopyFromMat(N_[i]);
(*out)[i].Resize(SpkSpaceDim(), FeatureDim());
(*out)[i].AddMatMat(1.0, tmp_n, kTrans, tmpcov, kNoTrans, 0.0);
}
}
// Instantiate the above template.
template void AmSgmm2::GetNtransSigmaInv(vector<Matrix<float> > *out) const;
template void AmSgmm2::GetNtransSigmaInv(vector<Matrix<double> > *out) const;
///////////////////////////////////////////////////////////////////////////////
template <class Real>
void AmSgmm2::ComputeH(std::vector<SpMatrix<Real> > *H_i) const {
KALDI_ASSERT(NumGauss() != 0);
(*H_i).resize(NumGauss());
SpMatrix<BaseFloat> H_i_tmp(PhoneSpaceDim());
for (int32 i = 0; i < NumGauss(); i++) {
(*H_i)[i].Resize(PhoneSpaceDim());
H_i_tmp.AddMat2Sp(1.0, M_[i], kTrans, SigmaInv_[i], 0.0);
(*H_i)[i].CopyFromSp(H_i_tmp);
}
}
// Instantiate the template.
template void AmSgmm2::ComputeH(std::vector<SpMatrix<float> > *H_i) const;
template void AmSgmm2::ComputeH(std::vector<SpMatrix<double> > *H_i) const;
// Initializes the matrices M_{i} and w_i
void AmSgmm2::InitializeMw(int32 phn_subspace_dim,
const Matrix<BaseFloat> &norm_xform) {
int32 ddim = full_ubm_.Dim();
KALDI_ASSERT(phn_subspace_dim <= ddim + 1);
KALDI_ASSERT(phn_subspace_dim <= norm_xform.NumCols() + 1);
KALDI_ASSERT(ddim <= norm_xform.NumRows());
Vector<BaseFloat> mean(ddim);
int32 num_gauss = full_ubm_.NumGauss();
w_.Resize(num_gauss, phn_subspace_dim);
M_.resize(num_gauss);
for (int32 i = 0; i < num_gauss; i++) {
full_ubm_.GetComponentMean(i, &mean);
Matrix<BaseFloat> &thisM(M_[i]);
thisM.Resize(ddim, phn_subspace_dim);
// Eq. (27): M_{i} = [ \bar{\mu}_{i} (J)_{1:D, 1:(S-1)}]
thisM.CopyColFromVec(mean, 0);
int32 nonrandom_dim = std::min(phn_subspace_dim - 1, ddim),
random_dim = phn_subspace_dim - 1 - nonrandom_dim;
thisM.Range(0, ddim, 1, nonrandom_dim)
.CopyFromMat(norm_xform.Range(0, ddim, 0, nonrandom_dim), kNoTrans);
// The following extension to the original paper allows us to
// initialize the model with a larger dimension of phone-subspace vector.
if (random_dim > 0)
thisM.Range(0, ddim, nonrandom_dim + 1, random_dim).SetRandn();
}
}
// Initializes the matrices N_i, and [if speaker_dependent_weights==true] u_i.
void AmSgmm2::InitializeNu(int32 spk_subspace_dim,
const Matrix<BaseFloat> &norm_xform,
bool speaker_dependent_weights) {
int32 ddim = full_ubm_.Dim();
int32 num_gauss = full_ubm_.NumGauss();
N_.resize(num_gauss);
for (int32 i = 0; i < num_gauss; i++) {
N_[i].Resize(ddim, spk_subspace_dim);
// Eq. (28): N_{i} = [ (J)_{1:D, 1:T)}]
int32 nonrandom_dim = std::min(spk_subspace_dim, ddim),
random_dim = spk_subspace_dim - nonrandom_dim;
N_[i]
.Range(0, ddim, 0, nonrandom_dim)
.CopyFromMat(norm_xform.Range(0, ddim, 0, nonrandom_dim), kNoTrans);
// The following extension to the original paper allows us to
// initialize the model with a larger dimension of speaker-subspace vector.
if (random_dim > 0)
N_[i].Range(0, ddim, nonrandom_dim, random_dim).SetRandn();
}
if (speaker_dependent_weights) {
u_.Resize(num_gauss, spk_subspace_dim); // will set to zero.
} else {
u_.Resize(0, 0);
}
}
void AmSgmm2::CopyGlobalsInitVecs(const AmSgmm2 &other,
const std::vector<int32> &pdf2group,
BaseFloat self_weight) {
KALDI_LOG << "Initializing model";
pdf2group_ = pdf2group;
ComputePdfMappings();
// Copy background GMMs
diag_ubm_.CopyFromDiagGmm(other.diag_ubm_);
full_ubm_.CopyFromFullGmm(other.full_ubm_);
// Copy global params
SigmaInv_ = other.SigmaInv_;
M_ = other.M_;
w_ = other.w_;
u_ = other.u_;
N_ = other.N_;
InitializeVecsAndSubstateWeights(self_weight);
}
// Initializes the vectors v_{j1,m} and substate weights c_{j2,m}.
void AmSgmm2::InitializeVecsAndSubstateWeights(BaseFloat self_weight) {
int32 J1 = NumGroups(), J2 = NumPdfs();
KALDI_ASSERT(J1 > 0 && J2 >= J1);
int32 phn_subspace_dim = PhoneSpaceDim();
KALDI_ASSERT(phn_subspace_dim > 0 && "Initialize M and w first.");
v_.resize(J1);
if (self_weight == 1.0) {
for (int32 j1 = 0; j1 < J1; j1++) {
v_[j1].Resize(1, phn_subspace_dim);
v_[j1](0, 0) = 1.0; // Eq. (26): v_{j1} = [1 0 0 ... 0]
}
c_.resize(J2);
for (int32 j2 = 0; j2 < J2; j2++) {
c_[j2].Resize(1);
c_[j2](0) = 1.0; // Eq. (25): c_{j1} = 1.0
}
} else {
for (int32 j1 = 0; j1 < J1; j1++) {
int32 npdfs = group2pdf_[j1].size();
v_[j1].Resize(npdfs, phn_subspace_dim);
for (int32 m = 0; m < npdfs; m++)
v_[j1](m, 0) = 1.0; // Eq. (26): v_{j1} = [1 0 0 ... 0]
}
c_.resize(J2);
for (int32 j2 = 0; j2 < J2; j2++) {
int32 j1 = pdf2group_[j2], npdfs = group2pdf_[j1].size();
c_[j2].Resize(npdfs);
if (npdfs == 1)
c_[j2].Set(1.0);
else {
// note: just avoid NaNs if npdfs-1... value won't matter.
double other_weight = (1.0 - self_weight) / std::max((1 - npdfs), 1);
c_[j2].Set(other_weight);
for (int32 k = 0; k < npdfs; k++)
if (group2pdf_[j1][k] == j2) c_[j2](k) = self_weight;
}
}
}
}
// Initializes the within-class vars Sigma_{ki}
void AmSgmm2::InitializeCovars() {
std::vector<SpMatrix<BaseFloat> > &inv_covars(full_ubm_.inv_covars());
int32 num_gauss = full_ubm_.NumGauss();
int32 dim = full_ubm_.Dim();
SigmaInv_.resize(num_gauss);
for (int32 i = 0; i < num_gauss; i++) {
SigmaInv_[i].Resize(dim);
SigmaInv_[i].CopyFromSp(inv_covars[i]);
}
}
// Compute the "smoothing" matrix H^{(sm)} from expected counts given the model.
void AmSgmm2::ComputeHsmFromModel(const std::vector<SpMatrix<BaseFloat> > &H,
const Vector<BaseFloat> &state_occupancies,
SpMatrix<BaseFloat> *H_sm,
BaseFloat max_cond) const {
int32 num_gauss = NumGauss();
BaseFloat tot_sum = 0.0;
KALDI_ASSERT(state_occupancies.Dim() == NumPdfs());
Vector<BaseFloat> w_jm(num_gauss);
H_sm->Resize(PhoneSpaceDim());
H_sm->SetZero();
Vector<BaseFloat> gamma_i;
ComputeGammaI(state_occupancies, &gamma_i);
BaseFloat sum = 0.0;
for (int32 i = 0; i < num_gauss; i++) {
if (gamma_i(i) > 0) {
H_sm->AddSp(gamma_i(i), H[i]);
sum += gamma_i(i);
}
}
if (sum == 0.0) {
KALDI_WARN << "Sum of counts is zero. ";
    // set to unit matrix--an arbitrary non-singular matrix; it won't ever matter.
H_sm->SetUnit();
} else {
H_sm->Scale(1.0 / sum);
int32 tmp = H_sm->LimitCondDouble(max_cond);
if (tmp > 0) {
KALDI_WARN << "Limited " << (tmp) << " eigenvalues of H_sm";
}
}
tot_sum += sum;
KALDI_LOG << "total count is " << tot_sum;
}
void ComputeFeatureNormalizingTransform(const FullGmm &gmm,
Matrix<BaseFloat> *xform) {
int32 dim = gmm.Dim();
int32 num_gauss = gmm.NumGauss();
SpMatrix<BaseFloat> within_class_covar(dim);
SpMatrix<BaseFloat> between_class_covar(dim);
Vector<BaseFloat> global_mean(dim);
// Accumulate LDA statistics from the GMM parameters.
{
BaseFloat total_weight = 0.0;
Vector<BaseFloat> tmp_weight(num_gauss);
Matrix<BaseFloat> tmp_means;
std::vector<SpMatrix<BaseFloat> > tmp_covars;
tmp_weight.CopyFromVec(gmm.weights());
gmm.GetCovarsAndMeans(&tmp_covars, &tmp_means);
for (int32 i = 0; i < num_gauss; i++) {
BaseFloat w_i = tmp_weight(i);
total_weight += w_i;
within_class_covar.AddSp(w_i, tmp_covars[i]);
between_class_covar.AddVec2(w_i, tmp_means.Row(i));
global_mean.AddVec(w_i, tmp_means.Row(i));
}
KALDI_ASSERT(total_weight > 0);
if (fabs(total_weight - 1.0) > 0.001) {
KALDI_WARN << "Total weight across the GMMs is " << (total_weight)
<< ", renormalizing.";
global_mean.Scale(1.0 / total_weight);
within_class_covar.Scale(1.0 / total_weight);
between_class_covar.Scale(1.0 / total_weight);
}
between_class_covar.AddVec2(-1.0, global_mean);
}
TpMatrix<BaseFloat> chol(dim);
chol.Cholesky(within_class_covar); // Sigma_W = L L^T
TpMatrix<BaseFloat> chol_inv(chol);
chol_inv.InvertDouble();
Matrix<BaseFloat> chol_full(dim, dim);
chol_full.CopyFromTp(chol_inv);
SpMatrix<BaseFloat> LBL(dim);
// LBL = L^{-1} \Sigma_B L^{-T}
LBL.AddMat2Sp(1.0, chol_full, kNoTrans, between_class_covar, 0.0);
Vector<BaseFloat> Dvec(dim);
Matrix<BaseFloat> U(dim, dim);
LBL.Eig(&Dvec, &U);
SortSvd(&Dvec, &U);
xform->Resize(dim, dim);
chol_full.CopyFromTp(chol);
// T := L U, eq (23)
xform->AddMatMat(1.0, chol_full, kNoTrans, U, kNoTrans, 0.0);
#ifdef KALDI_PARANOID
Matrix<BaseFloat> inv_xform(*xform);
inv_xform.InvertDouble();
{ // Check that T*within_class_covar*T' = I.
Matrix<BaseFloat> wc_covar_full(dim, dim), tmp(dim, dim);
wc_covar_full.CopyFromSp(within_class_covar);
tmp.AddMatMat(1.0, inv_xform, kNoTrans, wc_covar_full, kNoTrans, 0.0);
wc_covar_full.AddMatMat(1.0, tmp, kNoTrans, inv_xform, kTrans, 0.0);
KALDI_ASSERT(wc_covar_full.IsUnit(0.01));
}
{ // Check that T*between_class_covar*T' = diagonal.
Matrix<BaseFloat> bc_covar_full(dim, dim), tmp(dim, dim);
bc_covar_full.CopyFromSp(between_class_covar);
tmp.AddMatMat(1.0, inv_xform, kNoTrans, bc_covar_full, kNoTrans, 0.0);
bc_covar_full.AddMatMat(1.0, tmp, kNoTrans, inv_xform, kTrans, 0.0);
KALDI_ASSERT(bc_covar_full.IsDiagonal(0.01));
}
#endif
}
void AmSgmm2::ComputePerSpkDerivedVars(Sgmm2PerSpkDerivedVars *vars) const {
KALDI_ASSERT(vars != NULL);
if (vars->v_s.Dim() != 0) {
KALDI_ASSERT(vars->v_s.Dim() == SpkSpaceDim());
vars->o_s.Resize(NumGauss(), FeatureDim());
int32 num_gauss = NumGauss();
// first compute the o_i^{(s)} quantities.
for (int32 i = 0; i < num_gauss; i++) {
// Eqn. (32): o_i^{(s)} = N_i v^{(s)}
vars->o_s.Row(i).AddMatVec(1.0, N_[i], kNoTrans, vars->v_s, 0.0);
}
    // the rest relates to the SSGMM. We only need to do this
// if we're using speaker-dependent weights.
if (HasSpeakerDependentWeights()) {
vars->log_d_jms.clear();
vars->log_d_jms.resize(NumGroups());
vars->log_b_is.Resize(NumGauss());
vars->log_b_is.AddMatVec(1.0, u_, kNoTrans, vars->v_s, 0.0);
vars->b_is.Resize(NumGauss());
vars->b_is.CopyFromVec(vars->log_b_is);
vars->b_is.ApplyExp();
for (int32 i = 0; i < vars->b_is.Dim(); i++) {
if (vars->b_is(i) - vars->b_is(i) != 0.0) { // NaN.
vars->b_is(i) = 1.0;
KALDI_WARN << "Set NaN in b_is to 1.0";
}
}
} else {
vars->b_is.Resize(0);
vars->log_b_is.Resize(0);
vars->log_d_jms.resize(0);
}
} else {
vars->Clear(); // make sure everything is cleared.
}
}
BaseFloat AmSgmm2::GaussianSelection(const Sgmm2GselectConfig &config,
const VectorBase<BaseFloat> &data,
std::vector<int32> *gselect) const {
KALDI_ASSERT(diag_ubm_.NumGauss() != 0 &&
diag_ubm_.NumGauss() == full_ubm_.NumGauss() &&
diag_ubm_.Dim() == data.Dim());
KALDI_ASSERT(config.diag_gmm_nbest > 0 && config.full_gmm_nbest > 0 &&
config.full_gmm_nbest < config.diag_gmm_nbest);
int32 num_gauss = diag_ubm_.NumGauss();
std::vector<std::pair<BaseFloat, int32> > pruned_pairs;
if (config.diag_gmm_nbest < num_gauss) {
Vector<BaseFloat> loglikes(num_gauss);
diag_ubm_.LogLikelihoods(data, &loglikes);
Vector<BaseFloat> loglikes_copy(loglikes);
BaseFloat *ptr = loglikes_copy.Data();
std::nth_element(ptr, ptr + num_gauss - config.diag_gmm_nbest,
ptr + num_gauss);
BaseFloat thresh = ptr[num_gauss - config.diag_gmm_nbest];
for (int32 g = 0; g < num_gauss; g++)
if (loglikes(g) >= thresh) // met threshold for diagonal phase.
pruned_pairs.push_back(
std::make_pair(full_ubm_.ComponentLogLikelihood(data, g), g));
} else {
Vector<BaseFloat> loglikes(num_gauss);
full_ubm_.LogLikelihoods(data, &loglikes);
for (int32 g = 0; g < num_gauss; g++)
pruned_pairs.push_back(std::make_pair(loglikes(g), g));
}
KALDI_ASSERT(!pruned_pairs.empty());
if (pruned_pairs.size() > static_cast<size_t>(config.full_gmm_nbest)) {
std::nth_element(pruned_pairs.begin(),
pruned_pairs.end() - config.full_gmm_nbest,
pruned_pairs.end());
pruned_pairs.erase(pruned_pairs.begin(),
pruned_pairs.end() - config.full_gmm_nbest);
}
Vector<BaseFloat> loglikes_tmp(pruned_pairs.size()); // for return value.
KALDI_ASSERT(gselect != NULL);
gselect->resize(pruned_pairs.size());
// Make sure pruned Gaussians appear from best to worst.
std::sort(pruned_pairs.begin(), pruned_pairs.end(),
std::greater<std::pair<BaseFloat, int32> >());
for (size_t i = 0; i < pruned_pairs.size(); i++) {
loglikes_tmp(i) = pruned_pairs[i].first;
(*gselect)[i] = pruned_pairs[i].second;
}
return loglikes_tmp.LogSumExp();
}
void Sgmm2GauPost::Write(std::ostream &os, bool binary) const {
WriteToken(os, binary, "<Sgmm2GauPost>");
int32 T = this->size();
WriteBasicType(os, binary, T);
for (int32 t = 0; t < T; t++) {
WriteToken(os, binary, "<gselect>");
WriteIntegerVector(os, binary, (*this)[t].gselect);
WriteToken(os, binary, "<tids>");
WriteIntegerVector(os, binary, (*this)[t].tids);
KALDI_ASSERT((*this)[t].tids.size() == (*this)[t].posteriors.size());
for (size_t i = 0; i < (*this)[t].posteriors.size(); i++) {
(*this)[t].posteriors[i].Write(os, binary);
}
}
WriteToken(os, binary, "</Sgmm2GauPost>");
}
void Sgmm2GauPost::Read(std::istream &is, bool binary) {
ExpectToken(is, binary, "<Sgmm2GauPost>");
int32 T;
ReadBasicType(is, binary, &T);
KALDI_ASSERT(T >= 0);
this->resize(T);
for (int32 t = 0; t < T; t++) {
ExpectToken(is, binary, "<gselect>");
ReadIntegerVector(is, binary, &((*this)[t].gselect));
ExpectToken(is, binary, "<tids>");
ReadIntegerVector(is, binary, &((*this)[t].tids));
size_t sz = (*this)[t].tids.size();
(*this)[t].posteriors.resize(sz);
for (size_t i = 0; i < sz; i++) (*this)[t].posteriors[i].Read(is, binary);
}
ExpectToken(is, binary, "</Sgmm2GauPost>");
}
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/sgmmbin/sgmm-est-fmllrbasis.cc<|end_filename|>
// sgmmbin/sgmm-est-fmllrbasis.cc
// Copyright 2009-2011 Saarland University
// Author: <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "util/common-utils.h"
#include "matrix/matrix-lib.h"
#include "hmm/transition-model.h"
#include "sgmm/am-sgmm.h"
#include "sgmm/fmllr-sgmm.h"
int main(int argc, char *argv[]) {
try {
typedef kaldi::int32 int32;
const char *usage =
"Sum multiple accumulated stats files for SGMM training.\n"
"Usage: sgmm-est-fmllrbasis [options] <model-in> <model-out> "
"<stats-in1> [stats-in2 ...]\n";
bool binary = true;
int32 num_bases = 50;
kaldi::ParseOptions po(usage);
po.Register("binary", &binary, "Write output in binary mode.");
po.Register("num-bases", &num_bases,
"Number of fMLLR basis matrices to estimate.");
po.Read(argc, argv);
if (po.NumArgs() < 3) {
po.PrintUsage();
exit(1);
}
std::string model_in_filename = po.GetArg(1),
model_out_filename = po.GetArg(2);
kaldi::AmSgmm am_sgmm;
kaldi::TransitionModel trans_model;
kaldi::SgmmFmllrGlobalParams fmllr_globals;
{
bool binary_read;
kaldi::Input ki(model_in_filename, &binary_read);
trans_model.Read(ki.Stream(), binary_read);
am_sgmm.Read(ki.Stream(), binary_read);
fmllr_globals.Read(ki.Stream(), binary_read);
}
kaldi::SpMatrix<double> fmllr_grad_scatter;
int32 dim = am_sgmm.FeatureDim();
fmllr_grad_scatter.Resize(dim * (dim + 1), kaldi::kSetZero);
for (int i = 3, max = po.NumArgs(); i <= max; i++) {
std::string stats_in_filename = po.GetArg(i);
bool binary_read;
kaldi::Input ki(stats_in_filename, &binary_read);
fmllr_grad_scatter.Read(ki.Stream(), binary_read,
true /* add read values */);
}
kaldi::EstimateSgmmFmllrSubspace(fmllr_grad_scatter, num_bases, dim,
&fmllr_globals);
// Write out the accs
{
kaldi::Output ko(model_out_filename, binary);
trans_model.Write(ko.Stream(), binary);
am_sgmm.Write(ko.Stream(), binary, kaldi::kSgmmWriteAll);
fmllr_globals.Write(ko.Stream(), binary);
}
KALDI_LOG << "Written model to " << model_out_filename;
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/online2/online-gmm-decodable.h<|end_filename|>
// online/online-gmm-decodable.h
// Copyright 2012 Cisco Systems (author: <NAME>)
// 2013 <NAME>
// 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_ONLINE2_ONLINE_GMM_DECODABLE_H_
#define KALDI_ONLINE2_ONLINE_GMM_DECODABLE_H_
#include "itf/online-feature-itf.h"
#include "gmm/decodable-am-diag-gmm.h"
#include "matrix/matrix-lib.h"
namespace kaldi {
class DecodableDiagGmmScaledOnline : public DecodableInterface {
public:
DecodableDiagGmmScaledOnline(const AmDiagGmm &am,
const TransitionModel &trans_model,
const BaseFloat scale,
OnlineFeatureInterface *input_feats);
/// Returns the scaled log likelihood
virtual BaseFloat LogLikelihood(int32 frame, int32 index);
virtual bool IsLastFrame(int32 frame) const;
virtual int32 NumFramesReady() const;
/// Indices are one-based! This is for compatibility with OpenFst.
virtual int32 NumIndices() const { return trans_model_.NumTransitionIds(); }
private:
void CacheFrame(int32 frame);
OnlineFeatureInterface *features_;
const AmDiagGmm &ac_model_;
BaseFloat ac_scale_;
const TransitionModel &trans_model_;
const int32 feat_dim_; // dimensionality of the input features
Vector<BaseFloat> cur_feats_;
int32 cur_frame_;
std::vector<std::pair<int32, BaseFloat> > cache_;
KALDI_DISALLOW_COPY_AND_ASSIGN(DecodableDiagGmmScaledOnline);
};
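// Usage sketch (illustrative only, not part of the original interface): the
// decodable object wraps a trained model and a feature source, and a decoder
// then queries scaled log-likelihoods by (frame, transition-id). The names
// `am_gmm` (an AmDiagGmm) and `feature_pipeline` (any OnlineFeatureInterface
// implementation) below are placeholders; the decoder call is schematic.
//
//   DecodableDiagGmmScaledOnline decodable(am_gmm, trans_model,
//                                          0.1 /* acoustic scale */,
//                                          &feature_pipeline);
//   // decoder.Decode(&decodable);   // the decoder passes one-based
//   //                               // transition-ids to LogLikelihood().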
} // namespace kaldi
#endif // KALDI_ONLINE2_ONLINE_GMM_DECODABLE_H_
<|start_filename|>tonic-suite/asr/src/bin/compute-wer.cc<|end_filename|>
// bin/compute-wer.cc
// Copyright 2009-2011 Microsoft Corporation
// 2014 Johns Hopkins University (authors: <NAME>, Daniel
// Povey)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "util/parse-options.h"
#include "tree/context-dep.h"
#include "util/edit-distance.h"
namespace kaldi {
template <typename T>
void PrintAlignmentStats(const std::vector<T> &ref, const std::vector<T> &hyp,
T eps, std::ostream &os) {
// Make sure the eps symbol is not in the sentences we're aligning; this would
// not make sense.
KALDI_ASSERT(std::find(ref.begin(), ref.end(), eps) == ref.end());
KALDI_ASSERT(std::find(hyp.begin(), hyp.end(), eps) == hyp.end());
std::vector<std::pair<T, T> > aligned;
typedef
typename std::vector<std::pair<T, T> >::const_iterator aligned_iterator;
LevenshteinAlignment(ref, hyp, eps, &aligned);
for (aligned_iterator it = aligned.begin(); it != aligned.end(); ++it) {
KALDI_ASSERT(!(it->first == eps && it->second == eps));
if (it->first == eps) {
os << "insertion " << it->second << std::endl;
} else if (it->second == eps) {
os << "deletion " << it->first << std::endl;
} else if (it->first != it->second) {
os << "substitution " << it->first << ' ' << it->second << std::endl;
} else {
os << "correct " << it->first << std::endl;
}
}
}
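// Illustrative example (not from the original source) of the per-token lines
// the function above writes when detailed statistics are requested; the actual
// tokens depend on the data:
//   correct the
//   substitution quick quik
//   deletion brown
//   insertion fox
// Piping this output through "sort | uniq -c", as in the usage example below,
// aggregates counts per (error type, token) pair.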
}
int main(int argc, char *argv[]) {
using namespace kaldi;
typedef kaldi::int32 int32;
try {
const char *usage =
"Compute WER by comparing different transcriptions\n"
"Takes two transcription files, in integer or text format,\n"
"and outputs overall WER statistics to standard output.\n"
"Optionally, the third argument can be used to obtain detailed "
"statistics\n"
"\n"
"Usage: compute-wer [options] <ref-rspecifier> <hyp-rspecifier> "
"[<stats-out>]\n"
"\n"
"E.g.: compute-wer --text --mode=present ark:data/train/text "
"ark:hyp_text\n"
"or: compute-wer --text --mode=present ark:data/train/text "
"ark:hyp_text - | \\\n"
" sort | uniq -c\n";
ParseOptions po(usage);
std::string mode = "strict";
    bool text_input = false; // if true, we expect symbols as strings, not integers.
po.Register("mode", &mode,
"Scoring mode: \"present\"|\"all\"|\"strict\":\n"
" \"present\" means score those we have transcriptions for\n"
" \"all\" means treat absent transcriptions as empty\n"
" \"strict\" means die if all in ref not also in hyp");
po.Register("text", &text_input, "Expect strings, not integers, as input.");
po.Read(argc, argv);
if (po.NumArgs() < 2 || po.NumArgs() > 3) {
po.PrintUsage();
exit(1);
}
std::string ref_rspecifier = po.GetArg(1);
std::string hyp_rspecifier = po.GetArg(2);
Output stats_output;
bool detailed_stats = (po.NumArgs() == 3);
if (detailed_stats)
stats_output.Open(po.GetOptArg(3), false, false); // non-binary output
if (mode != "strict" && mode != "present" && mode != "all") {
KALDI_ERR << "--mode option invalid: expected "
"\"present\"|\"all\"|\"strict\", got " << mode;
}
int32 num_words = 0, word_errs = 0, num_sent = 0, sent_errs = 0,
num_ins = 0, num_del = 0, num_sub = 0, num_absent_sents = 0;
if (!text_input) {
SequentialInt32VectorReader ref_reader(ref_rspecifier);
RandomAccessInt32VectorReader hyp_reader(hyp_rspecifier);
for (; !ref_reader.Done(); ref_reader.Next()) {
std::string key = ref_reader.Key();
const std::vector<int32> &ref_sent = ref_reader.Value();
std::vector<int32> hyp_sent;
if (!hyp_reader.HasKey(key)) {
if (mode == "strict")
KALDI_ERR << "No hypothesis for key " << key << " and strict "
"mode specifier.";
num_absent_sents++;
if (mode == "present") // do not score this one.
continue;
} else {
hyp_sent = hyp_reader.Value(key);
}
num_words += ref_sent.size();
int32 ins, del, sub;
word_errs +=
LevenshteinEditDistance(ref_sent, hyp_sent, &ins, &del, &sub);
num_ins += ins;
num_del += del;
num_sub += sub;
if (detailed_stats) {
const int32 eps = -1;
PrintAlignmentStats(ref_sent, hyp_sent, eps, stats_output.Stream());
}
num_sent++;
sent_errs += (ref_sent != hyp_sent);
}
} else {
SequentialTokenVectorReader ref_reader(ref_rspecifier);
RandomAccessTokenVectorReader hyp_reader(hyp_rspecifier);
for (; !ref_reader.Done(); ref_reader.Next()) {
std::string key = ref_reader.Key();
const std::vector<std::string> &ref_sent = ref_reader.Value();
std::vector<std::string> hyp_sent;
if (!hyp_reader.HasKey(key)) {
if (mode == "strict")
KALDI_ERR << "No hypothesis for key " << key << " and strict "
"mode specifier.";
num_absent_sents++;
if (mode == "present") // do not score this one.
continue;
} else {
hyp_sent = hyp_reader.Value(key);
}
num_words += ref_sent.size();
int32 ins, del, sub;
word_errs +=
LevenshteinEditDistance(ref_sent, hyp_sent, &ins, &del, &sub);
num_ins += ins;
num_del += del;
num_sub += sub;
if (detailed_stats) {
const std::string eps = "";
PrintAlignmentStats(ref_sent, hyp_sent, eps, stats_output.Stream());
}
num_sent++;
sent_errs += (ref_sent != hyp_sent);
}
}
BaseFloat percent_wer = 100.0 * static_cast<BaseFloat>(word_errs) /
static_cast<BaseFloat>(num_words);
std::cout.precision(2);
std::cerr.precision(2);
std::cout << "%WER " << std::fixed << percent_wer << " [ " << word_errs
<< " / " << num_words << ", " << num_ins << " ins, " << num_del
<< " del, " << num_sub << " sub ]"
<< (num_absent_sents != 0 ? " [PARTIAL]" : "") << '\n';
BaseFloat percent_ser = 100.0 * static_cast<BaseFloat>(sent_errs) /
static_cast<BaseFloat>(num_sent);
std::cout << "%SER " << std::fixed << percent_ser << " [ " << sent_errs
<< " / " << num_sent << " ]\n";
std::cout << "Scored " << num_sent << " sentences, " << num_absent_sents
<< " not present in hyp.\n";
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/featbin/extract-feature-segments.cc<|end_filename|>
// featbin/extract-feature-segments.cc
// Copyright 2009-2011 Microsoft Corporation; Govivace Inc.
// 2012-2013 <NAME>; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "feat/feature-mfcc.h"
#include "matrix/kaldi-matrix.h"
/** @brief This is a program for extracting segments from feature files/archives
- usage :
- extract-feature-segments [options ..] <scriptfile/archive>
<segments-file> <features-written-specifier>
- "segments-file" should have the information of the segments that needs to
be extracted from the feature files
- the format of the segments file : speaker_name filename start_time(in
secs) end_time(in secs)
- "features-written-specifier" is the output segment format
*/
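/* Illustrative example (file names below are placeholders, not fixed paths):
   - a segments-file line:  utt1-0001 utt1 1.10 2.36
   - invocation: extract-feature-segments scp:feats.scp segments ark:feats_seg.ark
   With the default --frame-rate=100, times 1.10 and 2.36 select frames 110
   through 235 of utterance "utt1" and write them out under key "utt1-0001". */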
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Create feature files by segmenting input files.\n"
"Usage: extract-feature-segments [options...] <feats-rspecifier> "
"<segments-file> <feats-wspecifier>\n"
" (segments-file has lines like: output-utterance-id "
"input-utterance-or-spk-id 1.10 2.36)\n";
// construct all the global objects
ParseOptions po(usage);
BaseFloat min_segment_length = 0.1, // Minimum segment length in seconds.
max_overshoot = 0.0; // max time by which last segment can overshoot
BaseFloat samp_freq =
100; // feature sampling frequency (assuming 10ms window shift)
// Register the options
po.Register("min-segment-length", &min_segment_length,
"Minimum segment length in seconds (reject shorter segments)");
po.Register("frame-rate", &samp_freq,
"Feature sampling frequency (e.g. 100 for 10ms window shift)");
po.Register("max-overshoot", &max_overshoot,
"End segments overshooting by less (in seconds) are truncated,"
" else rejected.");
// OPTION PARSING ...
// parse options (+filling the registered variables)
po.Read(argc, argv);
    // number of arguments should be 3 (feature rspecifier, segments file and
    // output wspecifier)
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string rspecifier = po.GetArg(1); // get script file/feature archive
std::string segments_rxfilename = po.GetArg(2); // get segment file
std::string wspecifier = po.GetArg(3); // get written archive name
BaseFloatMatrixWriter feat_writer(wspecifier);
RandomAccessBaseFloatMatrixReader feat_reader(rspecifier);
    Input ki(segments_rxfilename); // no binary argument: never binary.
int32 num_lines = 0, num_success = 0;
std::string line;
/* read each line from segments file */
while (std::getline(ki.Stream(), line)) {
num_lines++;
std::vector<std::string> split_line;
// Split the line by space or tab and check the number of fields in each
      // line. There must be 4 fields--segment name, recording/utterance id,
      // start time, end time; a 5th field (channel info) is optional.
SplitStringToVector(line, " \t\r", true, &split_line);
if (split_line.size() != 4 && split_line.size() != 5) {
KALDI_WARN << "Invalid line in segments file: " << line;
continue;
}
std::string segment = split_line[0], utterance = split_line[1],
start_str = split_line[2], end_str = split_line[3];
      // Convert the start and end times from string to real. The segment is
      // ignored if either cannot be converted.
double start, end;
if (!ConvertStringToReal(start_str, &start)) {
KALDI_WARN << "Invalid line in segments file [bad start]: " << line;
continue;
}
if (!ConvertStringToReal(end_str, &end)) {
KALDI_WARN << "Invalid line in segments file [bad end]: " << line;
continue;
}
// start time must not be negative; start time must not be greater than
// end time, except if end time is -1
if (start < 0 || end <= 0 || start >= end) {
KALDI_WARN
<< "Invalid line in segments file [empty or invalid segment]: "
<< line;
continue;
}
int32 channel = -1; // means channel info is unspecified.
// if each line has 5 elements then 5th element must be channel identifier
if (split_line.size() == 5) {
if (!ConvertStringToInteger(split_line[4], &channel) || channel < 0) {
KALDI_WARN << "Invalid line in segments file [bad channel]: " << line;
continue;
}
}
/* check whether a segment start time and end time exists in utterance
* if fails , skips the segment.
*/
if (!feat_reader.HasKey(utterance)) {
KALDI_WARN << "Did not find features for utterance " << utterance
<< ", skipping segment " << segment;
continue;
}
const Matrix<BaseFloat> &feats = feat_reader.Value(utterance);
      int32 num_samp =
          feats.NumRows(), // total number of frames in the feature matrix
          num_chan =
              feats.NumCols(); // feature dimension (number of columns)
      // Convert start & end times of the segment to corresponding frame indices
int32 start_samp = static_cast<int32>(start * samp_freq);
int32 end_samp = static_cast<int32>(end * samp_freq);
/* start sample must be less than total number of samples
* otherwise skip the segment
*/
if (start_samp < 0 || start_samp >= num_samp) {
KALDI_WARN << "Start sample out of range " << start_samp
<< " [length:] " << num_samp << "x" << num_chan
<< ", skipping segment " << segment;
continue;
}
/* end sample must be less than total number samples
* otherwise skip the segment
*/
if (end_samp > num_samp) {
if (end_samp >=
num_samp + static_cast<int32>(max_overshoot * samp_freq)) {
KALDI_WARN << "End sample too far out of range " << end_samp
<< " [length:] " << num_samp << "x" << num_chan
<< ", skipping segment " << segment;
continue;
}
end_samp = num_samp; // for small differences, just truncate.
}
/* check whether the segment size is less than minimum segment
* length(default 0.1 sec)
* if yes, skip the segment
*/
if (end_samp <=
start_samp + static_cast<int32>(min_segment_length * samp_freq)) {
KALDI_WARN << "Segment " << segment << " too short, skipping it.";
continue;
}
SubMatrix<BaseFloat> segment_matrix(feats, start_samp,
end_samp - start_samp, 0, num_chan);
Matrix<BaseFloat> outmatrix(segment_matrix);
feat_writer.Write(segment,
outmatrix); // write segment in feature archive.
num_success++;
}
KALDI_LOG << "Successfully processed " << num_success << " lines out of "
<< num_lines << " in the segments file. ";
/* prints number of segments processed */
if (num_success == 0) return -1;
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/ivectorbin/ivector-extract-online.cc<|end_filename|>
// ivectorbin/ivector-extract-online.cc
// Copyright 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "ivector/ivector-extractor.h"
#include "thread/kaldi-task-sequence.h"
int main(int argc, char *argv[]) {
using namespace kaldi;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
try {
const char *usage =
"Extract iVectors for utterances, using a trained iVector extractor,\n"
"and features and Gaussian-level posteriors. This version extracts "
"an\n"
"iVector every n frames (see the --ivector-period option), by "
"including\n"
"all frames up to that point in the utterance. This is designed to\n"
"correspond with what will happen in a streaming decoding scenario;\n"
"the iVectors would be used in neural net training. The iVectors are\n"
"output as an archive of matrices, indexed by utterance-id; each row\n"
"corresponds to an iVector.\n"
"See also ivector-extract-online2\n"
"\n"
"Usage: ivector-extract-online [options] <model-in> "
"<feature-rspecifier>"
"<posteriors-rspecifier> <ivector-wspecifier>\n"
"e.g.: \n"
" gmm-global-get-post 1.dubm '$feats' ark:- | \\\n"
" ivector-extract-online --ivector-period=10 final.ie '$feats' "
"ark,s,cs:- ark,t:ivectors.1.ark\n";
ParseOptions po(usage);
int32 num_cg_iters = 15;
int32 ivector_period = 10;
g_num_threads = 8;
po.Register("num-cg-iters", &num_cg_iters,
"Number of iterations of conjugate gradient descent to perform "
"each time we re-estimate the iVector.");
po.Register("ivector-period", &ivector_period,
"Controls how frequently we re-estimate the iVector as we get "
"more data.");
po.Register("num-threads", &g_num_threads,
"Number of threads to use for computing derived variables "
"of iVector extractor, at process start-up.");
po.Read(argc, argv);
if (po.NumArgs() != 4) {
po.PrintUsage();
exit(1);
}
std::string ivector_extractor_rxfilename = po.GetArg(1),
feature_rspecifier = po.GetArg(2),
posteriors_rspecifier = po.GetArg(3),
ivectors_wspecifier = po.GetArg(4);
IvectorExtractor extractor;
ReadKaldiObject(ivector_extractor_rxfilename, &extractor);
double tot_objf_impr = 0.0, tot_t = 0.0, tot_length = 0.0,
tot_length_utt_end = 0.0;
int32 num_done = 0, num_err = 0;
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
RandomAccessPosteriorReader posteriors_reader(posteriors_rspecifier);
BaseFloatMatrixWriter ivector_writer(ivectors_wspecifier);
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string utt = feature_reader.Key();
if (!posteriors_reader.HasKey(utt)) {
KALDI_WARN << "No posteriors for utterance " << utt;
num_err++;
continue;
}
const Matrix<BaseFloat> &feats = feature_reader.Value();
const Posterior &posterior = posteriors_reader.Value(utt);
if (static_cast<int32>(posterior.size()) != feats.NumRows()) {
KALDI_WARN << "Size mismatch between posterior " << posterior.size()
<< " and features " << feats.NumRows() << " for utterance "
<< utt;
num_err++;
continue;
}
Matrix<BaseFloat> ivectors;
double objf_impr_per_frame;
objf_impr_per_frame = EstimateIvectorsOnline(
feats, posterior, extractor, ivector_period, num_cg_iters, &ivectors);
BaseFloat offset = extractor.PriorOffset();
for (int32 i = 0; i < ivectors.NumRows(); i++) ivectors(i, 0) -= offset;
double tot_post = TotalPosterior(posterior);
KALDI_VLOG(2) << "For utterance " << utt << " objf impr/frame is "
<< objf_impr_per_frame << " per frame, over " << tot_post
<< " frames (weighted).";
ivector_writer.Write(utt, ivectors);
tot_t += tot_post;
tot_objf_impr += objf_impr_per_frame * tot_post;
tot_length_utt_end +=
ivectors.Row(ivectors.NumRows() - 1).Norm(2.0) * tot_post;
for (int32 i = 0; i < ivectors.NumRows(); i++)
tot_length += ivectors.Row(i).Norm(2.0) * tot_post / ivectors.NumRows();
num_done++;
}
KALDI_LOG << "Estimated iVectors for " << num_done << " files, " << num_err
<< " with errors.";
KALDI_LOG << "Average objective-function improvement was "
<< (tot_objf_impr / tot_t) << " per frame, over " << tot_t
<< " frames (weighted).";
KALDI_LOG << "Average iVector length was " << (tot_length / tot_t)
<< " and at utterance-end was " << (tot_length_utt_end / tot_t)
<< ", over " << tot_t << " frames (weighted); "
<< " expected length is " << sqrt(extractor.IvectorDim());
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/thread/kaldi-semaphore.cc<|end_filename|>
// thread/kaldi-semaphore.cc
// Copyright 2012 <NAME> (Brno University of Technology)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-error.h"
#include "thread/kaldi-semaphore.h"
namespace kaldi {
Semaphore::Semaphore(int32 initValue) {
counter_ = initValue;
if (pthread_mutex_init(&mutex_, NULL) != 0) {
KALDI_ERR << "Cannot initialize pthread mutex";
}
if (pthread_cond_init(&cond_, NULL) != 0) {
KALDI_ERR << "Cannot initialize pthread conditional variable";
}
}
Semaphore::~Semaphore() {
if (pthread_mutex_destroy(&mutex_) != 0) {
KALDI_ERR << "Cannot destroy pthread mutex";
}
if (pthread_cond_destroy(&cond_) != 0) {
KALDI_ERR << "Cannot destroy pthread conditional variable";
}
}
bool Semaphore::TryWait() {
int32 ret = 0;
bool try_wait_succeeded = false;
ret |= pthread_mutex_lock(&mutex_);
if (counter_ > 0) {
counter_--;
try_wait_succeeded = true;
}
ret |= pthread_mutex_unlock(&mutex_);
if (ret != 0) {
KALDI_ERR << "Error in pthreads";
}
return try_wait_succeeded;
}
void Semaphore::Wait() {
int32 ret = 0;
ret |= pthread_mutex_lock(&mutex_);
while (counter_ <= 0) {
ret |= pthread_cond_wait(&cond_, &mutex_);
}
counter_--;
ret |= pthread_mutex_unlock(&mutex_);
if (ret != 0) {
KALDI_ERR << "Error in pthreads";
}
}
void Semaphore::Signal() {
int32 ret = 0;
ret |= pthread_mutex_lock(&mutex_);
counter_++;
ret |= pthread_cond_signal(&cond_);
ret |= pthread_mutex_unlock(&mutex_);
if (ret != 0) {
KALDI_ERR << "Error in pthreads";
}
}
} // namespace kaldi
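// Illustrative usage sketch (not part of Kaldi): the counting semaphore above
// is normally shared between a producer thread, which calls Signal() once per
// item, and a consumer thread, which calls Wait() (or TryWait() for a
// non-blocking poll).  The function below is a hypothetical, single-threaded
// illustration of the call sequence only.
static void ExampleSemaphoreUsage() {
  kaldi::Semaphore items(0);  // no items available initially
  items.Signal();             // "producer": one item becomes available
  items.Wait();               // "consumer": takes it; would block if counter were 0
  if (!items.TryWait()) {
    // counter is back to 0, so the non-blocking attempt fails
  }
}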
<|start_filename|>tonic-suite/nlp/src/SENNA_Hash.cpp<|end_filename|>
#include "SENNA_utils.h"
#include "SENNA_Hash.h"
#define MAX_KEY_SIZE 256
SENNA_Hash *SENNA_Hash_new(const char *path, const char *filename) {
FILE *f;
SENNA_Hash *hash;
char **keys = NULL;
int n_keys;
char key[MAX_KEY_SIZE];
int i;
SENNA_message("loading hash: %s%s", (path ? path : ""),
(filename ? filename : ""));
f = SENNA_fopen(path, filename, "rt"); /* the t is to comply with Windows */
n_keys = 0;
while (fgets(key, MAX_KEY_SIZE, f)) n_keys++;
SENNA_fclose(f);
keys = SENNA_malloc(n_keys, sizeof(char *));
f = SENNA_fopen(path, filename, "rt"); /* the t is to comply with Windows */
n_keys = 0;
while (fgets(key, MAX_KEY_SIZE, f)) {
int key_size = strlen(key);
key[key_size - 1] = '\0'; /* discard the newline */
keys[n_keys] = SENNA_malloc(key_size, sizeof(char));
strcpy(keys[n_keys], key);
n_keys++;
}
SENNA_fclose(f);
hash = SENNA_malloc(sizeof(SENNA_Hash), 1);
hash->keys = keys;
hash->size = n_keys;
hash->is_admissible_key = NULL;
/* sorted or unsorted hash ? */
/* (unsorted cannot return an index for a key) */
hash->is_sorted = 1;
for (i = 0; i < n_keys - 1; i++) {
if (strcmp(keys[i], keys[i + 1]) >= 0) {
hash->is_sorted = 0;
break;
}
}
return hash;
}
SENNA_Hash *SENNA_Hash_new_with_admissible_keys(
const char *path, const char *filename,
const char *admissible_keys_filename) {
SENNA_Hash *hash = SENNA_Hash_new(path, filename);
FILE *f;
int admissiblekeyssize = 0;
f = SENNA_fopen(path, admissible_keys_filename, "rb");
SENNA_fseek(f, 0, SEEK_END);
admissiblekeyssize = SENNA_ftell(f);
if (admissiblekeyssize != hash->size)
SENNA_error("inconsistent hash and admissible key files");
SENNA_fseek(f, 0, SEEK_SET);
hash->is_admissible_key = SENNA_malloc(sizeof(char), admissiblekeyssize);
SENNA_fread(hash->is_admissible_key, 1, admissiblekeyssize, f);
SENNA_fclose(f);
return hash;
}
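/* Collapse the IOBES tagging scheme into IOB: "E-" (end of chunk) becomes
"I-" (inside), and "S-" (single-token chunk) becomes "B-" (begin); other
tags are left unchanged. */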
void SENNA_Hash_convert_IOBES_to_IOB(SENNA_Hash *hash) {
int i;
for (i = 0; i < hash->size; i++) {
char *key = hash->keys[i];
if (strlen(key) < 3) continue;
if ((key[0] == 'E') && (key[1] == '-')) key[0] = 'I';
if ((key[0] == 'S') && (key[1] == '-')) key[0] = 'B';
}
}
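/* Convert IOBES chunk labels to bracket notation: "B-X" -> "(X*", "I-X" -> "*",
"E-X" -> "*)", "S-X" -> "(X*)", and the outside tag "O" -> "*". */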
void SENNA_Hash_convert_IOBES_to_brackets(SENNA_Hash *hash) {
int i, j;
for (i = 0; i < hash->size; i++) {
char *key = hash->keys[i];
int key_size = strlen(key);
if (!strcmp(key, "O")) key[0] = '*';
if (key_size < 3) continue;
if ((key[0] == 'B') && (key[1] == '-')) {
key[0] = '(';
for (j = 1; j < key_size - 1; j++) key[j] = key[j + 1];
key[key_size - 1] = '*';
}
if ((key[0] == 'I') && (key[1] == '-')) {
key[0] = '*';
key[1] = '\0';
}
if ((key[0] == 'E') && (key[1] == '-')) {
key[0] = '*';
key[1] = ')';
key[2] = '\0';
}
if ((key[0] == 'S') && (key[1] == '-')) {
key = SENNA_realloc(key, key_size + 2, sizeof(char));
key[0] = '(';
for (j = 1; j < key_size - 1; j++) key[j] = key[j + 1];
key[key_size - 1] = '*';
key[key_size] = ')';
key[key_size + 1] = '\0';
hash->keys[i] = key;
}
}
}
void SENNA_Hash_free(SENNA_Hash *hash) {
int i;
for (i = 0; i < hash->size; i++) SENNA_free(hash->keys[i]);
SENNA_free(hash->keys);
if (hash->is_admissible_key) SENNA_free(hash->is_admissible_key);
SENNA_free(hash);
}
int SENNA_Hash_index(SENNA_Hash *hash, const char *key) {
char **keys = hash->keys;
int idxinf = 0;
int idxsup = hash->size - 1;
if (!hash->is_sorted)
SENNA_error("cannot search a key into an unsorted hash");
if (strcmp(key, keys[idxinf]) < 0 || strcmp(key, keys[idxsup]) > 0) return -1;
while (idxinf <= idxsup) {
int idxmiddle = (idxsup + idxinf) / 2;
int status = strcmp(key, keys[idxmiddle]);
/* printf("%d %d %d [%d]\n", idxinf, idxmiddle, idxsup, status); */
if (status < 0)
idxsup = idxmiddle - 1;
else if (status > 0)
idxinf = idxmiddle + 1;
else
return idxmiddle;
}
return -1;
}
const char *SENNA_Hash_key(SENNA_Hash *hash, int idx) {
if ((idx < 0) || (idx >= hash->size)) SENNA_error("hash index out of bounds");
return hash->keys[idx];
}
int SENNA_Hash_size(SENNA_Hash *hash) { return hash->size; }
char SENNA_Hash_is_admissible_index(SENNA_Hash *hash, int idx) {
if (!hash->is_admissible_key)
SENNA_error("hash does not handle admissible keys");
if ((idx < 0) || (idx >= hash->size)) SENNA_error("hash index out of bounds");
return hash->is_admissible_key[idx];
}
<|start_filename|>tonic-suite/asr/src/online2bin/online2-wav-dump-features.cc<|end_filename|>
// onlinebin/online2-wav-dump-features.cc
// Copyright 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "feat/wave-reader.h"
#include "online2/online-nnet2-decoding.h"
#include "online2/onlinebin-util.h"
#include "online2/online-timing.h"
#include "online2/online-endpoint.h"
#include "fstext/fstext-lib.h"
#include "lat/lattice-functions.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace fst;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
const char *usage =
"Reads in wav file(s) and processes them as in "
"online2-wav-nnet2-latgen-faster,\n"
"but instead of decoding, dumps the features. Most of the parameters\n"
"are set via configuration variables.\n"
"\n"
"Usage: online2-wav-dump-features [options] <spk2utt-rspecifier> "
"<wav-rspecifier> <feature-wspecifier>\n"
"The spk2utt-rspecifier can just be <utterance-id> <utterance-id> if\n"
"you want to generate features utterance by utterance.\n"
"See steps/online/nnet2/dump_nnet_activations.sh for an example.\n";
ParseOptions po(usage);
// feature_config includes configuration for the iVector adaptation,
// as well as the basic features.
OnlineNnet2FeaturePipelineConfig feature_config;
BaseFloat chunk_length_secs = 0.05;
po.Register("chunk-length", &chunk_length_secs,
"Length of chunk size in seconds, that we process.");
feature_config.Register(&po);
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
return 1;
}
std::string spk2utt_rspecifier = po.GetArg(1),
wav_rspecifier = po.GetArg(2), feats_wspecifier = po.GetArg(3);
OnlineNnet2FeaturePipelineInfo feature_info(feature_config);
int32 num_done = 0, num_err = 0;
int64 num_frames_tot = 0;
SequentialTokenVectorReader spk2utt_reader(spk2utt_rspecifier);
RandomAccessTableReader<WaveHolder> wav_reader(wav_rspecifier);
BaseFloatMatrixWriter feats_writer(feats_wspecifier);
for (; !spk2utt_reader.Done(); spk2utt_reader.Next()) {
std::string spk = spk2utt_reader.Key();
const std::vector<std::string> &uttlist = spk2utt_reader.Value();
OnlineIvectorExtractorAdaptationState adaptation_state(
feature_info.ivector_extractor_info);
for (size_t i = 0; i < uttlist.size(); i++) {
std::string utt = uttlist[i];
if (!wav_reader.HasKey(utt)) {
KALDI_WARN << "Did not find audio for utterance " << utt;
num_err++;
continue;
}
const WaveData &wave_data = wav_reader.Value(utt);
// get the data for channel zero (if the signal is not mono, we only
// take the first channel).
SubVector<BaseFloat> data(wave_data.Data(), 0);
OnlineNnet2FeaturePipeline feature_pipeline(feature_info);
feature_pipeline.SetAdaptationState(adaptation_state);
std::vector<Vector<BaseFloat> *> feature_data;
// We retrieve data from the feature pipeline while adding the wav data
// bit
// by bit... for features like pitch features, this may make a
// difference to what we get, and we want to make sure that the data we
// get is exactly compatible with online decoding.
BaseFloat samp_freq = wave_data.SampFreq();
int32 chunk_length = int32(samp_freq * chunk_length_secs);
if (chunk_length == 0) chunk_length = 1;
int32 samp_offset = 0;
while (samp_offset < data.Dim()) {
int32 samp_remaining = data.Dim() - samp_offset;
int32 num_samp =
chunk_length < samp_remaining ? chunk_length : samp_remaining;
SubVector<BaseFloat> wave_part(data, samp_offset, num_samp);
feature_pipeline.AcceptWaveform(samp_freq, wave_part);
samp_offset += num_samp;
if (samp_offset ==
data.Dim()) // no more input. flush out last frames
feature_pipeline.InputFinished();
while (static_cast<int32>(feature_data.size()) <
feature_pipeline.NumFramesReady()) {
int32 t = static_cast<int32>(feature_data.size());
feature_data.push_back(
new Vector<BaseFloat>(feature_pipeline.Dim(), kUndefined));
feature_pipeline.GetFrame(t, feature_data.back());
}
}
int32 T = static_cast<int32>(feature_data.size());
if (T == 0) {
KALDI_WARN << "Got no frames of data for utterance " << utt;
num_err++;
continue;
}
Matrix<BaseFloat> feats(T, feature_pipeline.Dim());
for (int32 t = 0; t < T; t++) {
feats.Row(t).CopyFromVec(*(feature_data[t]));
delete feature_data[t];
}
num_frames_tot += T;
feats_writer.Write(utt, feats);
feature_pipeline.GetAdaptationState(&adaptation_state);
num_done++;
}
}
KALDI_LOG << "Processed " << num_done << " utterances, " << num_err
<< " with errors; " << num_frames_tot << " frames in total.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
} // main()
<|start_filename|>tonic-suite/asr/src/transform/exponential-transform.h<|end_filename|>
// transform/exponential-transform.h
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_TRANSFORM_EXPONENTIAL_TRANSFORM_H_
#define KALDI_TRANSFORM_EXPONENTIAL_TRANSFORM_H_
#include "base/kaldi-common.h"
#include "matrix/matrix-lib.h"
#include "transform/fmllr-diag-gmm.h"
namespace kaldi {
// We define an exponential transform as a transform of the form
// W_s = D_s exp(t_s A) B, which takes x^+ -> x (where ^+ is adding a one);
// only t_s and D_s are speaker-specific. It is roughly analogous to the log
// of the vtln warp factor.
// D_s is either a diagonal or an offset-only fMLLR matrix (or just
// the "default" transform [ I ; 0 ]), depending on options.
// "exp" here is matrix exponential, defined by exp(A) = I + A + 1/2! A A + 1/3!
// A A A + ...
// note that the last row of A is 0 0 0 ... and the last row of B is
// 0 0 0 ... 0 1. The "globally trained" things are A and B.
// We train A and B on separate iterations.
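// Illustrative sketch (not part of Kaldi): the matrix exponential used above
// can be approximated by the truncated Taylor series
// exp(tA) ~= I + tA + (tA)^2/2! + ... + (tA)^n/n!.
// The helper below is hypothetical and only makes that definition concrete;
// it is not used by the actual exponential-transform code.
inline void ExampleMatrixExponential(const MatrixBase<BaseFloat> &A,
                                     BaseFloat t, int32 num_terms,
                                     Matrix<BaseFloat> *exp_tA) {
  int32 d = A.NumRows();
  exp_tA->Resize(d, d);
  exp_tA->SetUnit();               // running sum, starting from I
  Matrix<BaseFloat> term(d, d), next(d, d);
  term.SetUnit();                  // current term (tA)^k / k!, starting at k = 0
  for (int32 k = 1; k <= num_terms; k++) {
    // next = term * (tA) / k, i.e. (tA)^k / k!
    next.AddMatMat(t / k, term, kNoTrans, A, kNoTrans, 0.0);
    term.CopyFromMat(next);
    exp_tA->AddMat(1.0, term);
  }
}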
enum EtNormalizeType { kEtNormalizeOffset, kEtNormalizeDiag, kEtNormalizeNone };
// Note: Revision 121 corresponds to the submitted version of the ASRU paper.
// There has been a correction to the update for A since then.
class ExponentialTransformAccsA;
// Class ExponentialTransform holds just the globally shared parts of the
// exponential
// transform, i.e. A_ and B_.
class ExponentialTransform {
public:
ExponentialTransform() {} // typically use this constructor only prior to
// calling Read().
ExponentialTransform(int32 dim, EtNormalizeType norm_type, int32 seed = 0) {
Init(dim, norm_type, seed);
}
void Init(
int32 dim, EtNormalizeType norm_type,
int32 seed = 0); // Initializes A to a pseudo-random unit-norm matrix
// (with last row zero), and B to unity. "dim" is the feature dim, so both A
// and B
// are of dimension dim+1
// SetNormalizeType sets the normalization type to this. But it only allows
// you to increase the normalization type, i.e. None->Offset or Diag
// or Offset->Diag
void SetNormalizeType(EtNormalizeType norm_type);
// ComputeTransform does not attempt to work out the objective function
// change,
// because of possible confusion about what the correct baseline should be.
// You can use FmllrAuxFuncDiagGmm to measure the change.
void ComputeTransform(const FmllrDiagGmmAccs &accs,
MatrixBase<BaseFloat> *Ws, // output fMLLR transform,
// should be size dim x
// dim+1
BaseFloat *t, MatrixBase<BaseFloat> *Ds,
BaseFloat *objf_impr = NULL, // versus just B
BaseFloat *count = NULL);
int32 Dim() const { return A_.NumRows() - 1; } // returns feature dim.
// Ds is the first term in
// fmllr_mat = W_s = D_s exp(t_s A) B, which is a diagonal-only
// fMLLR (or possibly
// just mean-offset or [ I; 0 ], depending on whether norm_type_ is
// {Diag, Offset, None}).
void Write(std::ostream &os, bool binary) const;
void Read(std::istream &is, bool binary);
/// Returns B minus its last row, which is the closest thing to a "default
/// transform"
/// that we have.
void GetDefaultTransform(Matrix<BaseFloat> *transform) const;
void GetATransform(Matrix<BaseFloat> *transform) const {
transform->CopyFromMat(A_);
}
/// Make B unit; this can be useful for combining the B part of the
/// transform with MLLT.
void MakeBUnit() { B_.SetUnit(); }
void ComputeDs(const MatrixBase<BaseFloat> &Ws, BaseFloat t,
MatrixBase<BaseFloat> *Ds) const; // Computes the D_s matrix,
// given W_s and the value of t.
// takes "Cpart" which is a d x d STC/MLLT matrix, and applies
// it to the transform by doing A <-- C A C^{-1}, B <-- C B,
// where C is Cpart extended with an extra row and column with values
// equal to those in the unit matrix.
void ApplyC(const MatrixBase<BaseFloat> &Cpart);
friend class ExponentialTransformAccsA;
friend class ExponentialTransformAccsANew;
protected:
Matrix<BaseFloat> A_; // d+1 by d+1 matrix; last row 0 0 0 .. 0 0.
Matrix<BaseFloat> B_; // d+1 by d+1 matrix; last row 0 0 0 .. 0 1.
EtNormalizeType norm_type_; // tells us how to train D_s.
private:
static void ComposeAffineTransforms(const MatrixBase<BaseFloat> &A,
const MatrixBase<BaseFloat> &B,
MatrixBase<BaseFloat> *C);
};
struct ExponentialTransformUpdateAOptions {
BaseFloat learning_rate;
bool renormalize; // renormalize A and recenter the warp factors on each
// iteration...
ExponentialTransformUpdateAOptions()
: learning_rate(1.0), renormalize(true) {}
void Register(OptionsItf *po) {
po->Register(
"learning-rate", &learning_rate,
"Learning rate for updating A (make <1 if instability suspected)\n");
po->Register("renormalize", &renormalize,
"True if you want to renormalize the warp factors on each "
"iteration of update (recommended).");
}
};
class ExponentialTransformAccsA {
public:
// This class does the accumulation and update for the "A" part of the
// global transform.
// AccumulateForSpeaker does the accumulation for the speaker,
// given standard fMLLR accs that have been accumulated given the
// un-transformed data.
void AccumulateForSpeaker(const FmllrDiagGmmAccs &accs,
const ExponentialTransform &et,
const MatrixBase<BaseFloat> &Ds, BaseFloat t);
ExponentialTransformAccsA() {
} // typically use this constructor prior to Read().
ExponentialTransformAccsA(int32 dim) { Init(dim); }
void Init(int32 dim);
void Write(std::ostream &os, bool binary) const;
void Read(std::istream &is, bool binary, bool add = false);
// Updates the matrix A (also changes B as a side effect).
void Update(const ExponentialTransformUpdateAOptions &opts,
ExponentialTransform *et, BaseFloat *objf_impr, BaseFloat *count);
private:
double beta_; // sum of speaker betas. for diagnostics.
double beta_t_; // sum of speaker betas times T. for log-det term.
std::vector<SpMatrix<double> > G_; // Like the G stats of
// fMLLR, taken after the B transform. Summed over speakers and
// weighted by t^2.
Matrix<double> Ahat_; // local gradient w.r.t. the first d rows of A.
// note, \hat{A} in the paper has an extra row;
// this is never used.
};
} // End namespace kaldi
#endif // KALDI_TRANSFORM_EXPONENTIAL_TRANSFORM_H_
<|start_filename|>tonic-suite/nlp/src/SENNA_SRL.cpp<|end_filename|>
#include <sys/time.h>
#include "SENNA_SRL.h"
#include "SENNA_utils.h"
#include "SENNA_nn.h"
#include "socket.h"
int **SENNA_SRL_forward(SENNA_SRL *srl, const int *sentence_words,
const int *sentence_caps, const int *sentence_chkl,
const int *sentence_isvb, int sentence_size,
int socketfd) {
int vbidx;
int idx;
int n_verbs = 0;
int i;
struct timeval tv1, tv2;
gettimeofday(&tv1, NULL);
srl->sentence_posv = SENNA_realloc(srl->sentence_posv, sizeof(int),
sentence_size + srl->window_size - 1);
srl->sentence_posw = SENNA_realloc(srl->sentence_posw, sizeof(int),
sentence_size + srl->window_size - 1);
srl->input_state_wcc = SENNA_realloc(
srl->input_state_wcc, sizeof(float),
(sentence_size + srl->window_size - 1) *
(srl->ll_word_size + srl->ll_caps_size + srl->ll_chkl_size));
srl->input_state_pv =
SENNA_realloc(srl->input_state_pv, sizeof(float),
(sentence_size + srl->window_size - 1) * srl->ll_posv_size);
srl->input_state_pw =
SENNA_realloc(srl->input_state_pw, sizeof(float),
(sentence_size + srl->window_size - 1) * srl->ll_posw_size);
srl->hidden_state1_wcc =
SENNA_realloc(srl->hidden_state1_wcc, sizeof(float),
sentence_size * srl->hidden_state1_size);
srl->hidden_state1_pv =
SENNA_realloc(srl->hidden_state1_pv, sizeof(float),
sentence_size * srl->hidden_state1_size);
srl->hidden_state1_pw =
SENNA_realloc(srl->hidden_state1_pw, sizeof(float),
sentence_size * srl->hidden_state1_size);
srl->hidden_state1 = SENNA_realloc(srl->hidden_state1, sizeof(float),
sentence_size * srl->hidden_state1_size);
srl->hidden_state2 =
SENNA_realloc(srl->hidden_state2, sizeof(float), srl->hidden_state1_size);
srl->hidden_state3 =
SENNA_realloc(srl->hidden_state3, sizeof(float), srl->hidden_state3_size);
srl->output_state = SENNA_realloc(srl->output_state, sizeof(float),
sentence_size * srl->output_state_size);
/* words and caps are common for all words and all verbs */
SENNA_nn_lookup(srl->input_state_wcc,
srl->ll_word_size + srl->ll_caps_size + srl->ll_chkl_size,
srl->ll_word_weight, srl->ll_word_size, srl->ll_word_max_idx,
sentence_words, sentence_size, srl->ll_word_padding_idx,
(srl->window_size - 1) / 2);
SENNA_nn_lookup(srl->input_state_wcc + srl->ll_word_size,
srl->ll_word_size + srl->ll_caps_size + srl->ll_chkl_size,
srl->ll_caps_weight, srl->ll_caps_size, srl->ll_caps_max_idx,
sentence_caps, sentence_size, srl->ll_caps_padding_idx,
(srl->window_size - 1) / 2);
SENNA_nn_lookup(srl->input_state_wcc + srl->ll_word_size + srl->ll_caps_size,
srl->ll_word_size + srl->ll_caps_size + srl->ll_chkl_size,
srl->ll_chkl_weight, srl->ll_chkl_size, srl->ll_chkl_max_idx,
sentence_chkl, sentence_size, srl->ll_chkl_padding_idx,
(srl->window_size - 1) / 2);
SENNA_nn_temporal_convolution(
srl->hidden_state1_wcc, srl->hidden_state1_size, srl->l1_weight_wcc,
srl->l1_bias, srl->input_state_wcc,
srl->ll_word_size + srl->ll_caps_size + srl->ll_chkl_size,
sentence_size + srl->window_size - 1, srl->window_size);
gettimeofday(&tv2, NULL);
srl->apptime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
/* for all verbs... */
for (vbidx = 0; vbidx < sentence_size; vbidx++) {
gettimeofday(&tv1, NULL);
if (!sentence_isvb[vbidx]) continue;
SENNA_nn_distance(srl->sentence_posv, vbidx, srl->ll_posv_max_idx,
sentence_size, (srl->window_size - 1) / 2);
SENNA_nn_lookup(srl->input_state_pv, srl->ll_posv_size, srl->ll_posv_weight,
srl->ll_posv_size, srl->ll_posv_max_idx, srl->sentence_posv,
sentence_size + srl->window_size - 1, 0, 0);
SENNA_nn_temporal_convolution(
srl->hidden_state1_pv, srl->hidden_state1_size, srl->l1_weight_pv, NULL,
srl->input_state_pv, srl->ll_posv_size,
sentence_size + srl->window_size - 1, srl->window_size);
gettimeofday(&tv2, NULL);
srl->apptime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
/* for all words... */
for (idx = 0; idx < sentence_size; idx++) {
gettimeofday(&tv1, NULL);
SENNA_nn_distance(srl->sentence_posw, idx, srl->ll_posw_max_idx,
sentence_size, (srl->window_size - 1) / 2);
SENNA_nn_lookup(srl->input_state_pw, srl->ll_posw_size,
srl->ll_posw_weight, srl->ll_posw_size,
srl->ll_posw_max_idx, srl->sentence_posw,
sentence_size + srl->window_size - 1, 0, 0);
SENNA_nn_temporal_convolution(
srl->hidden_state1_pw, srl->hidden_state1_size, srl->l1_weight_pw,
NULL, srl->input_state_pw, srl->ll_posw_size,
sentence_size + srl->window_size - 1, srl->window_size);
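/* sum the three first-layer convolution outputs (word/caps/chunk,
verb position, word position) before max-pooling over time */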
memcpy(srl->hidden_state1, srl->hidden_state1_wcc,
sizeof(float) * srl->hidden_state1_size * sentence_size);
for (i = 0; i < srl->hidden_state1_size * sentence_size; i++)
srl->hidden_state1[i] += srl->hidden_state1_pv[i];
for (i = 0; i < srl->hidden_state1_size * sentence_size; i++)
srl->hidden_state1[i] += srl->hidden_state1_pw[i];
SENNA_nn_temporal_max(srl->hidden_state2, srl->hidden_state1,
srl->hidden_state1_size, sentence_size);
gettimeofday(&tv2, NULL);
srl->apptime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
gettimeofday(&tv1, NULL);
if (srl->service) {
SOCKET_send(socketfd, (char *)(srl->hidden_state2),
srl->hidden_state1_size * sizeof(float), srl->debug);
SOCKET_receive(socketfd, (char *)(srl->output_state +
idx * srl->output_state_size),
srl->output_state_size * sizeof(float), srl->debug);
} else {
SENNA_nn_linear(srl->hidden_state3, srl->hidden_state3_size,
srl->l3_weight, srl->l3_bias, srl->hidden_state2,
srl->hidden_state1_size);
SENNA_nn_hardtanh(srl->hidden_state3, srl->hidden_state3,
srl->hidden_state3_size);
SENNA_nn_linear(srl->output_state + idx * srl->output_state_size,
srl->output_state_size, srl->l4_weight, srl->l4_bias,
srl->hidden_state3, srl->hidden_state3_size);
}
gettimeofday(&tv2, NULL);
srl->dnntime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
srl->calls++;
}
gettimeofday(&tv1, NULL);
if (n_verbs >= srl->labels_size) {
srl->labels = SENNA_realloc(srl->labels, sizeof(int *), n_verbs + 1);
for (i = srl->labels_size; i < n_verbs + 1; i++) srl->labels[i] = NULL;
srl->labels_size = n_verbs + 1;
}
srl->labels[n_verbs] =
SENNA_realloc(srl->labels[n_verbs], sizeof(int), sentence_size);
SENNA_nn_viterbi(srl->labels[n_verbs], srl->viterbi_score_init,
srl->viterbi_score_trans, srl->output_state,
srl->output_state_size, sentence_size);
n_verbs++;
gettimeofday(&tv2, NULL);
srl->apptime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
}
return srl->labels;
}
SENNA_SRL *SENNA_SRL_new(const char *path, const char *subpath) {
SENNA_SRL *srl = SENNA_malloc(sizeof(SENNA_SRL), 1);
FILE *f;
float dummy;
int dummy_size;
f = SENNA_fopen(path, subpath, "rb");
SENNA_fread(&srl->window_size, sizeof(int), 1, f);
SENNA_fread_tensor_2d(&srl->ll_word_weight, &srl->ll_word_size,
&srl->ll_word_max_idx, f);
SENNA_fread_tensor_2d(&srl->ll_caps_weight, &srl->ll_caps_size,
&srl->ll_caps_max_idx, f);
SENNA_fread_tensor_2d(&srl->ll_chkl_weight, &srl->ll_chkl_size,
&srl->ll_chkl_max_idx, f);
SENNA_fread_tensor_2d(&srl->ll_posv_weight, &srl->ll_posv_size,
&srl->ll_posv_max_idx, f);
SENNA_fread_tensor_2d(&srl->ll_posw_weight, &srl->ll_posw_size,
&srl->ll_posw_max_idx, f);
SENNA_fread_tensor_2d(&srl->l1_weight_wcc, &dummy_size,
&srl->hidden_state1_size, f);
SENNA_fread_tensor_2d(&srl->l1_weight_pv, &dummy_size,
&srl->hidden_state1_size, f);
SENNA_fread_tensor_2d(&srl->l1_weight_pw, &dummy_size,
&srl->hidden_state1_size, f);
SENNA_fread_tensor_1d(&srl->l1_bias, &srl->hidden_state1_size, f);
SENNA_fread_tensor_2d(&srl->l3_weight, &srl->hidden_state1_size,
&srl->hidden_state3_size, f);
SENNA_fread_tensor_1d(&srl->l3_bias, &srl->hidden_state3_size, f);
SENNA_fread_tensor_2d(&srl->l4_weight, &srl->hidden_state3_size,
&srl->output_state_size, f);
SENNA_fread_tensor_1d(&srl->l4_bias, &srl->output_state_size, f);
SENNA_fread_tensor_1d(&srl->viterbi_score_init, &srl->output_state_size, f);
SENNA_fread_tensor_2d(&srl->viterbi_score_trans, &srl->output_state_size,
&srl->output_state_size, f);
SENNA_fread(&srl->ll_word_padding_idx, sizeof(int), 1, f);
SENNA_fread(&srl->ll_caps_padding_idx, sizeof(int), 1, f);
SENNA_fread(&srl->ll_chkl_padding_idx, sizeof(int), 1, f);
SENNA_fread(&dummy, sizeof(float), 1, f);
SENNA_fclose(f);
if ((int)dummy != 777)
SENNA_error("srl: data corrupted (or not IEEE floating computer)");
/* states */
srl->sentence_posv = NULL;
srl->sentence_posw = NULL;
srl->input_state = NULL;
srl->input_state_wcc = NULL;
srl->input_state_pv = NULL;
srl->input_state_pw = NULL;
srl->hidden_state1 = NULL;
srl->hidden_state1_wcc = NULL;
srl->hidden_state1_pv = NULL;
srl->hidden_state1_pw = NULL;
srl->hidden_state2 = NULL;
srl->hidden_state3 = NULL;
srl->output_state = NULL;
srl->labels = NULL;
srl->labels_size = 0;
srl->service = false;
srl->debug = false;
srl->calls = 0;
srl->dnntime = 0;
srl->apptime = 0;
/* some info if you want verbose */
SENNA_message("srl: window size: %d", srl->window_size);
SENNA_message("srl: vector size in word lookup table: %d", srl->ll_word_size);
SENNA_message("srl: word lookup table size: %d", srl->ll_word_max_idx);
SENNA_message("srl: vector size in caps lookup table: %d", srl->ll_caps_size);
SENNA_message("srl: caps lookup table size: %d", srl->ll_caps_max_idx);
SENNA_message("srl: vector size in verb position lookup table: %d",
srl->ll_posv_size);
SENNA_message("srl: verb position lookup table size: %d",
srl->ll_posv_max_idx);
SENNA_message("srl: vector size in word position lookup table: %d",
srl->ll_posw_size);
SENNA_message("srl: word position lookup table size: %d",
srl->ll_posw_max_idx);
SENNA_message("srl: number of hidden units (convolution): %d",
srl->hidden_state1_size);
SENNA_message("srl: number of hidden units (hidden layer): %d",
srl->hidden_state3_size);
SENNA_message("srl: number of classes: %d", srl->output_state_size);
return srl;
}
void SENNA_SRL_free(SENNA_SRL *srl) {
int i;
/* weights */
SENNA_free(srl->ll_word_weight);
SENNA_free(srl->ll_caps_weight);
SENNA_free(srl->ll_chkl_weight);
SENNA_free(srl->ll_posv_weight);
SENNA_free(srl->ll_posw_weight);
SENNA_free(srl->l1_weight_wcc);
SENNA_free(srl->l1_weight_pv);
SENNA_free(srl->l1_weight_pw);
SENNA_free(srl->l1_bias);
SENNA_free(srl->l3_weight);
SENNA_free(srl->l3_bias);
SENNA_free(srl->l4_weight);
SENNA_free(srl->l4_bias);
SENNA_free(srl->viterbi_score_init);
SENNA_free(srl->viterbi_score_trans);
/* extra inputs */
SENNA_free(srl->sentence_posw);
SENNA_free(srl->sentence_posv);
/* states */
SENNA_free(srl->input_state);
SENNA_free(srl->input_state_wcc);
SENNA_free(srl->input_state_pv);
SENNA_free(srl->input_state_pw);
SENNA_free(srl->hidden_state1);
SENNA_free(srl->hidden_state1_wcc);
SENNA_free(srl->hidden_state1_pv);
SENNA_free(srl->hidden_state1_pw);
SENNA_free(srl->hidden_state2);
SENNA_free(srl->hidden_state3);
SENNA_free(srl->output_state);
for (i = 0; i < srl->labels_size; i++) SENNA_free(srl->labels[i]);
SENNA_free(srl->labels);
/* the end */
SENNA_free(srl);
}
<|start_filename|>tonic-suite/asr/src/nnet2/online-nnet2-decodable.cc<|end_filename|>
// nnet2/online-nnet2-decodable.cc
// Copyright 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet2/online-nnet2-decodable.h"
namespace kaldi {
namespace nnet2 {
DecodableNnet2Online::DecodableNnet2Online(
const AmNnet &nnet, const TransitionModel &trans_model,
const DecodableNnet2OnlineOptions &opts,
OnlineFeatureInterface *input_feats)
: features_(input_feats),
nnet_(nnet),
trans_model_(trans_model),
opts_(opts),
feat_dim_(input_feats->Dim()),
left_context_(nnet.GetNnet().LeftContext()),
right_context_(nnet.GetNnet().RightContext()),
num_pdfs_(nnet.GetNnet().OutputDim()),
begin_frame_(-1) {
KALDI_ASSERT(opts_.max_nnet_batch_size > 0);
log_priors_ = nnet_.Priors();
KALDI_ASSERT(log_priors_.Dim() == trans_model_.NumPdfs() &&
"Priors in neural network not set up (or mismatch "
"with transition model).");
log_priors_.ApplyLog();
}
BaseFloat DecodableNnet2Online::LogLikelihood(int32 frame, int32 index) {
ComputeForFrame(frame);
int32 pdf_id = trans_model_.TransitionIdToPdf(index);
KALDI_ASSERT(frame >= begin_frame_ &&
frame < begin_frame_ + scaled_loglikes_.NumRows());
return scaled_loglikes_(frame - begin_frame_, pdf_id);
}
bool DecodableNnet2Online::IsLastFrame(int32 frame) const {
if (opts_.pad_input) { // normal case
return features_->IsLastFrame(frame);
} else {
return features_->IsLastFrame(frame + left_context_ + right_context_);
}
}
int32 DecodableNnet2Online::NumFramesReady() const {
int32 features_ready = features_->NumFramesReady();
if (features_ready == 0) return 0;
bool input_finished = features_->IsLastFrame(features_ready - 1);
if (opts_.pad_input) {
// normal case... we'll pad with duplicates of first + last frame to get the
// required left and right context.
if (input_finished)
return features_ready;
else
return std::max<int32>(0, features_ready - right_context_);
} else {
return std::max<int32>(0, features_ready - right_context_ - left_context_);
}
}
void DecodableNnet2Online::ComputeForFrame(int32 frame) {
int32 features_ready = features_->NumFramesReady();
bool input_finished = features_->IsLastFrame(features_ready - 1);
KALDI_ASSERT(frame >= 0);
if (frame >= begin_frame_ &&
frame < begin_frame_ + scaled_loglikes_.NumRows())
return;
KALDI_ASSERT(frame < NumFramesReady());
int32 input_frame_begin;
if (opts_.pad_input)
input_frame_begin = frame - left_context_;
else
input_frame_begin = frame;
int32 max_possible_input_frame_end = features_ready;
if (input_finished && opts_.pad_input)
max_possible_input_frame_end += right_context_;
int32 input_frame_end =
std::min<int32>(max_possible_input_frame_end,
input_frame_begin + left_context_ + right_context_ +
opts_.max_nnet_batch_size);
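// The min() above caps the batch so that at most opts_.max_nnet_batch_size
// output frames are computed per call; frames beyond that are computed
// lazily on later calls.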
KALDI_ASSERT(input_frame_end > input_frame_begin);
Matrix<BaseFloat> features(input_frame_end - input_frame_begin, feat_dim_);
for (int32 t = input_frame_begin; t < input_frame_end; t++) {
SubVector<BaseFloat> row(features, t - input_frame_begin);
int32 t_modified = t;
// The next two if-statements take care of "pad_input"
if (t_modified < 0) t_modified = 0;
if (t_modified >= features_ready) t_modified = features_ready - 1;
features_->GetFrame(t_modified, &row);
}
CuMatrix<BaseFloat> cu_features;
cu_features.Swap(&features); // Copy to GPU, if we're using one.
int32 num_frames_out =
input_frame_end - input_frame_begin - left_context_ - right_context_;
CuMatrix<BaseFloat> cu_posteriors(num_frames_out, num_pdfs_);
// The "false" below tells it not to pad the input: we've already done
// any padding that we needed to do.
NnetComputation(nnet_.GetNnet(), cu_features, false, &cu_posteriors);
cu_posteriors.ApplyFloor(1.0e-20); // Avoid log of zero which leads to NaN.
cu_posteriors.ApplyLog();
// subtract log-prior (divide by prior)
cu_posteriors.AddVecToRows(-1.0, log_priors_);
// apply probability scale.
cu_posteriors.Scale(opts_.acoustic_scale);
// Transfer the scores to the CPU for faster access by the
// decoding process.
scaled_loglikes_.Resize(0, 0);
cu_posteriors.Swap(&scaled_loglikes_);
begin_frame_ = frame;
}
} // namespace nnet2
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-acc-mllt-global.cc<|end_filename|>
// gmmbin/gmm-acc-mllt-global.cc
// Copyright 2009-2011 Microsoft Corporation
// 2014 <NAME>
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "transform/mllt.h"
#include "hmm/posterior.h"
int main(int argc, char *argv[]) {
using namespace kaldi;
try {
const char *usage =
"Accumulate MLLT (global STC) statistics: this version is for where "
"there is\n"
"one global GMM (e.g. a UBM)\n"
"Usage: gmm-acc-mllt-global [options] <gmm-in> <feature-rspecifier> "
"<stats-out>\n"
"e.g.: \n"
" gmm-acc-mllt-global 1.dubm scp:feats.scp 1.macc\n";
ParseOptions po(usage);
bool binary = true;
BaseFloat rand_prune = 0.25;
std::string gselect_rspecifier;
po.Register("binary", &binary, "Write output in binary mode");
po.Register("rand-prune", &rand_prune,
"Randomized pruning parameter to speed up "
"accumulation (larger -> more pruning. May exceed one).");
po.Register("gselect", &gselect_rspecifier,
"Rspecifier for Gaussian selection "
"information");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string gmm_filename = po.GetArg(1), feature_rspecifier = po.GetArg(2),
accs_wxfilename = po.GetArg(3);
using namespace kaldi;
typedef kaldi::int32 int32;
DiagGmm gmm;
ReadKaldiObject(gmm_filename, &gmm);
MlltAccs mllt_accs(gmm.Dim(), rand_prune);
double tot_like = 0.0;
double tot_t = 0.0;
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
RandomAccessInt32VectorVectorReader gselect_reader(gselect_rspecifier);
int32 num_done = 0, num_err = 0;
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string utt = feature_reader.Key();
const Matrix<BaseFloat> &mat = feature_reader.Value();
num_done++;
BaseFloat tot_like_this_file = 0.0, tot_weight = 0.0;
if (gselect_rspecifier == "") {
for (int32 i = 0; i < mat.NumRows(); i++) {
tot_like_this_file +=
mllt_accs.AccumulateFromGmm(gmm, mat.Row(i), 1.0);
tot_weight += 1.0;
}
} else {
if (!gselect_reader.HasKey(utt)) {
KALDI_WARN << "No gselect information for utterance " << utt;
num_err++;
continue;
}
const std::vector<std::vector<int32> > &gselect =
gselect_reader.Value(utt);
if (static_cast<int32>(gselect.size()) != mat.NumRows()) {
KALDI_WARN << "Gselect information has wrong size for utterance "
<< utt << ", " << gselect.size() << " vs. "
<< mat.NumRows();
num_err++;
continue;
}
for (int32 i = 0; i < mat.NumRows(); i++) {
tot_like_this_file += mllt_accs.AccumulateFromGmmPreselect(
gmm, gselect[i], mat.Row(i), 1.0);
tot_weight += 1.0;
}
}
KALDI_LOG << "Average like for this file is "
<< (tot_like_this_file / tot_weight) << " over " << tot_weight
<< " frames.";
tot_like += tot_like_this_file;
tot_t += tot_weight;
if (num_done % 10 == 0)
KALDI_LOG << "Avg like per frame so far is " << (tot_like / tot_t);
}
KALDI_LOG << "Done " << num_done << " files. ";
KALDI_LOG << "Overall avg like per frame (Gaussian only) = "
<< (tot_like / tot_t) << " over " << tot_t << " frames.";
WriteKaldiObject(mllt_accs, accs_wxfilename, binary);
KALDI_LOG << "Written accs.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/fstbin/fsts-to-transcripts.cc<|end_filename|>
// fstbin/fsts-to-transcripts.cc
// Copyright 2012-2013 Johns Hopkins University (Authors: <NAME>, <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "fstext/fstext-utils.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace fst;
typedef kaldi::int32 int32;
typedef kaldi::uint64 uint64;
const char *usage =
"Reads a table of FSTs; for each element, finds the best path and "
"prints out the\n"
"output-symbol sequence (if --output-side=true), or input-symbol "
"sequence"
"otherwise.\n"
"\n"
"Usage: fsts-to-transcripts [options] fsts-rspecifier "
"transcriptions-wspecifier\n"
" e.g.: fsts-to-transcripts ark:train.fsts ark,t:train.text\n";
ParseOptions po(usage);
bool output_side = true;
po.Register("output-side", &output_side,
"If true, extract the symbols on the output\n"
"side of the FSTs, else the input side.");
po.Read(argc, argv);
if (po.NumArgs() < 2 || po.NumArgs() > 3) {
po.PrintUsage();
exit(1);
}
std::string fst_rspecifier = po.GetArg(1),
transcript_wspecifier = po.GetOptArg(2);
SequentialTableReader<VectorFstHolder> fst_reader(fst_rspecifier);
Int32VectorWriter transcript_writer(transcript_wspecifier);
int32 n_done = 0, n_err = 0;
for (; !fst_reader.Done(); fst_reader.Next()) {
std::string key = fst_reader.Key();
const VectorFst<StdArc> &fst = fst_reader.Value();
VectorFst<StdArc> shortest_path;
ShortestPath(fst, &shortest_path); // the OpenFst algorithm ShortestPath.
if (shortest_path.NumStates() == 0) {
KALDI_WARN
<< "Input FST (after shortest path) was empty. Producing no "
<< "output for key " << key;
n_err++;
continue;
}
std::vector<int32> transcript;
bool ans;
if (output_side)
ans = fst::GetLinearSymbolSequence<StdArc, int32>(shortest_path, NULL,
&transcript, NULL);
else
ans = fst::GetLinearSymbolSequence<StdArc, int32>(
shortest_path, &transcript, NULL, NULL);
if (!ans) {
KALDI_ERR << "GetLinearSymbolSequence returned false (code error);";
}
transcript_writer.Write(key, transcript);
n_done++;
}
KALDI_LOG << "Converted " << n_done << " FSTs, " << n_err << " with errors";
return (n_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/matrix/tp-matrix.h<|end_filename|>
// matrix/tp-matrix.h
// Copyright 2009-2011 <NAME>; <NAME>; Microsoft Corporation;
// Saarland University; <NAME>; <NAME>
// 2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_MATRIX_TP_MATRIX_H_
#define KALDI_MATRIX_TP_MATRIX_H_
#include "matrix/packed-matrix.h"
namespace kaldi {
/// \addtogroup matrix_group
/// @{
template <typename Real>
class TpMatrix;
/// @brief Packed triangular matrix class
template <typename Real>
class TpMatrix : public PackedMatrix<Real> {
friend class CuTpMatrix<float>;
friend class CuTpMatrix<double>;
public:
TpMatrix() : PackedMatrix<Real>() {}
explicit TpMatrix(MatrixIndexT r, MatrixResizeType resize_type = kSetZero)
: PackedMatrix<Real>(r, resize_type) {}
TpMatrix(const TpMatrix<Real> &orig) : PackedMatrix<Real>(orig) {}
/// Copy constructor from CUDA TpMatrix
/// This is defined in ../cudamatrix/cu-tp-matrix.cc
explicit TpMatrix(const CuTpMatrix<Real> &cu);
template <typename OtherReal>
explicit TpMatrix(const TpMatrix<OtherReal> &orig)
: PackedMatrix<Real>(orig) {}
Real operator()(MatrixIndexT r, MatrixIndexT c) const {
if (static_cast<UnsignedMatrixIndexT>(c) >
static_cast<UnsignedMatrixIndexT>(r)) {
KALDI_ASSERT(static_cast<UnsignedMatrixIndexT>(c) <
static_cast<UnsignedMatrixIndexT>(this->num_rows_));
return 0;
}
KALDI_ASSERT(static_cast<UnsignedMatrixIndexT>(r) <
static_cast<UnsignedMatrixIndexT>(this->num_rows_));
// c<=r now so don't have to check c.
return *(this->data_ + (r * (r + 1)) / 2 + c);
// Duplicating code from PackedMatrix.h
}
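// For example, with num_rows_ == 3 the packed storage order is
// (0,0), (1,0), (1,1), (2,0), (2,1), (2,2), so (r, c) = (2, 1)
// maps to offset (2 * 3) / 2 + 1 = 4.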
Real &operator()(MatrixIndexT r, MatrixIndexT c) {
KALDI_ASSERT(static_cast<UnsignedMatrixIndexT>(r) <
static_cast<UnsignedMatrixIndexT>(this->num_rows_));
KALDI_ASSERT(static_cast<UnsignedMatrixIndexT>(c) <=
static_cast<UnsignedMatrixIndexT>(r) &&
"you cannot access the upper triangle of TpMatrix using "
"a non-const matrix object.");
return *(this->data_ + (r * (r + 1)) / 2 + c);
// Duplicating code from PackedMatrix.h
}
// Note: Cholesky may throw std::runtime_error
void Cholesky(const SpMatrix<Real> &orig);
void Invert();
// Inverts in double precision.
void InvertDouble() {
TpMatrix<double> dmat(*this);
dmat.Invert();
(*this).CopyFromTp(dmat);
}
/// Shallow swap
void Swap(TpMatrix<Real> *other);
/// Returns the determinant of the matrix (product of diagonals)
Real Determinant();
/// CopyFromMat copies the lower triangle of M into *this
/// (or the upper triangle, if Trans == kTrans).
void CopyFromMat(const MatrixBase<Real> &M,
MatrixTransposeType Trans = kNoTrans);
/// This is implemented in ../cudamatrix/cu-tp-matrix.cc
void CopyFromMat(const CuTpMatrix<Real> &other);
/// CopyFromTp copies another triangular matrix into this one.
void CopyFromTp(const TpMatrix<Real> &other) {
PackedMatrix<Real>::CopyFromPacked(other);
}
template <typename OtherReal>
void CopyFromTp(const TpMatrix<OtherReal> &other) {
PackedMatrix<Real>::CopyFromPacked(other);
}
/// AddTp does *this += alpha * M.
void AddTp(const Real alpha, const TpMatrix<Real> &M) {
this->AddPacked(alpha, M);
}
using PackedMatrix<Real>::operator=;
using PackedMatrix<Real>::Scale;
void Resize(MatrixIndexT nRows, MatrixResizeType resize_type = kSetZero) {
PackedMatrix<Real>::Resize(nRows, resize_type);
}
};
/// @} end of "addtogroup matrix_group".
} // namespace kaldi
#endif // KALDI_MATRIX_TP_MATRIX_H_
<|start_filename|>tonic-suite/asr/src/featbin/append-feats.cc<|end_filename|>
// featbin/append-feats.cc
// Copyright 2012 <NAME> <NAME>
// Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/kaldi-matrix.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Append 2 feature-streams [and possibly change format]\n"
"Note, this is deprecated; please use paste-feats\n"
"Usage: append-feats [options] <in-rspecifier1> <in-rspecifier2> "
"<out-wspecifier>\n"
"\n"
"e.g.: append-feats --feats-offset-in1 5 --num-feats-in1 5 "
"scp:list1.scp "
"scp:list2.scp ark:-\n";
ParseOptions po(usage);
bool truncate_frames = false;
po.Register(
"truncate-frames", &truncate_frames,
"If true, do not treat it "
"as an error when files differ in number of frames, but truncate "
"the longest one.");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string rspecifier1 = po.GetArg(1);
std::string rspecifier2 = po.GetArg(2);
std::string wspecifier = po.GetArg(3);
BaseFloatMatrixWriter feats_writer(wspecifier);
SequentialBaseFloatMatrixReader feats_reader1(rspecifier1);
RandomAccessBaseFloatMatrixReader feats_reader2(rspecifier2);
int32 num_done = 0, num_err = 0;
for (; !feats_reader1.Done(); feats_reader1.Next()) {
std::string utt = feats_reader1.Key();
if (!feats_reader2.HasKey(utt)) {
KALDI_WARN << "Could not find features for " << utt << " in "
<< rspecifier2 << ": producing no output for the utterance";
num_err++;
continue;
}
const Matrix<BaseFloat> &feats1 = feats_reader1.Value();
const Matrix<BaseFloat> &feats2 = feats_reader2.Value(utt);
if (feats1.NumRows() != feats2.NumRows() && !truncate_frames) {
KALDI_WARN << "For utterance " << utt << ", features have different "
<< "#frames " << feats1.NumRows() << " vs. "
<< feats2.NumRows() << ", producing no output (use "
<< "--truncate-frames=true if you want output)";
num_err++;
continue;
}
int32 num_frames = std::min(feats1.NumRows(), feats2.NumRows()),
dim1 = feats1.NumCols(), dim2 = feats2.NumCols();
Matrix<BaseFloat> output(num_frames, dim1 + dim2, kUndefined);
output.Range(0, num_frames, 0, dim1)
.CopyFromMat(feats1.Range(0, num_frames, 0, dim1));
output.Range(0, num_frames, dim1, dim2)
.CopyFromMat(feats2.Range(0, num_frames, 0, dim2));
feats_writer.Write(utt, output);
num_done++;
}
KALDI_LOG << "Appended " << num_done << " feats; " << num_err
<< " with errors.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/featbin/process-pitch-feats.cc<|end_filename|>
// featbin/process-pitch-feats.cc
// Copyright 2013 <NAME>
// Johns Hopkins University (author: <NAME>)
//
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
namespace kaldi {
// For the probability-of-voicing features (the first element of
// each row), change p -> log ((p + 0.0001) / (1.0001 - p))
// This makes it more Gaussian; otherwise it clumps up near
// the edges of the range [0, 1].
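// For example, p = 0.5 maps to 0.0 and p = 0.99 maps to about +4.6.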
void ProcessPovFeatures(Matrix<BaseFloat> *mat) {
int32 num_frames = mat->NumRows();
for (int32 i = 0; i < num_frames; i++) {
BaseFloat p = (*mat)(i, 0);
KALDI_ASSERT(p >= 0.0 && p <= 1.0);
(*mat)(i, 0) = log((p + 0.0001) / (1.0001 - p));
}
}
void TakeLogOfPitch(Matrix<BaseFloat> *mat) {
int32 num_frames = mat->NumRows();
for (int32 i = 0; i < num_frames; i++) {
KALDI_ASSERT((*mat)(i, 1) > 0.0);
(*mat)(i, 1) = log((*mat)(i, 1));
}
}
// Subtract the moving average over a largish window
// (e.g. 151 frames)
void SubtractMovingAverage(int32 normalization_window_size,
Matrix<BaseFloat> *mat) {
int32 num_frames = mat->NumRows();
Vector<BaseFloat> temp_pitch(num_frames);
Matrix<BaseFloat> &features = *mat;
int32 i;
for (i = 0; i < num_frames; i++) temp_pitch(i) = features(i, 1);
// Moving Window Normalization
BaseFloat mean = 0.0;
int32 mid_win = (normalization_window_size - 1) / 2;
for (i = 0; (i < num_frames) && (i < normalization_window_size); i++) {
mean += features(i, 1);
}
mean /= i;
if (num_frames <= normalization_window_size) {
for (i = 0; i < num_frames; i++) {
features(i, 1) -= mean;
}
} else {
for (i = 0; i <= mid_win; i++) {
features(i, 1) -= mean;
}
for (i = (mid_win + 1); i < num_frames; i++) {
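// Update the running mean incrementally as the window slides: the frame
// that drops out on the left is removed and the frame that enters on the
// right is added, each weighted by 1 / normalization_window_size.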
if (i + (mid_win + 1) < num_frames)
mean -=
(temp_pitch(i - (mid_win + 1)) - temp_pitch(i + (mid_win + 1))) /
normalization_window_size;
features(i, 1) -= mean;
}
}
}
// Set to the moving average over a small window, e.g. 5 frames.
void SetToMovingAverage(int32 average_window_size, Matrix<BaseFloat> *mat) {
int32 num_frames = mat->NumRows();
Matrix<BaseFloat> &features = *mat;
Vector<BaseFloat> temp_pitch(num_frames);
int32 width = (average_window_size - 1) / 2, i;
// e.g. if average_window_size is 5, width will equal 2.
for (i = width; i < num_frames - width; i++) {
temp_pitch(i) = features(i, 1);
for (int j = 1; j <= width; ++j) {
temp_pitch(i) += (features(i - j, 1) + features(i + j, 1));
}
temp_pitch(i) /= (2 * width + 1);
}
for (i = width; i < num_frames - width; i++) features(i, 1) = temp_pitch(i);
}
} // namespace kaldi
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"This is a rather special-purpose program which processes "
"2-dimensional\n"
"features consisting of (prob-of-voicing, pitch) into something "
"suitable\n"
"to put into a speech recognizer. First use interpolate-feats\n"
"Usage: process-pitch-feats [options...] <feats-rspecifier> "
"<feats-wspecifier>\n";
// construct all the global objects
ParseOptions po(usage);
int32 normalization_window_size = 151; // should be odd number
int32 average_window_size = 5;
// Register the options
po.Register("normalization-window-size", &normalization_window_size,
"Size of window used for "
"moving window nomalization (must be odd).");
po.Register("average-window-size", &average_window_size,
"Size of moving average window (must be odd).");
// parse options (+filling the registered variables)
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
KALDI_ASSERT(
average_window_size > 0 && average_window_size % 2 == 1 &&
"--average-window-size option must be an odd positive number.");
KALDI_ASSERT(
normalization_window_size > 0 && normalization_window_size % 2 == 1 &&
"--normalization-window-size option must be an odd positive number.");
std::string input_rspecifier = po.GetArg(1);
std::string output_wspecifier = po.GetArg(2);
SequentialBaseFloatMatrixReader reader(input_rspecifier);
BaseFloatMatrixWriter kaldi_writer; // typedef to TableWriter<something>.
if (!kaldi_writer.Open(output_wspecifier))
KALDI_ERR << "Could not initialize output with wspecifier "
<< output_wspecifier;
int32 num_done = 0, num_err = 0;
for (; !reader.Done(); reader.Next()) {
std::string utt = reader.Key();
Matrix<BaseFloat> features = reader.Value();
int num_frames = features.NumRows();
if (num_frames == 0 || features.NumCols() != 2) {
KALDI_WARN << "Feature file has bad size " << features.NumRows()
<< " by " << features.NumCols();
num_err++;
continue;
}
ProcessPovFeatures(&features);
TakeLogOfPitch(&features);
SubtractMovingAverage(normalization_window_size, &features);
SetToMovingAverage(average_window_size, &features);
kaldi_writer.Write(utt, features);
num_done++;
if (num_done % 10 == 0)
KALDI_LOG << "Processed " << num_done << " utterances";
KALDI_VLOG(2) << "Processed features for key " << utt;
}
KALDI_LOG << "Done " << num_done << " utterances, " << num_err
<< " with errors.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/hmm/transition-model-test.cc<|end_filename|>
// hmm/transition-model-test.cc
// Copyright 2014 Johns Hopkins University
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "hmm/transition-model.h"
namespace kaldi {
void TestTransitionModel() {
std::vector<int32> phones;
phones.push_back(1);
for (int32 i = 2; i < 20; i++)
if (rand() % 2 == 0) phones.push_back(i);
int32 N = 2 + rand() % 2, // context-size N is 2 or 3.
P = rand() % N; // Central-phone is random on [0, N)
std::vector<int32> num_pdf_classes;
ContextDependency *ctx_dep =
GenRandContextDependencyLarge(phones, N, P, true, &num_pdf_classes);
HmmTopology topo = GetDefaultTopology(phones);
TransitionModel trans_model(*ctx_dep, topo);
delete ctx_dep; // We won't need this further.
ctx_dep = NULL;
bool binary = (rand() % 2 == 0);
std::ostringstream os;
trans_model.Write(os, binary);
TransitionModel trans_model2;
std::istringstream is2(os.str());
trans_model2.Read(is2, binary);
{
std::ostringstream os1, os2;
trans_model.Write(os1, false);
trans_model2.Write(os2, false);
KALDI_ASSERT(os1.str() == os2.str());
KALDI_ASSERT(trans_model.Compatible(trans_model2));
}
}
}
int main() {
for (int i = 0; i < 2; i++) kaldi::TestTransitionModel();
KALDI_LOG << "Test OK.\n";
}
<|start_filename|>tonic-suite/asr/src/fstbin/fstminimizeencoded.cc<|end_filename|>
// fstbin/fstminimizeencoded.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/kaldi-io.h"
#include "util/parse-options.h"
#include "util/text-utils.h"
#include "fst/fstlib.h"
#include "fstext/determinize-star.h"
#include "fstext/fstext-utils.h"
/* some test examples:
( echo "0 0 0 0"; echo "0 0" ) | fstcompile | fstminimizeencoded | fstprint
( echo "0 1 0 0"; echo " 0 2 0 0"; echo "1 0"; echo "2 0"; ) | fstcompile |
fstminimizeencoded | fstprint
*/
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace fst;
using kaldi::int32;
const char *usage =
"Minimizes FST after encoding [similar to fstminimize, but no "
"weight-pushing]\n"
"\n"
"Usage: fstminimizeencoded [in.fst [out.fst] ]\n";
float delta = kDelta;
ParseOptions po(usage);
po.Register("delta", &delta,
"Delta likelihood used for quantization of weights");
po.Read(argc, argv);
if (po.NumArgs() > 2) {
po.PrintUsage();
exit(1);
}
std::string fst_in_filename = po.GetOptArg(1),
fst_out_filename = po.GetOptArg(2);
VectorFst<StdArc> *fst = ReadFstKaldi(fst_in_filename);
MinimizeEncoded(fst, delta);
WriteFstKaldi(*fst, fst_out_filename);
delete fst;
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
return 0;
}
<|start_filename|>tonic-suite/asr/src/bin/est-pca.cc<|end_filename|>
// bin/est-pca.cc
// Copyright 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/matrix-lib.h"
int main(int argc, char *argv[]) {
using namespace kaldi;
typedef kaldi::int32 int32;
try {
const char *usage =
"Estimate PCA transform; dimension reduction is optional (if not "
"specified\n"
"we don't reduce the dimension; if you specify "
"--normalize-variance=true,\n"
"we normalize the (centered) covariance of the features, and if you "
"specify\n"
"--normalize-mean=true the mean is also normalized. So a variety of "
"transform\n"
"types are supported. Because this type of transform does not need "
"too much\n"
"data to estimate robustly, we don't support separate accumulator "
"files;\n"
"this program reads in the features directly. For large datasets you "
"may\n"
"want to subset the features (see example below)\n"
"By default the program reads in matrices (e.g. features), but with\n"
"--read-vectors=true, can read in vectors (e.g. iVectors).\n"
"\n"
"Usage: est-pca [options] (<feature-rspecifier>|<vector-rspecifier>) "
"<pca-matrix-out>\n"
"e.g.:\n"
"utils/shuffle_list.pl data/train/feats.scp | head -n 5000 | sort | "
"\\\n"
" est-pca --dim=50 scp:- some/dir/0.mat\n";
bool binary = true;
bool read_vectors = false;
bool normalize_variance = false;
bool normalize_mean = false;
int32 dim = -1;
std::string full_matrix_wxfilename;
ParseOptions po(usage);
po.Register("binary", &binary, "Write accumulators in binary mode.");
po.Register("dim", &dim,
"Feature dimension requested (if <= 0, uses full "
"feature dimension");
po.Register("read-vectors", &read_vectors,
"If true, read in single vectors "
"instead of feature matrices");
po.Register("normalize-variance", &normalize_variance,
"If true, make a "
"transform that normalizes variance to one.");
po.Register("normalize-mean", &normalize_mean,
"If true, output an affine "
"transform that subtracts the data mean.");
po.Register("write-full-matrix", &full_matrix_wxfilename,
"Write full version of the matrix to this location (including "
"rejected rows)");
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
std::string rspecifier = po.GetArg(1), pca_mat_wxfilename = po.GetArg(2);
int32 num_done = 0, num_err = 0;
int64 count = 0;
Vector<double> sum;
SpMatrix<double> sumsq;
if (!read_vectors) {
SequentialBaseFloatMatrixReader feat_reader(rspecifier);
for (; !feat_reader.Done(); feat_reader.Next()) {
Matrix<double> mat(feat_reader.Value());
if (mat.NumRows() == 0) {
KALDI_WARN << "Empty feature matrix";
num_err++;
continue;
}
if (sum.Dim() == 0) {
sum.Resize(mat.NumCols());
sumsq.Resize(mat.NumCols());
}
if (sum.Dim() != mat.NumCols()) {
KALDI_WARN << "Feature dimension mismatch " << sum.Dim() << " vs. "
<< mat.NumCols();
num_err++;
continue;
}
sum.AddRowSumMat(1.0, mat);
sumsq.AddMat2(1.0, mat, kTrans, 1.0);
count += mat.NumRows();
num_done++;
}
KALDI_LOG << "Accumulated stats from " << num_done << " feature files, "
<< num_err << " with errors; " << count << " frames.";
} else {
// read in vectors, not matrices
SequentialBaseFloatVectorReader vec_reader(rspecifier);
for (; !vec_reader.Done(); vec_reader.Next()) {
Vector<double> vec(vec_reader.Value());
if (vec.Dim() == 0) {
KALDI_WARN << "Empty input vector";
num_err++;
continue;
}
if (sum.Dim() == 0) {
sum.Resize(vec.Dim());
sumsq.Resize(vec.Dim());
}
if (sum.Dim() != vec.Dim()) {
KALDI_WARN << "Feature dimension mismatch " << sum.Dim() << " vs. "
<< vec.Dim();
num_err++;
continue;
}
sum.AddVec(1.0, vec);
sumsq.AddVec2(1.0, vec);
count += 1.0;
num_done++;
}
KALDI_LOG << "Accumulated stats from " << num_done << " vectors, "
<< num_err << " with errors.";
}
if (num_done == 0) KALDI_ERR << "No data accumulated.";
sum.Scale(1.0 / count);
sumsq.Scale(1.0 / count);
sumsq.AddVec2(-1.0, sum); // now sumsq is centered covariance.
int32 full_dim = sum.Dim();
if (dim <= 0) dim = full_dim;
if (dim > full_dim)
KALDI_ERR << "Final dimension " << dim << " is greater than feature "
<< "dimension " << full_dim;
Matrix<double> P(full_dim, full_dim);
Vector<double> s(full_dim);
sumsq.Eig(&s, &P);
SortSvd(&s, &P);
KALDI_LOG << "Eigenvalues in PCA are " << s;
KALDI_LOG << "Sum of PCA eigenvalues is " << s.Sum() << ", sum of kept "
<< "eigenvalues is " << s.Range(0, dim).Sum();
Matrix<double> transform(P, kTrans); // Transpose of P. This is what
// appears in the transform.
if (normalize_variance) {
for (int32 i = 0; i < full_dim; i++) {
double this_var = s(i), min_var = 1.0e-15;
if (this_var < min_var) {
KALDI_WARN << "--normalize-variance option: very tiny variance "
<< s(i) << "encountered, treating as " << min_var;
this_var = min_var;
}
double scale =
1.0 / sqrt(this_var); // scale on features that will make
// the variance unit.
transform.Row(i).Scale(scale);
}
}
Vector<double> offset(full_dim);
if (normalize_mean) {
offset.AddMatVec(-1.0, transform, kNoTrans, sum, 0.0);
transform.Resize(full_dim, full_dim + 1,
kCopyData); // Add column to transform.
transform.CopyColFromVec(offset, full_dim);
}
Matrix<BaseFloat> transform_float(transform);
if (full_matrix_wxfilename != "") {
WriteKaldiObject(transform_float, full_matrix_wxfilename, binary);
}
transform_float.Resize(dim, transform_float.NumCols(), kCopyData);
WriteKaldiObject(transform_float, pca_mat_wxfilename, binary);
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/feat/feature-functions.h<|end_filename|>
// feat/feature-functions.h
// Copyright 2009-2011 <NAME>; <NAME>; Microsoft Corporation
// 2014 IMSL, PKU-HKUST (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_FEAT_FEATURE_FUNCTIONS_H_
#define KALDI_FEAT_FEATURE_FUNCTIONS_H_
#include <string>
#include <vector>
#include "matrix/matrix-lib.h"
#include "util/common-utils.h"
#include "base/kaldi-error.h"
#include "feat/mel-computations.h"
namespace kaldi {
/// @addtogroup feat FeatureExtraction
/// @{
struct MelBanksOptions {
int32 num_bins; // e.g. 25; number of triangular bins
BaseFloat low_freq; // e.g. 20; lower frequency cutoff
BaseFloat high_freq; // an upper frequency cutoff; 0 -> no cutoff, negative
                       // -> added to the Nyquist frequency to get the cutoff.
BaseFloat vtln_low; // vtln lower cutoff of warping function.
BaseFloat
vtln_high; // vtln upper cutoff of warping function: if negative, added
// to the Nyquist frequency to get the cutoff.
bool debug_mel;
// htk_mode is a "hidden" config, it does not show up on command line.
  // Enables more exact compatibility with HTK, for testing purposes. Affects
// mel-energy flooring and reproduces a bug in HTK.
bool htk_mode;
explicit MelBanksOptions(int num_bins = 25)
: num_bins(num_bins),
low_freq(20),
high_freq(0),
vtln_low(100),
vtln_high(-500),
debug_mel(false),
htk_mode(false) {}
void Register(OptionsItf *po) {
po->Register("num-mel-bins", &num_bins,
"Number of triangular mel-frequency bins");
po->Register("low-freq", &low_freq, "Low cutoff frequency for mel bins");
po->Register(
"high-freq", &high_freq,
"High cutoff frequency for mel bins (if < 0, offset from Nyquist)");
po->Register(
"vtln-low", &vtln_low,
"Low inflection point in piecewise linear VTLN warping function");
po->Register(
"vtln-high", &vtln_high,
"High inflection point in piecewise linear VTLN warping function"
" (if negative, offset from high-mel-freq");
po->Register("debug-mel", &debug_mel,
"Print out debugging information for mel bin computation");
}
};
struct FrameExtractionOptions {
BaseFloat samp_freq;
BaseFloat frame_shift_ms; // in milliseconds.
BaseFloat frame_length_ms; // in milliseconds.
BaseFloat dither; // Amount of dithering, 0.0 means no dither.
BaseFloat preemph_coeff; // Preemphasis coefficient.
bool remove_dc_offset; // Subtract mean of wave before FFT.
  std::string window_type;  // May be "hamming", "rectangular", "povey", or
                            // "hanning". "povey" is a window I made to be
                            // similar to Hamming but to go to zero at the
                            // edges; it's pow((0.5 - 0.5*cos(n/N*2*pi)), 0.85).
                            // I just don't think the Hamming window makes
                            // sense as a windowing function.
  bool round_to_power_of_two;
  bool snip_edges;
FrameExtractionOptions()
: samp_freq(16000),
frame_shift_ms(10.0),
frame_length_ms(25.0),
dither(1.0),
preemph_coeff(0.97),
remove_dc_offset(true),
window_type("povey"),
round_to_power_of_two(true),
snip_edges(true) {}
void Register(OptionsItf *po) {
po->Register(
"sample-frequency", &samp_freq,
"Waveform data sample frequency (must match the waveform file, "
"if specified there)");
po->Register("frame-length", &frame_length_ms,
"Frame length in milliseconds");
po->Register("frame-shift", &frame_shift_ms, "Frame shift in milliseconds");
po->Register("preemphasis-coefficient", &preemph_coeff,
"Coefficient for use in signal preemphasis");
po->Register("remove-dc-offset", &remove_dc_offset,
"Subtract mean from waveform on each frame");
po->Register("dither", &dither, "Dithering constant (0.0 means no dither)");
po->Register("window-type", &window_type,
"Type of window "
"(\"hamming\"|\"hanning\"|\"povey\"|\"rectangular\")");
po->Register("round-to-power-of-two", &round_to_power_of_two,
"If true, round window size to power of two.");
po->Register(
"snip-edges", &snip_edges,
"If true, end effects will be handled by outputting only frames that "
"completely fit in the file, and the number of frames depends on the "
"frame-length. If false, the number of frames depends only on the "
"frame-shift, and we reflect the data at the ends.");
}
int32 WindowShift() const {
return static_cast<int32>(samp_freq * 0.001 * frame_shift_ms);
}
int32 WindowSize() const {
return static_cast<int32>(samp_freq * 0.001 * frame_length_ms);
}
int32 PaddedWindowSize() const {
return (round_to_power_of_two ? RoundUpToNearestPowerOfTwo(WindowSize())
: WindowSize());
}
};
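// For example, with the default FrameExtractionOptions above (16 kHz sampling,
// 25 ms frames, 10 ms shift): WindowSize() = 16000 * 0.001 * 25 = 400 samples,
// WindowShift() = 160 samples, and PaddedWindowSize() = 512, the next power of
// two.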
struct FeatureWindowFunction {
FeatureWindowFunction() {}
explicit FeatureWindowFunction(const FrameExtractionOptions &opts);
Vector<BaseFloat> window;
};
int32 NumFrames(int32 wave_length, const FrameExtractionOptions &opts);
void Dither(VectorBase<BaseFloat> *waveform, BaseFloat dither_value);
void Preemphasize(VectorBase<BaseFloat> *waveform, BaseFloat preemph_coeff);
// ExtractWindow extracts a windowed frame of waveform with a power-of-two,
// padded size. If log_energy_pre_window != NULL, outputs the log of the
// sum-of-squared samples before preemphasis and windowing
void ExtractWindow(const VectorBase<BaseFloat> &wave,
int32 f, // with 0 <= f < NumFrames(wave.Dim(), opts)
const FrameExtractionOptions &opts,
const FeatureWindowFunction &window_function,
Vector<BaseFloat> *window,
BaseFloat *log_energy_pre_window = NULL);
// ExtractWaveformRemainder is useful if the waveform is coming in segments.
// It extracts the bit of the waveform at the end of this block that you
// would have to append the next bit of waveform to, if you wanted to have
// the same effect as everything being in one big block.
void ExtractWaveformRemainder(const VectorBase<BaseFloat> &wave,
const FrameExtractionOptions &opts,
Vector<BaseFloat> *wave_remainder);
// ComputePowerSpectrum takes a complex FFT (as produced by the FFT
// functions in matrix/matrix-functions.h) and converts it into
// a power spectrum. If the complex FFT is a vector of size n (representing
// half the complex FFT of a real signal of size n, as described there),
// this function computes in the first (n/2) + 1 elements of it, the
// energies of the fft bins from zero to the Nyquist frequency. Contents of the
// remaining (n/2) - 1 elements are undefined at output.
void ComputePowerSpectrum(VectorBase<BaseFloat> *complex_fft);
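// For example, with a 512-point FFT the first 512/2 + 1 = 257 output elements
// hold the bin energies from 0 Hz up to the Nyquist frequency, and the
// remaining 255 elements are left undefined.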
inline void MaxNormalizeEnergy(Matrix<BaseFloat> *feats) {
// Just subtract the largest energy value... assume energy is the first
// column of the mfcc features. Don't do the flooring of energy (dithering
// should prevent exact zeros).
// We didn't put this in the main MFCC computation as we wanted to make sure
// it is stateless (so we can do it bit by bit for large waveforms).
// not compatible with the order_as_htk_ option in MfccOptions.
SubMatrix<BaseFloat> energy(*feats, 0, feats->NumRows(), 0, 1);
energy.Add(-energy.Max());
}
struct DeltaFeaturesOptions {
int32 order;
int32 window; // e.g. 2; controls window size (window size is 2*window + 1)
// the behavior at the edges is to replicate the first or last frame.
// this is not configurable.
DeltaFeaturesOptions(int32 order = 2, int32 window = 2)
: order(order), window(window) {}
void Register(OptionsItf *po) {
po->Register("delta-order", &order, "Order of delta computation");
po->Register(
"delta-window", &window,
"Parameter controlling window for delta computation (actual window"
" size for each delta order is 1 + 2*delta-window-size)");
}
};
class DeltaFeatures {
public:
// This class provides a low-level function to compute delta features.
// The function takes as input a matrix of features and a frame index
// that it should compute the deltas on. It puts its output in an object
// of type VectorBase, of size (original-feature-dimension) * (opts.order+1).
// This is not the most efficient way to do the computation, but it's
// state-free and thus easier to understand
explicit DeltaFeatures(const DeltaFeaturesOptions &opts);
void Process(const MatrixBase<BaseFloat> &input_feats, int32 frame,
VectorBase<BaseFloat> *output_frame) const;
private:
DeltaFeaturesOptions opts_;
std::vector<Vector<BaseFloat> > scales_; // a scaling window for each
// of the orders, including zero: multiply the features for each
// dimension by this window.
};
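// A minimal usage sketch (hedged; 'feats' is assumed to be an existing
// Matrix<BaseFloat> of input features):
//
//   DeltaFeaturesOptions delta_opts;  // order = 2, window = 2 by default
//   DeltaFeatures delta(delta_opts);
//   Vector<BaseFloat> out(feats.NumCols() * (delta_opts.order + 1));
//   for (int32 t = 0; t < feats.NumRows(); t++)
//     delta.Process(feats, t, &out);  // deltas for frame t land in 'out'
//
// For whole utterances, the ComputeDeltas() function declared below is usually
// what you want.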
struct ShiftedDeltaFeaturesOptions {
int32 window, // The time delay and advance
num_blocks,
block_shift; // Distance between consecutive blocks
ShiftedDeltaFeaturesOptions() : window(1), num_blocks(7), block_shift(3) {}
void Register(OptionsItf *po) {
po->Register("delta-window", &window, "Size of delta advance and delay.");
po->Register("num-blocks", &num_blocks,
"Number of delta blocks in advance"
" of each frame to be concatenated");
po->Register("block-shift", &block_shift, "Distance between each block");
}
};
class ShiftedDeltaFeatures {
public:
// This class provides a low-level function to compute shifted
  // delta cepstra (SDC).
// The function takes as input a matrix of features and a frame index
// that it should compute the deltas on. It puts its output in an object
// of type VectorBase, of size original-feature-dimension + (1 * num_blocks).
explicit ShiftedDeltaFeatures(const ShiftedDeltaFeaturesOptions &opts);
void Process(const MatrixBase<BaseFloat> &input_feats, int32 frame,
SubVector<BaseFloat> *output_frame) const;
private:
ShiftedDeltaFeaturesOptions opts_;
Vector<BaseFloat> scales_; // a scaling window for each
};
// ComputeDeltas is a convenience function that computes deltas on a feature
// file. If you want to deal with features coming in bit by bit you would have
// to use the DeltaFeatures class directly, and do the computation frame by
// frame. Later we will have to come up with a nice mechanism to do this for
// features coming in.
void ComputeDeltas(const DeltaFeaturesOptions &delta_opts,
const MatrixBase<BaseFloat> &input_features,
Matrix<BaseFloat> *output_features);
// ComputeShiftedDeltas computes deltas from a feature file by applying
// ShiftedDeltaFeatures over the frames. This function is provided for
// convenience, however, ShiftedDeltaFeatures can be used directly.
void ComputeShiftedDeltas(const ShiftedDeltaFeaturesOptions &delta_opts,
const MatrixBase<BaseFloat> &input_features,
Matrix<BaseFloat> *output_features);
// SpliceFrames will normally be used together with LDA.
// It splices frames together to make a window. At the
// start and end of an utterance, it duplicates the first
// and last frames.
// Will throw if input features are empty.
// left_context and right_context must be nonnegative.
// these both represent a number of frames (e.g. 4, 4 is
// a good choice).
void SpliceFrames(const MatrixBase<BaseFloat> &input_features,
int32 left_context, int32 right_context,
Matrix<BaseFloat> *output_features);
// ReverseFrames reverses the frames in time (used for backwards decoding)
void ReverseFrames(const MatrixBase<BaseFloat> &input_features,
Matrix<BaseFloat> *output_features);
class MelBanks;
void GetEqualLoudnessVector(const MelBanks &mel_banks, Vector<BaseFloat> *ans);
void InitIdftBases(int32 n_bases, int32 dimension, Matrix<BaseFloat> *mat_out);
// Compute LP coefficients from autocorrelation coefficients.
BaseFloat ComputeLpc(const VectorBase<BaseFloat> &autocorr_in,
Vector<BaseFloat> *lpc_out);
// This is used for speaker-id. Also see OnlineCmnOptions in ../online2/, which
// is online CMN with no latency, for online speech recognition.
struct SlidingWindowCmnOptions {
int32 cmn_window;
int32 min_window;
bool normalize_variance;
bool center;
SlidingWindowCmnOptions()
: cmn_window(600),
min_window(100),
normalize_variance(false),
center(false) {}
void Register(OptionsItf *po) {
po->Register("cmn-window", &cmn_window,
"Window in frames for running "
"average CMN computation");
po->Register("min-cmn-window", &min_window,
"Minimum CMN window "
"used at start of decoding (adds latency only at start). "
"Only applicable if center == false, ignored if center==true");
po->Register("norm-vars", &normalize_variance,
"If true, normalize "
"variance to one."); // naming this as in apply-cmvn.cc
po->Register("center", ¢er,
"If true, use a window centered on the "
"current frame (to the extent possible, modulo end effects). "
"If false, window is to the left.");
}
void Check() const;
};
/// Applies sliding-window cepstral mean and/or variance normalization. See the
/// strings registering the options in the options class for information on how
/// this works and what the options are. input and output must have the same
/// dimension.
void SlidingWindowCmn(const SlidingWindowCmnOptions &opts,
const MatrixBase<BaseFloat> &input,
MatrixBase<BaseFloat> *output);
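// A minimal usage sketch (hedged): the output matrix must already have the
// same dimensions as the input.
//
//   SlidingWindowCmnOptions cmn_opts;  // cmn_window = 600 frames by default
//   Matrix<BaseFloat> normed(feats.NumRows(), feats.NumCols());
//   SlidingWindowCmn(cmn_opts, feats, &normed);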
/// @} End of "addtogroup feat"
} // namespace kaldi
#endif // KALDI_FEAT_FEATURE_FUNCTIONS_H_
<|start_filename|>tonic-suite/asr/src/transform/fmllr-diag-gmm-test.cc<|end_filename|>
// transform/fmllr-diag-gmm-test.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "util/common-utils.h"
#include "gmm/diag-gmm.h"
#include "transform/fmllr-diag-gmm.h"
namespace kaldi {
void InitRandomGmm(DiagGmm *gmm_in) {
int32 num_gauss = 5 + rand() % 4;
int32 dim = 10 + Rand() % 10;
DiagGmm &gmm(*gmm_in);
gmm.Resize(num_gauss, dim);
Matrix<BaseFloat> inv_vars(num_gauss, dim), means(num_gauss, dim);
Vector<BaseFloat> weights(num_gauss);
for (int32 i = 0; i < num_gauss; i++) {
for (int32 j = 0; j < dim; j++) {
inv_vars(i, j) = exp(RandGauss() * (1.0 / (1 + j)));
means(i, j) = RandGauss() * (1.0 / (1 + j));
}
weights(i) = exp(RandGauss());
}
weights.Scale(1.0 / weights.Sum());
gmm.SetWeights(weights);
gmm.SetInvVarsAndMeans(inv_vars, means);
gmm.ComputeGconsts();
}
// This test is statistical and relies on some identities
// related to the Akaike criterion.
void UnitTestFmllrDiagGmm() {
using namespace kaldi;
DiagGmm gmm;
InitRandomGmm(&gmm);
int32 dim = gmm.Dim();
int32 npoints = dim * (dim + 1) * 5;
Matrix<BaseFloat> rand_points(npoints, dim);
for (int32 i = 0; i < npoints; i++) {
SubVector<BaseFloat> row(rand_points, i);
gmm.Generate(&row);
}
Matrix<BaseFloat> cur_xform(dim, dim + 1);
cur_xform.SetUnit(); // set diag to unit.
int32 niters = 5;
BaseFloat objf_change_tot = 0.0, objf_change, count;
for (int32 j = 0; j < niters; j++) {
FmllrOptions opts;
FmllrDiagGmmAccs stats(dim, j % 2 == 0 ? opts : FmllrOptions());
for (int32 i = 0; i < npoints; i++) {
SubVector<BaseFloat> row(rand_points, i);
if (j == 0) { // split this case off to exercise more of the code.
stats.AccumulateForGmm(gmm, row, 1.0);
} else {
Vector<BaseFloat> xformed_row(row);
ApplyAffineTransform(cur_xform, &xformed_row);
Vector<BaseFloat> posteriors(gmm.NumGauss());
gmm.ComponentPosteriors(xformed_row, &posteriors);
stats.AccumulateFromPosteriors(gmm, row, posteriors);
}
}
stats.Update(opts, &cur_xform, &objf_change, &count);
{ // Test for ApplyFeatureTransformToStats:
BaseFloat objf_change_tmp, count_tmp;
ApplyFeatureTransformToStats(cur_xform, &stats);
Matrix<BaseFloat> mat(dim, dim + 1);
mat.SetUnit();
stats.Update(opts, &mat, &objf_change_tmp, &count_tmp);
// After we apply this transform to the stats, there should
// be nothing to gain from further transforming the data.
KALDI_ASSERT(objf_change_tmp / count_tmp < 0.01);
}
KALDI_LOG << "Objf change on iter " << j << " is " << objf_change;
objf_change_tot += objf_change;
}
KALDI_ASSERT(ApproxEqual(count, npoints));
int32 num_params = dim * (dim + 1);
BaseFloat expected_objf_change = 0.5 * num_params;
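  // (Illustrative arithmetic, assuming for example dim = 15: num_params is
  // 15 * 16 = 240, so the expected change is about 120, and the assertion
  // below allows up to roughly twice that.)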
KALDI_LOG << "Expected objf change is: not much more than "
<< expected_objf_change << ", seen: " << objf_change_tot;
KALDI_ASSERT(objf_change_tot <
2.0 * expected_objf_change); // or way too much.
// This test relies on statistical laws and if it fails it does not
// *necessarily*
// mean that something is wrong.
}
// This is a test for the diagonal update and also of
// ApplyModelTransformToStats().
void UnitTestFmllrDiagGmmDiagonal() {
using namespace kaldi;
DiagGmm gmm;
InitRandomGmm(&gmm);
int32 dim = gmm.Dim();
int32 npoints = dim * (dim + 1) * 5;
Matrix<BaseFloat> rand_points(npoints, dim);
for (int32 i = 0; i < npoints; i++) {
SubVector<BaseFloat> row(rand_points, i);
gmm.Generate(&row);
}
Matrix<BaseFloat> cur_xform(dim, dim + 1);
cur_xform.SetUnit(); // set diag to unit.
int32 niters = 2;
BaseFloat objf_change_tot = 0.0, objf_change, count;
FmllrOptions opts;
opts.update_type = "diag";
for (int32 j = 0; j < niters; j++) {
FmllrDiagGmmAccs stats(dim, j % 2 == 0 ? opts : FmllrOptions());
for (int32 i = 0; i < npoints; i++) {
SubVector<BaseFloat> row(rand_points, i);
if (j == 0) { // split this case off to exercise more of the code.
stats.AccumulateForGmm(gmm, row, 1.0);
} else {
Vector<BaseFloat> xformed_row(row);
ApplyAffineTransform(cur_xform, &xformed_row);
Vector<BaseFloat> posteriors(gmm.NumGauss());
gmm.ComponentPosteriors(xformed_row, &posteriors);
stats.AccumulateFromPosteriors(gmm, row, posteriors);
}
}
stats.Update(opts, &cur_xform, &objf_change, &count);
{ // Test for ApplyModelTransformToStats:
BaseFloat objf_change_tmp, count_tmp;
ApplyModelTransformToStats(cur_xform, &stats);
Matrix<BaseFloat> mat(dim, dim + 1);
mat.SetUnit();
stats.Update(opts, &mat, &objf_change_tmp, &count_tmp);
// After we apply this transform to the stats, there should
// be nothing to gain from further transforming the data.
KALDI_ASSERT(objf_change_tmp / count_tmp < 0.01);
}
KALDI_LOG << "Objf change on iter " << j << " is " << objf_change;
objf_change_tot += objf_change;
}
KALDI_ASSERT(ApproxEqual(count, npoints));
int32 num_params = dim * 2;
BaseFloat expected_objf_change = 0.5 * num_params;
KALDI_LOG << "Expected objf change is: not much more than "
<< expected_objf_change << ", seen: " << objf_change_tot;
KALDI_ASSERT(objf_change_tot <
2.0 * expected_objf_change); // or way too much.
// This test relies on statistical laws and if it fails it does not
// *necessarily*
// mean that something is wrong.
}
// This is a test for the offset-only update and also of
// ApplyModelTransformToStats().
void UnitTestFmllrDiagGmmOffset() {
using namespace kaldi;
DiagGmm gmm;
InitRandomGmm(&gmm);
int32 dim = gmm.Dim();
int32 npoints = dim * (dim + 1) * 5;
Matrix<BaseFloat> rand_points(npoints, dim);
for (int32 i = 0; i < npoints; i++) {
SubVector<BaseFloat> row(rand_points, i);
gmm.Generate(&row);
}
Matrix<BaseFloat> cur_xform(dim, dim + 1);
cur_xform.SetUnit(); // set diag to unit.
int32 niters = 2;
BaseFloat objf_change_tot = 0.0, objf_change, count;
FmllrOptions opts;
opts.update_type = "offset";
for (int32 j = 0; j < niters; j++) {
FmllrDiagGmmAccs stats(dim, j % 2 == 0 ? opts : FmllrOptions());
for (int32 i = 0; i < npoints; i++) {
SubVector<BaseFloat> row(rand_points, i);
if (j == 0) { // split this case off to exercise more of the code.
stats.AccumulateForGmm(gmm, row, 1.0);
} else {
Vector<BaseFloat> xformed_row(row);
ApplyAffineTransform(cur_xform, &xformed_row);
Vector<BaseFloat> posteriors(gmm.NumGauss());
gmm.ComponentPosteriors(xformed_row, &posteriors);
stats.AccumulateFromPosteriors(gmm, row, posteriors);
}
}
stats.Update(opts, &cur_xform, &objf_change, &count);
{ // Test for ApplyModelTransformToStats:
BaseFloat objf_change_tmp, count_tmp;
ApplyModelTransformToStats(cur_xform, &stats);
Matrix<BaseFloat> mat(dim, dim + 1);
mat.SetUnit();
stats.Update(opts, &mat, &objf_change_tmp, &count_tmp);
// After we apply this transform to the stats, there should
// be nothing to gain from further transforming the data.
KALDI_ASSERT(objf_change_tmp / count_tmp < 0.01);
}
KALDI_LOG << "Objf change on iter " << j << " is " << objf_change;
objf_change_tot += objf_change;
}
KALDI_ASSERT(ApproxEqual(count, npoints));
int32 num_params = dim;
BaseFloat expected_objf_change = 0.5 * num_params;
KALDI_LOG << "Expected objf change is: not much more than "
<< expected_objf_change << ", seen: " << objf_change_tot;
KALDI_ASSERT(objf_change_tot <
2.0 * expected_objf_change); // or way too much.
// This test relies on statistical laws and if it fails it does not
// *necessarily*
// mean that something is wrong.
}
} // namespace kaldi ends here
int main() {
for (int i = 0; i < 2; i++) { // did more iterations when first testing...
kaldi::UnitTestFmllrDiagGmmOffset();
kaldi::UnitTestFmllrDiagGmmDiagonal();
kaldi::UnitTestFmllrDiagGmm();
}
std::cout << "Test OK.\n";
}
<|start_filename|>tonic-suite/nlp/src/SENNA_utils.cpp<|end_filename|>
#include "SENNA_utils.h"
static int is_verbose = 0;
static void buffer_reverse_memory(void *ptr_, int block_size, int n_blocks);
static int is_little_endian_cpu();
void SENNA_error(const char *fmt, ...) {
va_list args;
fprintf(stderr, "FATAL ERROR: ");
va_start(args, fmt);
vfprintf(stderr, fmt, args);
fprintf(stderr, "\n");
va_end(args);
exit(-1);
}
void SENNA_message(const char *fmt, ...) {
va_list args;
if (is_verbose) {
fprintf(stderr, "[");
va_start(args, fmt);
vfprintf(stderr, fmt, args);
fprintf(stderr, "]\n");
va_end(args);
}
}
void SENNA_set_verbose_mode(int verbose) { is_verbose = verbose; }
FILE *SENNA_fopen(const char *path, const char *subpath, const char *mode) {
FILE *f;
char *complete_path = NULL;
if (!path && !subpath)
SENNA_error("SENNA_fopen: path or subpath should be non NULL");
if (path && subpath) {
int pathsize = strlen(path);
int subpathsize = strlen(subpath);
complete_path = SENNA_malloc(sizeof(char), pathsize + subpathsize + 1);
strcpy(complete_path, path);
strcpy(complete_path + pathsize, subpath);
}
f = fopen((complete_path ? complete_path : (path ? path : subpath)), mode);
if (!f)
SENNA_error("unable to open file <%s%s>", (path ? path : ""),
(subpath ? subpath : ""));
if (sizeof(char) != 1)
SENNA_error("char size is not 1, sorry can't load binary files");
if (sizeof(int) != 4)
SENNA_error("int size is not 4, sorry can't load binary files");
if (sizeof(float) != 4)
SENNA_error("float size is not 1, sorry can't load binary files");
SENNA_free(complete_path);
return f;
}
void SENNA_fseek(FILE *stream, long offset, int whence) {
if (fseek(stream, offset, whence)) SENNA_error("unable to seek into a file");
}
long SENNA_ftell(FILE *stream) {
long res = ftell(stream);
if (res == -1) SENNA_error("unable to tell where we are in a file");
return res;
}
void SENNA_fread(void *ptr, size_t size, size_t nmemb, FILE *stream) {
size_t res = fread(ptr, size, nmemb, stream);
if (res != nmemb)
SENNA_error("read error: read %ld elements instead of %ld (of size %ld)",
res, nmemb, size);
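  // The binary data files are assumed to be little-endian; on a big-endian
  // CPU each multi-byte element just read is byte-swapped in place below.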
if (size > 1 && !is_little_endian_cpu())
buffer_reverse_memory(ptr, size, nmemb);
}
void SENNA_fread_tensor_1d(float **ptr, int *n_row, FILE *stream) {
SENNA_fread(n_row, sizeof(int), 1, stream);
*ptr = SENNA_malloc(sizeof(float), *n_row);
SENNA_fread(*ptr, sizeof(float), *n_row, stream);
}
void SENNA_fread_tensor_2d(float **ptr, int *n_row, int *n_column,
FILE *stream) {
SENNA_fread(n_row, sizeof(int), 1, stream);
SENNA_fread(n_column, sizeof(int), 1, stream);
*ptr = SENNA_malloc(sizeof(float), (*n_row) * (*n_column));
SENNA_fread(*ptr, sizeof(float), (*n_row) * (*n_column), stream);
}
char *SENNA_fgetline(char *str, int size, FILE *stream) {
int str_size;
if (fgets(str, size, stream)) {
str_size = strlen(str);
if ((str_size > 0) && (str[str_size - 1] == '\n')) str[str_size - 1] = '\0';
return str;
} else
return NULL;
}
void SENNA_fclose(FILE *stream) { fclose(stream); }
void *SENNA_malloc(size_t size, size_t nitems) {
void *res = malloc(size * nitems);
if (!res)
SENNA_error("memory allocation error [%ldGB] -- buy new RAM", size << 30);
return res;
}
void *SENNA_realloc(void *ptr, size_t size, size_t nitems) {
ptr = realloc(ptr, size * nitems);
if (!ptr)
SENNA_error("memory allocation error [%ldGB] -- buy new RAM", size << 30);
return ptr;
}
void SENNA_free(void *ptr) { free(ptr); }
static void buffer_reverse_memory(void *ptr_, int block_size, int n_blocks) {
char *ptr;
char *ptrr;
char *ptrw;
int i, j;
char *buffer_block;
if (block_size == 1) return;
ptr = (char *)ptr_;
buffer_block = SENNA_malloc(sizeof(char), block_size);
for (i = 0; i < n_blocks; i++) {
ptrr = ptr + ((i + 1) * block_size);
ptrw = buffer_block;
for (j = 0; j < block_size; j++) {
ptrr--;
*ptrw++ = *ptrr;
}
ptrr = buffer_block;
ptrw = ptr + (i * block_size);
for (j = 0; j < block_size; j++) *ptrw++ = *ptrr++;
}
SENNA_free(buffer_block);
}
static int is_little_endian_cpu() {
int x = 7;
char *ptr = (char *)&x;
if (ptr[0] == 0)
return 0;
else
return 1;
}
void SENNA_print_tensor_1d(float *tensor, int nrow) {
int r;
printf("\n---\n");
for (r = 0; r < nrow; r++) printf("%f ", tensor[r]);
printf("[Tensor of size %d]\n", nrow);
}
void SENNA_print_tensor_2d(float *tensor, int nrow, int ncolumn) {
int c, r;
printf("\n---\n");
for (c = 0; c < ncolumn; c++) {
for (r = 0; r < nrow; r++) printf("%f ", tensor[c * nrow + r]);
printf("\n---\n");
}
printf("[Tensor of size %dx%d]\n", nrow, ncolumn);
}
<|start_filename|>tonic-suite/asr/src/tree/cluster-utils.h<|end_filename|>
// tree/cluster-utils.h
// Copyright 2012 <NAME>
// Copyright 2009-2011 Microsoft Corporation; Saarland University
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_TREE_CLUSTER_UTILS_H_
#define KALDI_TREE_CLUSTER_UTILS_H_
#include <vector>
#include "matrix/matrix-lib.h"
#include "itf/clusterable-itf.h"
namespace kaldi {
/// \addtogroup clustering_group_simple
/// @{
/// Returns the total objective function after adding up all the
/// statistics in the vector (pointers may be NULL).
BaseFloat SumClusterableObjf(const std::vector<Clusterable *> &vec);
/// Returns the total normalizer (usually count) of the cluster (pointers may be
/// NULL).
BaseFloat SumClusterableNormalizer(const std::vector<Clusterable *> &vec);
/// Sums stats (ptrs may be NULL). Returns NULL if no non-NULL stats present.
Clusterable *SumClusterable(const std::vector<Clusterable *> &vec);
/** Fills in any (NULL) holes in "stats" vector, with empty stats, because
* certain algorithms require non-NULL stats. If "stats" nonempty, requires it
* to contain at least one non-NULL pointer that we can call Copy() on.
*/
void EnsureClusterableVectorNotNull(std::vector<Clusterable *> *stats);
/** Given stats and a vector "assignments" of the same size (that maps to
* cluster indices), sums the stats up into "clusters." It will add to any
* stats already present in "clusters" (although typically "clusters" will be
* empty when called), and it will extend with NULL pointers for any unseen
 * indices. Call EnsureClusterableVectorNotNull afterwards if you want to ensure
 * all non-NULL clusters. Pointers in "clusters" are owned by the caller. Pointers in
* "stats" do not have to be non-NULL.
*/
void AddToClusters(const std::vector<Clusterable *> &stats,
const std::vector<int32> &assignments,
std::vector<Clusterable *> *clusters);
/// AddToClustersOptimized does the same as AddToClusters (it sums up the stats
/// within each cluster), except it uses the sum of all the stats ("total") to
/// optimize the computation for speed, if possible. This will generally only
/// be
/// a significant speedup in the case where there are just two clusters, which
/// can happen in algorithms that are doing binary splits; the idea is that we
/// sum up all the stats in one cluster (the one with the fewest points in it),
/// and then subtract from the total.
void AddToClustersOptimized(const std::vector<Clusterable *> &stats,
const std::vector<int32> &assignments,
const Clusterable &total,
std::vector<Clusterable *> *clusters);
/// @} end "addtogroup clustering_group_simple"
/// \addtogroup clustering_group_algo
/// @{
// Note, in the algorithms below, it is assumed that the input "points" (which
// is std::vector<Clusterable*>) is all non-NULL.
/** A bottom-up clustering algorithm. There are two parameters that control how
 * many clusters we get: a "max_merge_thresh" which is a threshold for merging
 * clusters, and a min_clust which puts a floor on the number of clusters we
 * want. Set max_merge_thresh very large to use min_clust only, or set
 * min_clust to 0 to use max_merge_thresh only.
 *
 * The algorithm is:
 * \code
 *   while (num-clusters > min_clust && smallest_merge_cost <= max_merge_thresh)
 *     merge the closest two clusters.
 * \endcode
 *
 * @param points [in] Points to be clustered (may not contain NULL pointers)
 * @param thresh [in] Threshold on cost change from merging clusters; clusters
 *        won't be merged if the cost is more than this
 * @param min_clust [in] Minimum number of clusters desired; we'll stop merging
 *        after reaching this number.
 * @param clusters_out [out] If non-NULL, will be set to a vector of size equal
 *        to the number of output clusters, containing the clustered
 *        statistics. Must be empty when called.
 * @param assignments_out [out] If non-NULL, will be resized to the number of
 *        points, and each element is the index of the cluster that point
 *        was assigned to.
 * @return Returns the total objf change relative to all clusters being
 *        separate, which is a negative number. Note that this is not the same
 *        as what the other clustering algorithms return.
 */
BaseFloat ClusterBottomUp(const std::vector<Clusterable *> &points,
BaseFloat thresh, int32 min_clust,
std::vector<Clusterable *> *clusters_out,
std::vector<int32> *assignments_out);
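// A minimal usage sketch (hedged; assumes 'stats' already holds non-NULL
// Clusterable* pointers owned by the caller, e.g. GaussClusterable objects):
//
//   std::vector<Clusterable*> clusters;   // must be empty when called
//   std::vector<int32> assignments;
//   BaseFloat objf_change = ClusterBottomUp(stats, 1.0e+10 /*thresh*/,
//                                           20 /*min_clust*/,
//                                           &clusters, &assignments);
//   // ... use clusters / assignments ...
//   DeletePointers(&clusters);  // the output clusters are owned by the caller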
/** This is a bottom-up clustering where the points are pre-clustered in a set
* of compartments, such that only points in the same compartment are clustered
* together. The compartment and pair of points with the smallest merge cost
* is selected and the points are clustered. The result stays in the same
* compartment. The code does not merge compartments, and hence assumes that
* the number of compartments is smaller than the 'min_clust' option.
* The clusters in "clusters_out" are newly allocated and owned by the caller.
*/
BaseFloat ClusterBottomUpCompartmentalized(
const std::vector<std::vector<Clusterable *> > &points, BaseFloat thresh,
int32 min_clust, std::vector<std::vector<Clusterable *> > *clusters_out,
std::vector<std::vector<int32> > *assignments_out);
struct RefineClustersOptions {
int32 num_iters; // must be >= 0. If zero, does nothing.
int32 top_n; // must be >= 2.
RefineClustersOptions() : num_iters(100), top_n(5) {}
RefineClustersOptions(int32 num_iters_in, int32 top_n_in)
: num_iters(num_iters_in), top_n(top_n_in) {}
// include Write and Read functions because this object gets written/read as
// part of the QuestionsForKeyOptions class.
void Write(std::ostream &os, bool binary) const;
void Read(std::istream &is, bool binary);
};
/** RefineClusters is mainly used internally by other clustering algorithms.
*
* It starts with a given assignment of points to clusters and
* keeps trying to improve it by moving points from cluster to cluster, up to
* a maximum number of iterations.
*
* "clusters" and "assignments" are both input and output variables, and so
* both MUST be non-NULL.
*
* "top_n" (>=2) is a pruning value: more is more exact, fewer is faster. The
* algorithm initially finds the "top_n" closest clusters to any given point,
 * and from that point it only considers moving to those "top_n" clusters. Since
* RefineClusters is called multiple times from ClusterKMeans (for instance),
* this is not really a limitation.
*/
BaseFloat RefineClusters(const std::vector<Clusterable *> &points,
std::vector<Clusterable *> *clusters /*non-NULL*/,
std::vector<int32> *assignments /*non-NULL*/,
RefineClustersOptions cfg = RefineClustersOptions());
struct ClusterKMeansOptions {
RefineClustersOptions refine_cfg;
int32 num_iters;
  int32 num_tries;  // if > 1, try the whole procedure more than once and pick the best.
bool verbose;
ClusterKMeansOptions()
: refine_cfg(), num_iters(20), num_tries(2), verbose(true) {}
};
/** ClusterKMeans is a K-means-like clustering algorithm. It starts with
 * pseudo-random initialization of points to clusters and uses RefineClusters
 * to iteratively improve the cluster assignments. It does this for multiple
 * iterations and picks the result with the best objective function.
 *
 * ClusterKMeans implicitly uses Rand(). It will not necessarily return the
 * same value on different calls. Use sRand() if you want consistent results.
 * The algorithm used in ClusterKMeans is a "k-means-like" algorithm that tries
 * to be as efficient as possible. Firstly, since the algorithm it uses
 * includes random initialization, it tries the whole thing cfg.num_tries times
 * and picks the one with the best objective function. Each try, it does as
 * follows: it randomly initializes points to clusters, and then for
 * cfg.num_iters iterations it calls RefineClusters(). The options to
 * RefineClusters() are given by cfg.refine_cfg. Calling RefineClusters once
 * will always be at least as good as doing one iteration of reassigning points
 * to clusters, but will generally be quite a bit better (without taking too
 * much extra time).
 *
 * @param points [in] points to be clustered (must be all non-NULL).
 * @param num_clust [in] number of clusters requested (it will always return
 *        exactly this many, or will fail if num_clust > points.size()).
 * @param clusters_out [out] may be NULL; if non-NULL, should be empty when
 *        called. Will be set to a vector of statistics corresponding to the
 *        output clusters.
 * @param assignments_out [out] may be NULL; if non-NULL, will be set to a
 *        vector of the same size as "points", which says for each point which
 *        cluster it is assigned to.
 * @param cfg [in] configuration class specifying options to the algorithm.
 * @return Returns the objective function improvement versus everything being
 *         in the same cluster.
 */
BaseFloat ClusterKMeans(
const std::vector<Clusterable *> &points,
int32 num_clust, // exact number of clusters
std::vector<Clusterable *> *clusters_out, // may be NULL
std::vector<int32> *assignments_out, // may be NULL
ClusterKMeansOptions cfg = ClusterKMeansOptions());
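// A minimal usage sketch (hedged; 'points' is a std::vector<Clusterable*> of
// non-NULL pointers owned by the caller):
//
//   ClusterKMeansOptions kmeans_cfg;  // num_iters = 20, num_tries = 2 by default
//   std::vector<Clusterable*> clusters;
//   std::vector<int32> assignments;
//   BaseFloat impr = ClusterKMeans(points, 64, &clusters, &assignments,
//                                  kmeans_cfg);
//   DeletePointers(&clusters);        // the output clusters are newly allocated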
struct TreeClusterOptions {
ClusterKMeansOptions kmeans_cfg;
int32 branch_factor;
BaseFloat
thresh; // Objf change: if >0, may be used to control number of leaves.
TreeClusterOptions() : kmeans_cfg(), branch_factor(2), thresh(0) {
kmeans_cfg.verbose = false;
}
};
/** TreeCluster is a top-down clustering algorithm, using a binary tree (not
 * necessarily balanced). Returns objf improvement versus having all points
 * in one cluster. The algorithm is:
 *  - Initialize to 1 cluster (tree with 1 node).
 *  - Maintain, for each cluster, a "best-binary-split" (using ClusterKMeans
 *    to do so). Always split the highest scoring cluster, until we can do no
 *    more splits.
 *
 * @param points [in] Data points to be clustered
 * @param max_clust [in] Maximum number of clusters (you will get exactly this
 *        number, if there are at least this many points, except if you set the
 *        cfg.thresh value nonzero, in which case that threshold may limit the
 *        number of clusters).
 * @param clusters_out [out] If non-NULL, will be set to a vector whose first
 *        (*num_leaves_out) elements are the leaf clusters, and whose
 *        subsequent elements are the nonleaf nodes in the tree, in topological
 *        order with the root node last. Must be an empty vector when this
 *        function is called.
 * @param assignments_out [out] If non-NULL, will be set to a vector the same
 *        size as "points", where assignments[i] is the index of the leaf node
 *        to which the i'th point gets clustered.
 * @param clust_assignments_out [out] If non-NULL, will be set to a vector the
 *        same size as clusters_out which says for each node (leaf or nonleaf)
 *        the index of its parent. For the root node (which is last),
 *        clust_assignments_out[i] == i; for each i, clust_assignments_out[i] >= i,
 *        i.e. any node's parent is higher numbered than itself. If you don't
 *        need this information, consider using instead the ClusterTopDown
 *        function.
 * @param num_leaves_out [out] If non-NULL, will be set to the number of leaf
 *        nodes in the tree.
 * @param cfg [in] Configuration object that controls clustering behavior. Most
 *        important value is "thresh", which provides an alternative mechanism
 *        [other than max_clust] to limit the number of leaves.
 */
BaseFloat TreeCluster(const std::vector<Clusterable *> &points,
int32 max_clust, // max number of leaf-level clusters.
std::vector<Clusterable *> *clusters_out,
std::vector<int32> *assignments_out,
std::vector<int32> *clust_assignments_out,
int32 *num_leaves_out,
TreeClusterOptions cfg = TreeClusterOptions());
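// A minimal usage sketch (hedged): the first 'num_leaves' entries of 'clusters'
// are the leaf clusters; the remaining entries are the nonleaf nodes, with the
// root last (see clust_assignments for the parent index of each node).
//
//   std::vector<Clusterable*> clusters;
//   std::vector<int32> assignments, clust_assignments;
//   int32 num_leaves;
//   TreeCluster(points, 256 /*max_clust*/, &clusters, &assignments,
//               &clust_assignments, &num_leaves);
//   DeletePointers(&clusters);  // the output clusters are owned by the caller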
/**
 * A clustering algorithm that internally uses TreeCluster,
 * but does not give you the information about the structure of the tree.
 * The "clusters_out" and "assignments_out" may be NULL if the outputs are not
 * needed.
 *
 * @param points [in] points to be clustered (must be all non-NULL).
 * @param max_clust [in] Maximum number of clusters (you will get exactly this
 *        number, if there are at least this many points, except if you set the
 *        cfg.thresh value nonzero, in which case that threshold may limit the
 *        number of clusters).
 * @param clusters_out [out] may be NULL; if non-NULL, should be empty when
 *        called. Will be set to a vector of statistics corresponding to the
 *        output clusters.
 * @param assignments_out [out] may be NULL; if non-NULL, will be set to a
 *        vector of the same size as "points", which says for each point which
 *        cluster it is assigned to.
 * @param cfg [in] Configuration object that controls clustering behavior. Most
 *        important value is "thresh", which provides an alternative mechanism
 *        [other than max_clust] to limit the number of leaves.
 */
BaseFloat ClusterTopDown(const std::vector<Clusterable *> &points,
int32 max_clust, // max number of clusters.
std::vector<Clusterable *> *clusters_out,
std::vector<int32> *assignments_out,
TreeClusterOptions cfg = TreeClusterOptions());
/// @} end of "addtogroup clustering_group_algo"
} // end namespace kaldi.
#endif // KALDI_TREE_CLUSTER_UTILS_H_
<|start_filename|>tonic-suite/asr/src/matrix/matrix-lib.h<|end_filename|>
// matrix/matrix-lib.h
// Copyright 2009-2011 <NAME>; Microsoft Corporation; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
// Include everything from this directory.
// These files include other stuff that we need.
#ifndef KALDI_MATRIX_MATRIX_LIB_H_
#define KALDI_MATRIX_MATRIX_LIB_H_
#include "matrix/cblas-wrappers.h"
#include "base/kaldi-common.h"
#include "matrix/kaldi-vector.h"
#include "matrix/kaldi-matrix.h"
#include "matrix/sp-matrix.h"
#include "matrix/tp-matrix.h"
#include "matrix/matrix-functions.h"
#include "matrix/srfft.h"
#include "matrix/compressed-matrix.h"
#include "matrix/optimization.h"
#endif
<|start_filename|>tonic-suite/asr/src/ivectorbin/ivector-mean.cc<|end_filename|>
// ivectorbin/ivector-mean.cc
// Copyright 2013-2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
int main(int argc, char *argv[]) {
using namespace kaldi;
typedef kaldi::int32 int32;
try {
const char *usage =
"With 3 or 4 arguments, averages iVectors over all the\n"
"utterances of each speaker using the spk2utt file.\n"
"Input the spk2utt file and a set of iVectors indexed by\n"
"utterance; output is iVectors indexed by speaker. If 4\n"
"arguments are given, extra argument is a table for the number\n"
"of utterances per speaker (can be useful for PLDA). If 2\n"
"arguments are given, computes the mean of all input files and\n"
"writes out the mean vector.\n"
"\n"
"Usage: ivector-mean <spk2utt-rspecifier> <ivector-rspecifier> "
"<ivector-wspecifier> [<num-utt-wspecifier>]\n"
"or: ivector-mean <ivector-rspecifier> <mean-wxfilename>\n"
"e.g.: ivector-mean data/spk2utt exp/ivectors.ark exp/spk_ivectors.ark "
"exp/spk_num_utts.ark\n"
"or: ivector-mean exp/ivectors.ark exp/mean.vec\n"
"See also: ivector-subtract-global-mean\n";
ParseOptions po(usage);
bool binary_write = false;
po.Register("binary", &binary_write,
"If true, write output in binary "
"(only applicable when writing files, not archives/tables.");
po.Read(argc, argv);
if (po.NumArgs() < 2 || po.NumArgs() > 4) {
po.PrintUsage();
exit(1);
}
if (po.NumArgs() == 2) {
// Compute the mean of the input vectors and write it out.
std::string ivector_rspecifier = po.GetArg(1),
mean_wxfilename = po.GetArg(2);
int32 num_done = 0;
SequentialBaseFloatVectorReader ivector_reader(ivector_rspecifier);
Vector<double> sum;
for (; !ivector_reader.Done(); ivector_reader.Next()) {
if (sum.Dim() == 0) sum.Resize(ivector_reader.Value().Dim());
sum.AddVec(1.0, ivector_reader.Value());
num_done++;
}
if (num_done == 0) {
KALDI_ERR << "No iVectors read";
} else {
sum.Scale(1.0 / num_done);
WriteKaldiObject(sum, mean_wxfilename, binary_write);
return 0;
}
} else {
std::string spk2utt_rspecifier = po.GetArg(1),
ivector_rspecifier = po.GetArg(2),
ivector_wspecifier = po.GetArg(3),
num_utts_wspecifier = po.GetOptArg(4);
double spk_sumsq = 0.0;
Vector<double> spk_sum;
int64 num_spk_done = 0, num_spk_err = 0, num_utt_done = 0,
num_utt_err = 0;
RandomAccessBaseFloatVectorReader ivector_reader(ivector_rspecifier);
SequentialTokenVectorReader spk2utt_reader(spk2utt_rspecifier);
BaseFloatVectorWriter ivector_writer(ivector_wspecifier);
Int32Writer num_utts_writer(num_utts_wspecifier);
for (; !spk2utt_reader.Done(); spk2utt_reader.Next()) {
std::string spk = spk2utt_reader.Key();
const std::vector<std::string> &uttlist = spk2utt_reader.Value();
if (uttlist.empty()) {
KALDI_ERR << "Speaker with no utterances.";
}
Vector<BaseFloat> spk_mean;
int32 utt_count = 0;
for (size_t i = 0; i < uttlist.size(); i++) {
std::string utt = uttlist[i];
if (!ivector_reader.HasKey(utt)) {
KALDI_WARN << "No iVector present in input for utterance " << utt;
num_utt_err++;
} else {
if (utt_count == 0) {
spk_mean = ivector_reader.Value(utt);
} else {
spk_mean.AddVec(1.0, ivector_reader.Value(utt));
}
num_utt_done++;
utt_count++;
}
}
if (utt_count == 0) {
KALDI_WARN << "Not producing output for speaker " << spk
<< " since no utterances had iVectors";
num_spk_err++;
} else {
spk_mean.Scale(1.0 / utt_count);
ivector_writer.Write(spk, spk_mean);
if (num_utts_wspecifier != "") num_utts_writer.Write(spk, utt_count);
num_spk_done++;
spk_sumsq += VecVec(spk_mean, spk_mean);
if (spk_sum.Dim() == 0) spk_sum.Resize(spk_mean.Dim());
spk_sum.AddVec(1.0, spk_mean);
}
}
KALDI_LOG << "Computed mean of " << num_spk_done << " speakers ("
<< num_spk_err << " with no utterances), consisting of "
<< num_utt_done << " utterances (" << num_utt_err
<< " absent from input).";
if (num_spk_done != 0) {
spk_sumsq /= num_spk_done;
spk_sum.Scale(1.0 / num_spk_done);
double mean_length = spk_sum.Norm(2.0), spk_length = sqrt(spk_sumsq),
norm_spk_length = spk_length / sqrt(spk_sum.Dim());
KALDI_LOG << "Norm of mean of speakers is " << mean_length
<< ", root-mean-square speaker-iVector length divided by "
<< "sqrt(dim) is " << norm_spk_length;
}
return (num_spk_done != 0 ? 0 : 1);
}
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/featbin/remove-mean.cc<|end_filename|>
// featbin/remove-mean.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "feat/feature-functions.h"
#include "matrix/kaldi-matrix.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Remove mean from each feature file\n"
" [ for per-speaker normalization, use add-cmvn-stats and apply-cmvn "
"]\n"
"Usage: remove-mean [options] in-rspecifier out-wspecifier\n";
ParseOptions po(usage);
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
std::string rspecifier = po.GetArg(1);
std::string wspecifier = po.GetArg(2);
BaseFloatMatrixWriter feat_writer(wspecifier);
SequentialBaseFloatMatrixReader feat_reader(rspecifier);
for (; !feat_reader.Done(); feat_reader.Next()) {
std::string key = feat_reader.Key();
Matrix<BaseFloat> feats(feat_reader.Value());
if (feats.NumRows() == 0) {
KALDI_WARN << "Empty feature matrix for key " << key;
continue;
}
Vector<BaseFloat> mean(feats.NumCols());
mean.AddRowSumMat(1.0, feats);
mean.Scale(1.0 / feats.NumRows());
for (int32 i = 0; i < feats.NumRows(); i++)
feats.Row(i).AddVec(-1.0, mean);
feat_writer.Write(key, feats);
}
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/sgmm2/estimate-am-sgmm2-ebw.h<|end_filename|>
// sgmm2/estimate-am-sgmm2-ebw.h
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_SGMM2_ESTIMATE_AM_SGMM2_EBW_H_
#define KALDI_SGMM2_ESTIMATE_AM_SGMM2_EBW_H_ 1
#include <string>
#include <vector>
#include "gmm/model-common.h"
#include "itf/options-itf.h"
#include "sgmm2/estimate-am-sgmm2.h"
namespace kaldi {
/**
This header implements a form of Extended Baum-Welch training for SGMMs.
If you are confused by this comment, see Dan Povey's thesis for an
explanation of
Extended Baum-Welch.
A note on the EBW (Extended Baum-Welch) updates for the SGMMs... In general
there is
a parameter-specific value D that is similar to the D in EBW for GMMs. The
value of
D is generally set to:
E * (denominator-count for that parameter) + tau-value for that
parameter
where the tau-values are user-specified parameters that are specific to the
type of
the parameter (e.g. phonetic vector, subspace projection, etc.). Things are
a bit
more complex for this update than for GMMs, because it's not just a question
of picking
a tau-value for smoothing: there is sometimes a scatter-matrix of some kind
(e.g.
an outer product of vectors, or something) that defines a quadratic objective
function
that we'll add as smoothing. We have to pick where to get this
scatter-matrix from.
We feel that it's appropriate for the "E" part of the D to get its
scatter-matrix from
  denominator stats, and the tau part of the D to get half its scatter-matrix
  from both the numerator and denominator stats, assigned a weight proportional to
how much
stats there were. When you see the auxiliary function written out, it's
clear why this
makes sense.
*/
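// A worked example of the rule above (illustrative numbers only): with
// E = 2.0, a denominator count of 100 for some parameter, and a tau value of
// 50, the smoothing constant would be D = 2.0 * 100 + 50 = 250.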
struct EbwAmSgmm2Options {
BaseFloat
tau_v; ///< Smoothing constant for updates of sub-state vectors v_{jm}
BaseFloat lrate_v; ///< Learning rate used in updating v-- default 0.5
BaseFloat tau_M; ///< Smoothing constant for the M quantities
///(phone-subspace projections)
BaseFloat lrate_M; ///< Learning rate used in updating M-- default 0.5
BaseFloat tau_N; ///< Smoothing constant for the N quantities
///(speaker-subspace projections)
BaseFloat lrate_N; ///< Learning rate used in updating N-- default 0.5
BaseFloat tau_c; ///< Tau value for smoothing substate weights (c)
BaseFloat tau_w; ///< Tau value for smoothing update of phonetic-subspace
///weight projections (w)
BaseFloat lrate_w; ///< Learning rate used in updating w-- default 1.0
BaseFloat tau_u; ///< Tau value for smoothing update of speaker-subspace
///weight projections (u)
BaseFloat lrate_u; ///< Learning rate used in updating u-- default 1.0
BaseFloat max_impr_u; ///< Maximum improvement/frame allowed for u [0.25,
///carried over from ML update.]
BaseFloat tau_Sigma; ///< Tau value for smoothing covariance-matrices Sigma.
BaseFloat
lrate_Sigma; ///< Learning rate used in updating Sigma-- default 0.5
BaseFloat min_substate_weight; ///< Minimum allowed weight in a sub-state.
BaseFloat
cov_min_value;  ///< E.g. 0.5-- the minimum factor by which any eigenvalue of a
/// covariance is allowed to change [the maximum is the inverse of this, i.e.
/// 2.0 in this case]. For example, 0.9 would constrain the covariance quite
/// tightly, 0.1 would be a loose setting.
BaseFloat max_cond; ///< large value used in SolveQuadraticProblem.
BaseFloat
epsilon; ///< very small value used in SolveQuadraticProblem; workaround
/// for an issue in some implementations of SVD.
EbwAmSgmm2Options() {
tau_v = 50.0;
lrate_v = 0.5;
tau_M = 500.0;
lrate_M = 0.5;
tau_N = 500.0;
lrate_N = 0.5;
tau_c = 10.0;
tau_w = 50.0;
lrate_w = 1.0;
tau_u = 50.0;
lrate_u = 1.0;
max_impr_u = 0.25;
tau_Sigma = 500.0;
lrate_Sigma = 0.5;
min_substate_weight = 1.0e-05;
cov_min_value = 0.5;
max_cond = 1.0e+05;
epsilon = 1.0e-40;
}
void Register(OptionsItf *po) {
std::string module = "EbwAmSgmm2Options: ";
po->Register("tau-v", &tau_v,
module + "Smoothing constant for phone vector estimation.");
po->Register(
"lrate-v", &lrate_v,
module + "Learning rate constant for phone vector estimation.");
po->Register("tau-m", &tau_M, module +
"Smoothing constant for estimation of "
"phonetic-subspace projections (M).");
po->Register(
"lrate-m", &lrate_M,
module + "Learning rate constant for phonetic-subspace projections.");
po->Register("tau-n", &tau_N, module +
"Smoothing constant for estimation of "
"speaker-subspace projections (N).");
po->Register(
"lrate-n", &lrate_N,
module + "Learning rate constant for speaker-subspace projections.");
po->Register(
"tau-c", &tau_c,
module + "Smoothing constant for estimation of substate weights (c)");
po->Register("tau-w", &tau_w, module +
"Smoothing constant for estimation of "
"phonetic-space weight projections (w)");
po->Register(
"lrate-w", &lrate_w,
module +
"Learning rate constant for phonetic-space weight-projections (w)");
po->Register("tau-u", &tau_u, module +
"Smoothing constant for estimation of "
"speaker-space weight projections (u)");
po->Register(
"lrate-u", &lrate_u,
module +
"Learning rate constant for speaker-space weight-projections (u)");
po->Register("tau-sigma", &tau_Sigma, module +
"Smoothing constant for "
"estimation of within-class "
"covariances (Sigma)");
po->Register("lrate-sigma", &lrate_Sigma, module +
"Constant that controls "
"speed of learning for "
"variances (larger->slower)");
po->Register("cov-min-value", &cov_min_value,
module +
"Minimum value that an eigenvalue of the updated "
"covariance matrix can take, "
"relative to its old value (maximum is inverse of this.)");
po->Register("min-substate-weight", &min_substate_weight,
module + "Floor for weights of sub-states.");
po->Register(
"max-cond", &max_cond,
module + "Value used in handling singular matrices during update.");
po->Register(
"epsilon", &epsilon,
module + "Very small value used in handling singular matrices during update.");
}
};
/** \class EbwAmSgmm2Updater
* Contains the functions needed to update the SGMM parameters.
*/
class EbwAmSgmm2Updater {
public:
explicit EbwAmSgmm2Updater(const EbwAmSgmm2Options &options)
: options_(options) {}
void Update(const MleAmSgmm2Accs &num_accs, const MleAmSgmm2Accs &den_accs,
AmSgmm2 *model, SgmmUpdateFlagsType flags,
BaseFloat *auxf_change_out, BaseFloat *count_out);
protected:
// The following two classes relate to multi-core parallelization of some
// phases of the update.
friend class EbwUpdateWClass;
friend class EbwUpdatePhoneVectorsClass;
private:
EbwAmSgmm2Options options_;
Vector<double> gamma_j_; ///< State occupancies
double UpdatePhoneVectors(const MleAmSgmm2Accs &num_accs,
const MleAmSgmm2Accs &den_accs,
const std::vector<SpMatrix<double> > &H,
AmSgmm2 *model) const;
// Called from UpdatePhoneVectors; updates a subset of states
// (relates to multi-threading).
void UpdatePhoneVectorsInternal(const MleAmSgmm2Accs &num_accs,
const MleAmSgmm2Accs &den_accs,
const std::vector<SpMatrix<double> > &H,
AmSgmm2 *model, double *auxf_impr,
int32 num_threads, int32 thread_id) const;
// Called from UpdatePhoneVectorsInternal
static void ComputePhoneVecStats(const MleAmSgmm2Accs &accs,
const AmSgmm2 &model,
const std::vector<SpMatrix<double> > &H,
int32 j1, int32 m,
const Vector<double> &w_jm, double gamma_jm,
Vector<double> *g_jm,
SpMatrix<double> *H_jm);
double UpdateM(const MleAmSgmm2Accs &num_accs, const MleAmSgmm2Accs &den_accs,
const std::vector<SpMatrix<double> > &Q_num,
const std::vector<SpMatrix<double> > &Q_den,
const Vector<double> &gamma_num,
const Vector<double> &gamma_den, AmSgmm2 *model) const;
double UpdateN(const MleAmSgmm2Accs &num_accs, const MleAmSgmm2Accs &den_accs,
const Vector<double> &gamma_num,
const Vector<double> &gamma_den, AmSgmm2 *model) const;
double UpdateVars(const MleAmSgmm2Accs &num_accs,
const MleAmSgmm2Accs &den_accs,
const Vector<double> &gamma_num,
const Vector<double> &gamma_den,
const std::vector<SpMatrix<double> > &S_means,
AmSgmm2 *model) const;
/// Note: in the discriminative case we do just one iteration of
/// updating the w quantities.
double UpdateW(const MleAmSgmm2Accs &num_accs, const MleAmSgmm2Accs &den_accs,
const Vector<double> &gamma_num,
const Vector<double> &gamma_den, AmSgmm2 *model);
double UpdateU(const MleAmSgmm2Accs &num_accs, const MleAmSgmm2Accs &den_accs,
const Vector<double> &gamma_num,
const Vector<double> &gamma_den, AmSgmm2 *model);
double UpdateSubstateWeights(const MleAmSgmm2Accs &num_accs,
const MleAmSgmm2Accs &den_accs, AmSgmm2 *model);
KALDI_DISALLOW_COPY_AND_ASSIGN(EbwAmSgmm2Updater);
EbwAmSgmm2Updater() {} // Prevent unconfigured updater.
};
} // namespace kaldi
#endif  // KALDI_SGMM2_ESTIMATE_AM_SGMM2_EBW_H_
<|start_filename|>tonic-suite/asr/src/nnet2/train-nnet-perturbed.cc<|end_filename|>
// nnet2/train-nnet-perturbed.cc
// Copyright 2012-2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet2/train-nnet-perturbed.h"
#include "nnet2/nnet-update.h"
#include "thread/kaldi-thread.h"
namespace kaldi {
namespace nnet2 {
class NnetPerturbedUpdater {
public:
// Note: in the case of training with SGD, "nnet" and "nnet_to_update" will be
// identical. They'd be different if we're accumulating the gradient for a
// held-out set and don't want to update the model, but this shouldn't happen
// for this "perturbed" update. nnet_to_update may be NULL if you don't
// want to do backprop, but this probably doesn't make sense.
// num_layers_before_input is the number of layers to ignore before what
// we consider to be the input (x) for purposes of this technique. This will
// likely equal 2: one for the feature-splicing layer (SpliceComponent) and
// one for the preconditioning layer (FixedAffineComponent).
//
// within_class_covar is the within-class covariance matrix, possibly already
// projected by the preconditioning transform (see InitWithinClassCovar).
NnetPerturbedUpdater(const Nnet &nnet, int32 num_layers_before_input,
const CuMatrix<BaseFloat> &within_class_covar,
Nnet *nnet_to_update);
// This function does the entire forward and backward computation for this
// minibatch. Outputs to tot_objf_orig and tot_objf_perturbed the total
// objective function (including any weighting factors) over this minibatch,
// and the same after perturbing the data.
void ComputeForMinibatch(const std::vector<NnetExample> &data, BaseFloat D,
double *tot_objf_orig, double *tot_objf_perturbed);
protected:
/// takes the input and formats as a single matrix, in forward_data_[0].
void FormatInput(const std::vector<NnetExample> &data);
/// Do the forward propagation for layers 0 ... num_layers_before_input_ - 1,
/// typically the first two layers. This will be called once per minibatch.
void PropagateInitial() { Propagate(0, num_layers_before_input_); }
/// Do the forward propagation for layers num_layers_before_input_
/// ... num-layers-1, typically all but the first two layers. This will be
/// called twice per minibatch, once before and once after perturbing the
/// inputs.
void PropagateRemaining() {
Propagate(num_layers_before_input_, nnet_.NumComponents());
}
/// Internal Propagate function, does the forward computation for
/// layers begin_layer ... end_layer - 1.
void Propagate(int32 begin_layer, int32 end_layer);
/// Computes objective function and derivative at output layer, but does not
/// do the backprop [for that, see Backprop()]. This will be called twice per
/// minibatch, once before and once after perturbing the inputs.
void ComputeObjfAndDeriv(
const std::vector<MatrixElement<BaseFloat> > &sv_labels,
CuMatrix<BaseFloat> *deriv, BaseFloat *tot_objf,
BaseFloat *tot_weight) const;
/// Computes supervision labels from data.
void ComputeSupervisionLabels(
const std::vector<NnetExample> &data,
std::vector<MatrixElement<BaseFloat> > *sv_labels);
/// Backprop must be called after ComputeObjfAndDeriv (it will be called
/// twice, the first time with a NULL nnet_to_update pointer). It does the
/// backpropagation (not including the first num_layers_before_input_ layers).
/// "nnet_to_update" is updated, if non-NULL. Note: "deriv" will contain, at
/// input, the derivative w.r.t. the output layer (as computed by
/// ComputeObjfAndDeriv), but will be used as a temporary variable by this
/// function, and at exit will contain the derivative of the objective function
/// w.r.t. the input of layer num_layers_before_input_.
void Backprop(Nnet *nnet_to_update, CuMatrix<BaseFloat> *deriv) const;
/// Perturb the input features (actually, the features at the input of layer
/// num_layers_before_input_). This modifies the value of
/// forward_data_[num_layers_before_input_]. For the math, see \ref
/// train-nnet-perturbed.h
void PerturbInput(const CuMatrix<BaseFloat> &deriv_at_input, BaseFloat D);
private:
const Nnet &nnet_;
Nnet *nnet_to_update_;
int32 num_layers_before_input_; // Number of layers before whichever layer we
// regard as the input for purposes of this
// method (normally 2, to include splicing
// layer and preconditioning layer)
const CuMatrix<BaseFloat> &within_class_covar_;
int32 num_chunks_; // same as the minibatch size.
std::vector<CuMatrix<BaseFloat> > forward_data_; // The forward data
// for the outputs of each of the components.
};
NnetPerturbedUpdater::NnetPerturbedUpdater(
const Nnet &nnet, int32 num_layers_before_input,
const CuMatrix<BaseFloat> &within_class_covar, Nnet *nnet_to_update)
: nnet_(nnet),
nnet_to_update_(nnet_to_update),
num_layers_before_input_(num_layers_before_input),
within_class_covar_(within_class_covar) {
KALDI_ASSERT(num_layers_before_input_ >= 0 &&
num_layers_before_input < nnet.NumComponents());
for (int32 c = 0; c < num_layers_before_input_; c++) {
const Component *comp = &(nnet.GetComponent(c));
const UpdatableComponent *uc =
dynamic_cast<const UpdatableComponent *>(comp);
if (uc != NULL) {
KALDI_ERR << "One of the pre-input layers is updatable.";
}
}
}
void NnetPerturbedUpdater::PerturbInput(
const CuMatrix<BaseFloat> &deriv_at_input, BaseFloat D) {
// The code doesn't handle the case where there is further splicing after the
// input.
KALDI_ASSERT(num_chunks_ == deriv_at_input.NumRows());
// For the math, see train-nnet-perturbed.h.
// deriv_at_input is \nabla in the math.
// "input" is the input features, currently unmodified, but we'll
// modify them.
CuMatrix<BaseFloat> &input(forward_data_[num_layers_before_input_]);
KALDI_ASSERT(SameDim(input, deriv_at_input));
// Each row of deriv_w will equal (W nabla_t)', where ' is transpose.
CuMatrix<BaseFloat> deriv_w(input.NumRows(), input.NumCols());
// note: for the second transpose-ness argument below we can choose either
// kTrans or kNoTrans because the matrix is symmetric. I'm guessing that
// kTrans will be faster.
deriv_w.AddMatMat(1.0, deriv_at_input, kNoTrans, within_class_covar_, kTrans,
0.0);
// k will be used to compute and store the gradient-scaling factor k_t.
CuVector<BaseFloat> k(deriv_at_input.NumRows());
// after the next call, each element of k will contain (\nabla_t^T W \nabla_t)
// We want k_t = D / sqrt(\nabla_t^T W \nabla_t)
// so we need to take this to the power -0.5.
// We can't do this if it's zero, so we first floor to a very small value.
k.AddDiagMatMat(1.0, deriv_w, kNoTrans, deriv_at_input, kTrans, 0.0);
int32 num_floored = k.ApplyFloor(1.0e-20);
if (num_floored > 0) {
// Should only happen at the very start of training,
KALDI_WARN << num_floored << " gradients floored (derivative at input was "
<< "close to zero).. should only happen at start of training "
<< "or when adding a new layer.";
}
k.ApplyPow(-0.5);
// now we have k_t = 1.0 / sqrt(\nabla_t^T W \nabla_t).
// in the math, k_t contains an additional factor of D, but we'll
// add this later.
// Below, we will do x'_t = x_t - k_t W \nabla_t
// Here, each row of deriv_w contains the transpose of W \nabla_t.
// The factor of D is because it was missing in k.
input.AddDiagVecMat(-1.0 * D, k, deriv_w, kNoTrans, 1.0);
}
void NnetPerturbedUpdater::ComputeForMinibatch(
const std::vector<NnetExample> &data, BaseFloat D, double *tot_objf_orig,
double *tot_objf_perturbed) {
FormatInput(data);
PropagateInitial();
PropagateRemaining();
CuMatrix<BaseFloat> tmp_deriv;
std::vector<MatrixElement<BaseFloat> > sv_labels;
ComputeSupervisionLabels(data, &sv_labels);
BaseFloat tot_objf, tot_weight;
ComputeObjfAndDeriv(sv_labels, &tmp_deriv, &tot_objf, &tot_weight);
KALDI_VLOG(4) << "Objective function (original) is "
<< (tot_objf / tot_weight) << " per sample, over " << tot_weight
<< " samples (weighted).";
*tot_objf_orig = tot_objf;
// only backprops till layer number num_layers_before_input_,
// and derivative at that layer is in tmp_deriv.
Backprop(NULL, &tmp_deriv);
// perturb forward_data_[num_layers_before_input_].
PerturbInput(tmp_deriv, D);
// Now propagate forward again from that point.
PropagateRemaining();
ComputeObjfAndDeriv(sv_labels, &tmp_deriv, &tot_objf, &tot_weight);
KALDI_VLOG(4) << "Objective function (perturbed) is "
<< (tot_objf / tot_weight) << " per sample, over " << tot_weight
<< " samples (weighted).";
*tot_objf_perturbed = tot_objf;
// The actual model updating would happen in the next call.
if (nnet_to_update_ != NULL) Backprop(nnet_to_update_, &tmp_deriv);
}
void NnetPerturbedUpdater::Propagate(int32 begin_layer, int32 end_layer) {
static int32 num_times_printed = 0;
for (int32 c = begin_layer; c < end_layer; c++) {
const Component &component = nnet_.GetComponent(c);
const CuMatrix<BaseFloat> &input = forward_data_[c];
CuMatrix<BaseFloat> &output = forward_data_[c + 1];
// Note: the Propagate function will automatically resize the
// output.
component.Propagate(input, num_chunks_, &output);
KALDI_VLOG(4) << "Propagating: sum at output of " << c << " is "
<< output.Sum();
// If we won't need the output of the previous layer for
// backprop, delete it to save memory.
bool need_last_output =
(c > 0 && nnet_.GetComponent(c - 1).BackpropNeedsOutput()) ||
component.BackpropNeedsInput();
if (g_kaldi_verbose_level >= 3 && num_times_printed < 100) {
KALDI_VLOG(3)
<< "Stddev of data for component " << c << " for this minibatch is "
<< (TraceMatMat(forward_data_[c], forward_data_[c], kTrans) /
(forward_data_[c].NumRows() * forward_data_[c].NumCols()));
num_times_printed++;
}
if (!need_last_output && c != num_layers_before_input_)
forward_data_[c].Resize(0, 0); // We won't need this data.
}
}
void NnetPerturbedUpdater::ComputeSupervisionLabels(
const std::vector<NnetExample> &data,
std::vector<MatrixElement<BaseFloat> > *sv_labels) {
sv_labels->clear();
sv_labels->reserve(num_chunks_); // We must have at least this many labels.
for (int32 m = 0; m < num_chunks_; m++) {
for (size_t i = 0; i < data[m].labels.size(); i++) {
MatrixElement<BaseFloat> tmp = {m, data[m].labels[i].first,
data[m].labels[i].second};
sv_labels->push_back(tmp);
}
}
}
void NnetPerturbedUpdater::ComputeObjfAndDeriv(
const std::vector<MatrixElement<BaseFloat> > &sv_labels,
CuMatrix<BaseFloat> *deriv, BaseFloat *tot_objf,
BaseFloat *tot_weight) const {
int32 num_components = nnet_.NumComponents();
deriv->Resize(num_chunks_, nnet_.OutputDim()); // sets to zero.
const CuMatrix<BaseFloat> &output(forward_data_[num_components]);
KALDI_ASSERT(SameDim(output, *deriv));
deriv->CompObjfAndDeriv(sv_labels, output, tot_objf, tot_weight);
}
void NnetPerturbedUpdater::Backprop(Nnet *nnet_to_update,
CuMatrix<BaseFloat> *deriv) const {
// We assume ComputeObjfAndDeriv has already been called.
for (int32 c = nnet_.NumComponents() - 1; c >= num_layers_before_input_;
c--) {
const Component &component = nnet_.GetComponent(c);
Component *component_to_update =
(nnet_to_update == NULL ? NULL : &(nnet_to_update->GetComponent(c)));
const CuMatrix<BaseFloat> &input = forward_data_[c],
&output = forward_data_[c + 1];
CuMatrix<BaseFloat> input_deriv(input.NumRows(), input.NumCols());
const CuMatrix<BaseFloat> &output_deriv(*deriv);
component.Backprop(input, output, output_deriv, num_chunks_,
component_to_update, &input_deriv);
input_deriv.Swap(deriv);
}
}
void NnetPerturbedUpdater::FormatInput(const std::vector<NnetExample> &data) {
KALDI_ASSERT(data.size() > 0);
int32 num_splice = nnet_.LeftContext() + 1 + nnet_.RightContext();
KALDI_ASSERT(data[0].input_frames.NumRows() >= num_splice);
int32 feat_dim = data[0].input_frames.NumCols(),
spk_dim = data[0].spk_info.Dim(),
tot_dim = feat_dim + spk_dim; // we append these at the neural net
// input... note, spk_dim might be 0.
KALDI_ASSERT(tot_dim == nnet_.InputDim());
KALDI_ASSERT(data[0].left_context >= nnet_.LeftContext());
int32 ignore_frames = data[0].left_context - nnet_.LeftContext(); // If
// the NnetExample has more left-context than we need, ignore some.
// this may happen in settings where we increase the amount of context during
// training, e.g. by adding layers that require more context.
num_chunks_ = data.size();
forward_data_.resize(nnet_.NumComponents() + 1);
// First copy to a single matrix on the CPU, so we can copy to
// GPU with a single copy command.
Matrix<BaseFloat> temp_forward_data(num_splice * num_chunks_, tot_dim);
for (int32 chunk = 0; chunk < num_chunks_; chunk++) {
SubMatrix<BaseFloat> dest(temp_forward_data, chunk * num_splice, num_splice,
0, feat_dim);
Matrix<BaseFloat> full_src(data[chunk].input_frames);
SubMatrix<BaseFloat> src(full_src, ignore_frames, num_splice, 0, feat_dim);
dest.CopyFromMat(src);
if (spk_dim != 0) {
SubMatrix<BaseFloat> spk_dest(temp_forward_data, chunk * num_splice,
num_splice, feat_dim, spk_dim);
spk_dest.CopyRowsFromVec(data[chunk].spk_info);
}
}
forward_data_[0].Swap(&temp_forward_data); // Copy to GPU, if being used.
}
void DoBackpropPerturbed(const Nnet &nnet, int32 num_layers_before_input,
const CuMatrix<BaseFloat> &within_class_covar,
BaseFloat D, const std::vector<NnetExample> &examples,
Nnet *nnet_to_update, double *tot_objf_orig,
double *tot_objf_perturbed) {
try {
NnetPerturbedUpdater updater(nnet, num_layers_before_input,
within_class_covar, nnet_to_update);
updater.ComputeForMinibatch(examples, D, tot_objf_orig, tot_objf_perturbed);
} catch (...) {
KALDI_LOG << "Error doing backprop, nnet info is: " << nnet.Info();
throw;
}
}
NnetPerturbedTrainer::NnetPerturbedTrainer(
const NnetPerturbedTrainerConfig &config,
const SpMatrix<BaseFloat> &within_class_covar, Nnet *nnet)
: config_(config),
nnet_(nnet),
logprob_this_phase_(0.0),
logprob_perturbed_this_phase_(0.0),
weight_this_phase_(0.0),
logprob_total_(0.0),
logprob_perturbed_total_(0.0),
weight_total_(0.0),
D_(config.initial_d) {
InitWithinClassCovar(within_class_covar);
num_phases_ = 0;
bool first_time = true;
BeginNewPhase(first_time);
}
// This function is used in class NnetPerturbedTrainer
// and the function DoBackpropPerturbedParallel.
void InitWithinClassCovar(const SpMatrix<BaseFloat> &within_class_covar,
const Nnet &nnet, int32 *num_layers_before_input,
CuMatrix<BaseFloat> *within_class_covar_out) {
CuSpMatrix<BaseFloat> orig_covar(within_class_covar);
*num_layers_before_input = 0;
KALDI_ASSERT(nnet.NumComponents() > *num_layers_before_input);
const Component *comp = &(nnet.GetComponent(*num_layers_before_input));
// Skip over any SpliceComponent that appears at the beginning of
// the network.
if (dynamic_cast<const SpliceComponent *>(comp) != NULL)
(*num_layers_before_input)++;
KALDI_ASSERT(nnet.NumComponents() > *num_layers_before_input);
comp = &(nnet.GetComponent(*num_layers_before_input));
const FixedAffineComponent *fa =
dynamic_cast<const FixedAffineComponent *>(comp);
if (fa != NULL) {
(*num_layers_before_input)++;
const CuMatrix<BaseFloat> &linear_params = fa->LinearParams();
if (linear_params.NumCols() != orig_covar.NumCols()) {
KALDI_ERR << "The neural network seems to expect a (spliced) feature "
<< "dimension of " << linear_params.NumCols() << ", but your "
<< "LDA stats have a dimension of " << orig_covar.NumCols();
}
CuMatrix<BaseFloat> temp(linear_params.NumRows(), orig_covar.NumRows());
// temp = linear_params . orig_covar
temp.AddMatSp(1.0, linear_params, kNoTrans, orig_covar, 0.0);
within_class_covar_out->Resize(linear_params.NumRows(),
linear_params.NumRows());
// temp = linear_params . orig_covar . linear_params^T
within_class_covar_out->AddMatMat(1.0, temp, kNoTrans, linear_params,
kTrans, 0.0);
// note: this should be symmetric, spot-test it like this:
KALDI_ASSERT(ApproxEqual(
TraceMatMat(*within_class_covar_out, *within_class_covar_out, kNoTrans),
TraceMatMat(*within_class_covar_out, *within_class_covar_out, kTrans)));
} else {
if (comp->InputDim() != orig_covar.NumCols()) {
KALDI_ERR << "The neural network seems to expect a (spliced) feature "
<< "dimension of " << comp->InputDim() << ", but your "
<< "LDA stats have a dimension of " << orig_covar.NumCols();
}
within_class_covar_out->Resize(orig_covar.NumRows(), orig_covar.NumCols());
within_class_covar_out->CopyFromSp(orig_covar);
}
}
void NnetPerturbedTrainer::InitWithinClassCovar(
const SpMatrix<BaseFloat> &within_class_covar) {
kaldi::nnet2::InitWithinClassCovar(within_class_covar, *nnet_,
&num_layers_before_input_,
&within_class_covar_);
}
void NnetPerturbedTrainer::TrainOnExample(const NnetExample &value) {
buffer_.push_back(value);
if (static_cast<int32>(buffer_.size()) == config_.minibatch_size)
TrainOneMinibatch();
}
void NnetPerturbedTrainer::TrainOneMinibatch() {
KALDI_ASSERT(!buffer_.empty());
double tot_objf_orig, tot_objf_perturbed;
DoBackpropPerturbed(*nnet_, num_layers_before_input_, within_class_covar_, D_,
buffer_, nnet_, &tot_objf_orig, &tot_objf_perturbed);
logprob_this_phase_ += tot_objf_orig;
logprob_perturbed_this_phase_ += tot_objf_perturbed;
double weight = TotalNnetTrainingWeight(buffer_);
UpdateD(tot_objf_orig / weight, tot_objf_perturbed / weight);
weight_this_phase_ += weight;
buffer_.clear();
minibatches_seen_this_phase_++;
if (minibatches_seen_this_phase_ == config_.minibatches_per_phase) {
bool first_time = false;
BeginNewPhase(first_time);
}
}
void NnetPerturbedTrainer::UpdateD(BaseFloat orig_objf_per_example,
BaseFloat perturbed_objf_per_example) {
BaseFloat diff = orig_objf_per_example - perturbed_objf_per_example;
// note: diff should be positive in the normal case.
KALDI_ASSERT(config_.target_objf_change > 0.0 && config_.max_d_factor > 1.0);
BaseFloat objf_ratio =
config_.target_objf_change / std::max<BaseFloat>(1.0e-20, diff),
D_ratio = pow(objf_ratio, config_.tune_d_power);
if (D_ratio > config_.max_d_factor)
D_ratio = config_.max_d_factor;
else if (D_ratio < 1.0 / config_.max_d_factor)
D_ratio = 1.0 / config_.max_d_factor;
BaseFloat D_new = D_ * D_ratio;
KALDI_VLOG(3) << "Training objective function normal/perturbed is "
<< orig_objf_per_example << '/' << perturbed_objf_per_example
<< ", diff " << diff << " vs. target "
<< config_.target_objf_change << ", changing D by factor "
<< D_ratio << " to " << D_new;
D_ = D_new;
}
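// A hedged worked example of the D update above (the config values here are
// hypothetical, not asserted defaults): if target_objf_change = 0.1 and the
// observed diff = 0.4, then objf_ratio = 0.25; with tune_d_power = 0.2,
// D_ratio = 0.25^0.2 ~= 0.76, so D shrinks by roughly 24% (subject to the
// max_d_factor clamp), making the next perturbation gentler.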
void NnetPerturbedTrainer::BeginNewPhase(bool first_time) {
if (!first_time) {
BaseFloat logprob = logprob_this_phase_ / weight_this_phase_,
logprob_perturbed =
logprob_perturbed_this_phase_ / weight_this_phase_,
diff = logprob - logprob_perturbed;
KALDI_LOG << "Training objective function normal->perturbed is " << logprob
<< " -> " << logprob_perturbed << ", diff " << diff
<< " vs. target " << config_.target_objf_change << ", over "
<< weight_this_phase_ << " frames, D is " << D_;
}
logprob_total_ += logprob_this_phase_;
logprob_perturbed_total_ += logprob_perturbed_this_phase_;
weight_total_ += weight_this_phase_;
logprob_this_phase_ = 0.0;
logprob_perturbed_this_phase_ = 0.0;
weight_this_phase_ = 0.0;
minibatches_seen_this_phase_ = 0;
num_phases_++;
}
NnetPerturbedTrainer::~NnetPerturbedTrainer() {
if (!buffer_.empty()) {
KALDI_LOG << "Doing partial minibatch of size " << buffer_.size();
TrainOneMinibatch();
if (minibatches_seen_this_phase_ != 0) {
bool first_time = false;
BeginNewPhase(first_time);
}
}
if (weight_total_ == 0.0) {
KALDI_WARN << "No data seen.";
} else {
KALDI_LOG << "Did backprop on " << weight_total_
<< " examples, average log-prob normal->perturbed per frame is "
<< (logprob_total_ / weight_total_) << " -> "
<< (logprob_perturbed_total_ / weight_total_);
KALDI_LOG << "[this line is to be parsed by a script:] log-prob-per-frame="
<< (logprob_total_ / weight_total_);
}
}
// compare with DoBackpropParallelClass
class TrainParallelPerturbedClass : public MultiThreadable {
public:
// This constructor is only called for a temporary object
// that we pass to the RunMultiThreaded function.
TrainParallelPerturbedClass(const NnetPerturbedTrainerConfig &config,
const CuMatrix<BaseFloat> &within_class_covar,
int32 num_layers_before_input, BaseFloat *D,
Nnet *nnet, ExamplesRepository *repository,
double *log_prob_orig_ptr,
double *log_prob_perturbed_ptr,
double *tot_weight_ptr)
: config_(config),
within_class_covar_(within_class_covar),
num_layers_before_input_(num_layers_before_input),
D_(D),
nnet_(nnet),
repository_(repository),
log_prob_orig_ptr_(log_prob_orig_ptr),
log_prob_perturbed_ptr_(log_prob_perturbed_ptr),
tot_weight_ptr_(tot_weight_ptr),
log_prob_orig_(0.0),
log_prob_perturbed_(0.0),
tot_weight_(0.0) {}
// Use the default copy constructor.
// This does the main function of the class.
void operator()() {
std::vector<NnetExample> examples;
while (repository_->ProvideExamples(&examples)) {
double objf_orig, objf_perturbed,
weight = TotalNnetTrainingWeight(examples);
DoBackpropPerturbed(*nnet_, num_layers_before_input_, within_class_covar_,
*D_, examples, nnet_, &objf_orig, &objf_perturbed);
UpdateD(objf_orig / weight, objf_perturbed / weight);
tot_weight_ += weight;
log_prob_orig_ += objf_orig;
log_prob_perturbed_ += objf_perturbed;
KALDI_VLOG(4) << "Thread " << thread_id_ << " saw " << tot_weight_
<< " frames so far (weighted); likelihood "
<< "per frame (orig->perturbed) so far is "
<< (log_prob_orig_ / tot_weight_) << " -> "
<< (log_prob_perturbed_ / tot_weight_);
examples.clear();
}
}
~TrainParallelPerturbedClass() {
*log_prob_orig_ptr_ += log_prob_orig_;
*log_prob_perturbed_ptr_ += log_prob_perturbed_;
*tot_weight_ptr_ += tot_weight_;
}
private:
void UpdateD(BaseFloat orig_logprob, BaseFloat perturbed_logprob) {
BaseFloat diff = orig_logprob - perturbed_logprob;
// note: diff should be positive in the normal case.
KALDI_ASSERT(config_.target_objf_change > 0.0 &&
config_.max_d_factor > 1.0);
// divide the power we raise the ratio to when tuning D, by the
// number of threads; this should ensure stability of the update.
BaseFloat tune_d_power = config_.tune_d_power / g_num_threads;
BaseFloat objf_ratio = config_.target_objf_change /
std::max<BaseFloat>(1.0e-20, diff),
D_ratio = pow(objf_ratio, tune_d_power);
if (D_ratio > config_.max_d_factor)
D_ratio = config_.max_d_factor;
else if (D_ratio < 1.0 / config_.max_d_factor)
D_ratio = 1.0 / config_.max_d_factor;
BaseFloat D_new = (*D_) * D_ratio;
*D_ = D_new;
// Note: we are accessing *D_ from multiple threads without
// locking, but the negative consequences of this contention are
// very small (D is only a slowly-varying tuning constant, so reading a
// slightly stale value is harmless).
KALDI_VLOG(3) << "Training objective function normal->perturbed is "
<< orig_logprob << " -> " << perturbed_logprob << ", diff "
<< diff << " vs. target " << config_.target_objf_change
<< ", changing D by factor " << D_ratio << " to " << D_new;
}
const NnetPerturbedTrainerConfig &config_;
const CuMatrix<BaseFloat> &within_class_covar_;
int32 num_layers_before_input_;
BaseFloat *D_; // Constant D that controls how much to perturb the data. We
// update this as well as use it.
Nnet *nnet_;
ExamplesRepository *repository_;
double *log_prob_orig_ptr_;
double *log_prob_perturbed_ptr_;
double *tot_weight_ptr_;
double
log_prob_orig_; // log-like times num frames (before perturbing features)
double log_prob_perturbed_; // log-like times num frames (after perturbing
// features)
double tot_weight_; // normalizing factor for the above.
};
void DoBackpropPerturbedParallel(const NnetPerturbedTrainerConfig &config,
const SpMatrix<BaseFloat> &within_class_covar,
SequentialNnetExampleReader *example_reader,
double *tot_objf_orig,
double *tot_objf_perturbed, double *tot_weight,
Nnet *nnet) {
// within_class_covar_processed is the within-class covar as CuMatrix,
// possibly
// projected by the preconditioning transform in any FixedAffineComponent.
CuMatrix<BaseFloat> within_class_covar_processed;
int32 num_layers_before_input;
InitWithinClassCovar(within_class_covar, *nnet, &num_layers_before_input,
&within_class_covar_processed);
BaseFloat D = config.initial_d;
ExamplesRepository
repository;  // handles parallel programming issues regarding the examples
// (hands minibatches from this thread to the training threads).
*tot_objf_orig = *tot_objf_perturbed = *tot_weight = 0.0;
TrainParallelPerturbedClass trainer_proto(
config, within_class_covar_processed, num_layers_before_input, &D, nnet,
&repository, tot_objf_orig, tot_objf_perturbed, tot_weight);
{
// The initialization of the following class spawns the threads that
// process the examples. They get re-joined in its destructor.
MultiThreader<TrainParallelPerturbedClass> m(g_num_threads, trainer_proto);
std::vector<NnetExample> examples;
for (; !example_reader->Done(); example_reader->Next()) {
examples.push_back(example_reader->Value());
if (examples.size() == config.minibatch_size)
repository.AcceptExamples(&examples);
}
if (!examples.empty()) // partial minibatch.
repository.AcceptExamples(&examples);
// Here, the destructor of "m" re-joins the threads; the per-thread
// log-prob and weight totals are summed into the output variables in the
// destructors of the objects of type TrainParallelPerturbedClass.
repository.ExamplesDone();
}
KALDI_LOG << "Did backprop on " << *tot_weight
<< " examples, average log-prob "
<< "per frame (orig->perturbed) is "
<< (*tot_objf_orig / *tot_weight) << " -> "
<< (*tot_objf_perturbed / *tot_weight) << " over " << *tot_weight
<< " samples (weighted).";
KALDI_LOG << "[this line is to be parsed by a script:] log-prob-per-frame="
<< (*tot_objf_orig / *tot_weight);
}
} // namespace nnet2
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/bin/matrix-sum.cc<|end_filename|>
// bin/matrix-sum.cc
// Copyright 2012-2014 Johns Hopkins University (author: <NAME>)
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/kaldi-matrix.h"
namespace kaldi {
// sums a bunch of archives to produce one archive
// for back-compatibility with an older form, we support scaling
// of the first two input archives.
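// Illustrative invocation (hypothetical file names, consistent with the code
// below): "matrix-sum --scale1=0.5 --scale2=2.0 ark:a.ark ark:b.ark ark:c.ark
// ark:out.ark" writes, for each key of a.ark that is also present in b.ark and
// c.ark, the matrix 0.5*A + 2.0*B + 1.0*C.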
int32 TypeOneUsage(const ParseOptions &po, BaseFloat scale1, BaseFloat scale2) {
int32 num_args = po.NumArgs();
std::string matrix_in_fn1 = po.GetArg(1), matrix_out_fn = po.GetArg(num_args);
// Output matrix
BaseFloatMatrixWriter matrix_writer(matrix_out_fn);
// Input matrices
SequentialBaseFloatMatrixReader matrix_reader1(matrix_in_fn1);
std::vector<RandomAccessBaseFloatMatrixReader *> matrix_readers(
num_args - 2, static_cast<RandomAccessBaseFloatMatrixReader *>(NULL));
std::vector<std::string> matrix_in_fns(num_args - 2);
for (int32 i = 2; i < num_args; ++i) {
matrix_readers[i - 2] = new RandomAccessBaseFloatMatrixReader(po.GetArg(i));
matrix_in_fns[i - 2] = po.GetArg(i);
}
int32 n_utts = 0, n_total_matrices = 0, n_success = 0, n_missing = 0,
n_other_errors = 0;
for (; !matrix_reader1.Done(); matrix_reader1.Next()) {
std::string key = matrix_reader1.Key();
Matrix<BaseFloat> matrix1 = matrix_reader1.Value();
matrix_reader1.FreeCurrent();
n_utts++;
n_total_matrices++;
matrix1.Scale(scale1);
Matrix<BaseFloat> matrix_out(matrix1);
for (int32 i = 0; i < num_args - 2; ++i) {
if (matrix_readers[i]->HasKey(key)) {
Matrix<BaseFloat> matrix2 = matrix_readers[i]->Value(key);
n_total_matrices++;
if (SameDim(matrix2, matrix_out)) {
BaseFloat scale = (i == 0 ? scale2 : 1.0);
// note: i == 0 corresponds to the 2nd input archive.
matrix_out.AddMat(scale, matrix2, kNoTrans);
} else {
KALDI_WARN << "Dimension mismatch for utterance " << key << " : "
<< matrix2.NumRows() << " by " << matrix2.NumCols()
<< " for "
<< "system " << (i + 2)
<< ", rspecifier: " << matrix_in_fns[i] << " vs "
<< matrix_out.NumRows() << " by " << matrix_out.NumCols()
<< " primary matrix, rspecifier:" << matrix_in_fn1;
n_other_errors++;
}
} else {
KALDI_WARN << "No matrix found for utterance " << key << " for "
<< "system " << (i + 2)
<< ", rspecifier: " << matrix_in_fns[i];
n_missing++;
}
}
matrix_writer.Write(key, matrix_out);
n_success++;
}
KALDI_LOG << "Processed " << n_utts << " utterances: with a total of "
<< n_total_matrices << " matrices across " << (num_args - 1)
<< " different systems";
KALDI_LOG << "Produced output for " << n_success << " utterances; "
<< n_missing << " total missing matrices";
DeletePointers(&matrix_readers);
return (n_success != 0 && n_missing < (n_success - n_missing)) ? 0 : 1;
}
int32 TypeTwoUsage(const ParseOptions &po, bool binary) {
KALDI_ASSERT(po.NumArgs() == 2);
KALDI_ASSERT(ClassifyRspecifier(po.GetArg(1), NULL, NULL) != kNoRspecifier &&
"matrix-sum: first argument must be an rspecifier");
// if next assert fails it would be bug in the code as otherwise we shouldn't
// be called.
KALDI_ASSERT(ClassifyRspecifier(po.GetArg(2), NULL, NULL) == kNoRspecifier);
SequentialBaseFloatMatrixReader mat_reader(po.GetArg(1));
Matrix<double> sum;
int32 num_done = 0, num_err = 0;
for (; !mat_reader.Done(); mat_reader.Next()) {
const Matrix<BaseFloat> &mat = mat_reader.Value();
if (mat.NumRows() == 0) {
KALDI_WARN << "Zero matrix input for key " << mat_reader.Key();
num_err++;
} else {
if (sum.NumRows() == 0) sum.Resize(mat.NumRows(), mat.NumCols());
if (sum.NumRows() != mat.NumRows() || sum.NumCols() != mat.NumCols()) {
KALDI_WARN << "Dimension mismatch for key " << mat_reader.Key() << ": "
<< mat.NumRows() << " by " << mat.NumCols() << " vs. "
<< sum.NumRows() << " by " << sum.NumCols();
num_err++;
} else {
Matrix<double> dmat(mat);
sum.AddMat(1.0, dmat, kNoTrans);
num_done++;
}
}
}
Matrix<BaseFloat> sum_float(sum);
WriteKaldiObject(sum_float, po.GetArg(2), binary);
KALDI_LOG << "Summed " << num_done << " matrices, " << num_err
<< " with errors; wrote sum to "
<< PrintableWxfilename(po.GetArg(2));
return (num_done > 0 && num_err < num_done) ? 0 : 1;
}
// sum a bunch of single files to produce a single file [including
// extended filenames, of course]
int32 TypeThreeUsage(const ParseOptions &po, bool binary) {
KALDI_ASSERT(po.NumArgs() >= 2);
for (int32 i = 1; i <= po.NumArgs(); i++) {
if (ClassifyRspecifier(po.GetArg(i), NULL, NULL) != kNoRspecifier) {
KALDI_ERR << "Wrong usage (type 3): if first and last arguments are not "
<< "tables, the intermediate arguments must not be tables.";
}
}
bool add = true;
Matrix<BaseFloat> mat;
for (int32 i = 1; i < po.NumArgs(); i++) {
bool binary_in;
Input ki(po.GetArg(i), &binary_in);
// this Read function will throw if there is a size mismatch.
mat.Read(ki.Stream(), binary_in, add);
}
WriteKaldiObject(mat, po.GetArg(po.NumArgs()), binary);
KALDI_LOG << "Summed " << (po.NumArgs() - 1) << " matrices; "
<< "wrote sum to " << PrintableWxfilename(po.GetArg(po.NumArgs()));
return 0;
}
} // namespace kaldi
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Add matrices (supports various forms)\n"
"\n"
"Type one usage:\n"
" matrix-sum [options] <matrix-in-rspecifier1> [<matrix-in-rspecifier2>"
" <matrix-in-rspecifier3> ...] <matrix-out-wspecifier>\n"
" e.g.: matrix-sum ark:1.weights ark:2.weights ark:combine.weights\n"
" This usage supports the --scale1 and --scale2 options to scale the\n"
" first two input tables.\n"
"Type two usage (sums a single table input to produce a single "
"output):\n"
" matrix-sum [options] <matrix-in-rspecifier> <matrix-out-wxfilename>\n"
" e.g.: matrix-sum --binary=false mats.ark sum.mat\n"
"Type three usage (sums single-file inputs to produce a single "
"output):\n"
" matrix-sum [options] <matrix-in-rxfilename1> <matrix-in-rxfilename2> "
"..."
" <matrix-out-wxfilename>\n"
" e.g.: matrix-sum --binary=false 1.mat 2.mat 3.mat sum.mat\n"
"See also: matrix-sum-rows\n";
BaseFloat scale1 = 1.0, scale2 = 1.0;
bool binary = true;
ParseOptions po(usage);
po.Register("scale1", &scale1,
"Scale applied to first matrix "
"(only for type one usage)");
po.Register("scale2", &scale2,
"Scale applied to second matrix "
"(only for type one usage)");
po.Register("binary", &binary,
"If true, write output as binary (only "
"relevant for usage types two or three");
po.Read(argc, argv);
int32 N = po.NumArgs(), exit_status;
if (po.NumArgs() >= 2 &&
ClassifyRspecifier(po.GetArg(N), NULL, NULL) != kNoRspecifier) {
// output to table.
exit_status = TypeOneUsage(po, scale1, scale2);
} else if (po.NumArgs() == 2 &&
ClassifyRspecifier(po.GetArg(1), NULL, NULL) != kNoRspecifier &&
ClassifyRspecifier(po.GetArg(N), NULL, NULL) == kNoRspecifier) {
KALDI_ASSERT(scale1 == 1.0 && scale2 == 1.0);
// input from a single table, output not to table.
exit_status = TypeTwoUsage(po, binary);
} else if (po.NumArgs() >= 2 &&
ClassifyRspecifier(po.GetArg(1), NULL, NULL) == kNoRspecifier &&
ClassifyRspecifier(po.GetArg(N), NULL, NULL) == kNoRspecifier) {
KALDI_ASSERT(scale1 == 1.0 && scale2 == 1.0);
// summing flat files.
exit_status = TypeThreeUsage(po, binary);
} else {
po.PrintUsage();
exit(1);
}
return exit_status;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/ivector/voice-activity-detection.h<|end_filename|>
// ivector/voice-activity-detection.h
// Copyright 2013 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_IVECTOR_VOICE_ACTIVITY_DETECTION_H_
#define KALDI_IVECTOR_VOICE_ACTIVITY_DETECTION_H_
#include <cassert>
#include <cstdlib>
#include <string>
#include <vector>
#include "matrix/matrix-lib.h"
#include "util/common-utils.h"
#include "base/kaldi-error.h"
namespace kaldi {
/*
Note: we may move the location of this file in the future, e.g. to feat/
This code is geared toward speaker-id applications and is not suitable
for automatic speech recognition (ASR) because it makes independent
decisions for each frame without imposing any notion of continuity.
*/
struct VadEnergyOptions {
BaseFloat vad_energy_threshold;
BaseFloat vad_energy_mean_scale;
int32 vad_frames_context;
BaseFloat vad_proportion_threshold;
VadEnergyOptions()
: vad_energy_threshold(5.0),
vad_energy_mean_scale(0.5),
vad_frames_context(5),
vad_proportion_threshold(0.6) {}
void Register(OptionsItf *po) {
po->Register(
"vad-energy-threshold", &vad_energy_threshold,
"Constant term in energy threshold for MFCC0 for VAD (also see "
"--vad-energy-mean-scale)");
po->Register("vad-energy-mean-scale", &vad_energy_mean_scale,
"If this is set to s, to get the actual threshold we "
"let m be the mean log-energy of the file, and use "
"s*m + vad-energy-threshold");
po->Register("vad-frames-context", &vad_frames_context,
"Number of frames of context on each side of central frame, "
"in window for which energy is monitored");
po->Register("vad-proportion-threshold", &vad_proportion_threshold,
"Parameter controlling the proportion of frames within "
"the window that need to have more energy than the "
"threshold");
}
};
/// Compute voice-activity vector for a file: 1 if we judge the frame as
/// voiced, 0 otherwise. There are no continuity constraints.
/// This method is a very simple energy-based method which only looks
/// at the first coefficient of "input_features", which is assumed to
/// be a log-energy or something similar. A cutoff is set-- we use
/// a formula of the general type: cutoff = 5.0 + 0.5 * (average log-energy
/// in this file), and for each frame the decision is based on the
/// proportion of frames in a context window around the current frame,
/// which are above this cutoff.
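/// Hedged numeric example using the default options declared above (not the
/// output of an actual run): if the mean log-energy of a file is 14.0, the
/// cutoff is 5.0 + 0.5 * 14.0 = 12.0; with vad_frames_context = 5 a frame is
/// marked voiced (1.0) only if roughly 60% or more of the 11 frames in its
/// window have first-coefficient (log-energy) values above 12.0, else 0.0.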
void ComputeVadEnergy(const VadEnergyOptions &opts,
const MatrixBase<BaseFloat> &input_features,
Vector<BaseFloat> *output_voiced);
} // namespace kaldi
#endif // KALDI_IVECTOR_VOICE_ACTIVITY_DETECTION_H_
<|start_filename|>tonic-suite/asr/src/matrix/kaldi-matrix-inl.h<|end_filename|>
// matrix/kaldi-matrix-inl.h
// Copyright 2009-2011 Microsoft Corporation; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_MATRIX_KALDI_MATRIX_INL_H_
#define KALDI_MATRIX_KALDI_MATRIX_INL_H_ 1
#include "matrix/kaldi-vector.h"
namespace kaldi {
/// Empty constructor
template <typename Real>
Matrix<Real>::Matrix()
: MatrixBase<Real>(NULL, 0, 0, 0) {}
template <>
template <>
void MatrixBase<float>::AddVecVec(const float alpha,
const VectorBase<float> &ra,
const VectorBase<float> &rb);
template <>
template <>
void MatrixBase<double>::AddVecVec(const double alpha,
const VectorBase<double> &ra,
const VectorBase<double> &rb);
template <typename Real>
inline std::ostream &operator<<(std::ostream &os, const MatrixBase<Real> &M) {
M.Write(os, false);
return os;
}
template <typename Real>
inline std::istream &operator>>(std::istream &is, Matrix<Real> &M) {
M.Read(is, false);
return is;
}
template <typename Real>
inline std::istream &operator>>(std::istream &is, MatrixBase<Real> &M) {
M.Read(is, false);
return is;
}
} // namespace kaldi
#endif // KALDI_MATRIX_KALDI_MATRIX_INL_H_
<|start_filename|>tonic-suite/asr/src/sgmm2bin/sgmm2-sum-accs.cc<|end_filename|>
// sgmm2bin/sgmm2-sum-accs.cc
// Copyright 2009-2012 Saarland University; Microsoft Corporation
// Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "util/common-utils.h"
#include "sgmm2/estimate-am-sgmm2.h"
#include "hmm/transition-model.h"
int main(int argc, char *argv[]) {
try {
typedef kaldi::int32 int32;
const char *usage =
"Sum multiple accumulated stats files for SGMM training.\n"
"Usage: sgmm2-sum-accs [options] stats-out stats-in1 stats-in2 ...\n";
bool binary = true;
bool parallel = false;
kaldi::ParseOptions po(usage);
po.Register("binary", &binary, "Write output in binary mode");
po.Register(
"parallel", ¶llel,
"If true, the program makes sure to open all "
"filehandles before reading for any (useful when summing accs from "
"long processes)");
po.Read(argc, argv);
if (po.NumArgs() < 2) {
po.PrintUsage();
exit(1);
}
std::string stats_out_filename = po.GetArg(1);
kaldi::Vector<double> transition_accs;
kaldi::MleAmSgmm2Accs sgmm_accs;
if (parallel) {
std::vector<kaldi::Input *> inputs(po.NumArgs() - 1);
for (int i = 0; i < po.NumArgs() - 1; i++) {
std::string stats_in_filename = po.GetArg(i + 2);
inputs[i] = new kaldi::Input(stats_in_filename); // Don't try
// to work out binary status yet; this would cause us to wait
// for the output of that process. We delay it till later.
}
for (size_t i = 0; i < po.NumArgs() - 1; i++) {
bool b;
kaldi::InitKaldiInputStream(inputs[i]->Stream(), &b);
transition_accs.Read(inputs[i]->Stream(), b, true /* add values */);
sgmm_accs.Read(inputs[i]->Stream(), b, true /* add values */);
delete inputs[i];
}
} else {
for (int i = 2, max = po.NumArgs(); i <= max; i++) {
std::string stats_in_filename = po.GetArg(i);
bool binary_read;
kaldi::Input ki(stats_in_filename, &binary_read);
transition_accs.Read(ki.Stream(), binary_read, true /* add values */);
sgmm_accs.Read(ki.Stream(), binary_read, true /* add values */);
}
}
// Write out the accs
{
kaldi::Output ko(stats_out_filename, binary);
transition_accs.Write(ko.Stream(), binary);
sgmm_accs.Write(ko.Stream(), binary);
}
KALDI_LOG << "Written stats to " << stats_out_filename;
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>common/include/socket.h<|end_filename|>
/*
* Copyright (c) 2015, University of Michigan.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
/**
* @author: <NAME>, <NAME>
* @contact: <EMAIL>, <EMAIL>
*/
#ifndef SOCKET_H
#define SOCKET_H
// returns socket to tx data
int CLIENT_init(char* hostname, int portno, bool debug);
// returns socket where to rx data
int SERVER_init(int portno);
// tx len of data
void SOCKET_txsize(int socket, int len);
// receive len of data
int SOCKET_rxsize(int socket);
// send data over socket
int SOCKET_send(int socket, char* data, int size, bool debug);
// receive data over socket
int SOCKET_receive(int socket, char* data, int size, bool debug);
// close the socket
int SOCKET_close(int socket, bool debug);
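// A minimal usage sketch (the calling sequence is assumed from the comments
// above, not taken from an implementation file; error handling omitted):
//
//   int sock = CLIENT_init("localhost", 8080, false);
//   SOCKET_txsize(sock, len);            // announce the payload length
//   SOCKET_send(sock, buf, len, false);  // then stream the payload
//   SOCKET_close(sock, false);
//
// The receiving side would mirror this with SERVER_init, SOCKET_rxsize and
// SOCKET_receive.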
#endif
<|start_filename|>tonic-suite/asr/src/nnet2bin/nnet-am-shrink.cc<|end_filename|>
// nnet2bin/nnet-am-shrink.cc
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/shrink-nnet.h"
#include "nnet2/am-nnet.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace kaldi::nnet2;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
const char *usage =
"Using a validation set, compute optimal scaling parameters for each\n"
"class of neural network parameters (i.e. each updatable component), "
"to\n"
"maximize validation-set objective function.\n"
"\n"
"Usage: nnet-am-shrink [options] <model-in> <valid-examples-in> "
"<model-out>\n"
"\n"
"e.g.:\n"
" nnet-am-shrink 1.nnet ark:valid.egs 2.nnet\n";
bool binary_write = true;
NnetShrinkConfig shrink_config;
ParseOptions po(usage);
po.Register("binary", &binary_write, "Write output in binary mode");
shrink_config.Register(&po);
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string nnet_rxfilename = po.GetArg(1),
valid_examples_rspecifier = po.GetArg(2),
nnet_wxfilename = po.GetArg(3);
TransitionModel trans_model;
AmNnet am_nnet;
{
bool binary_read;
Input ki(nnet_rxfilename, &binary_read);
trans_model.Read(ki.Stream(), binary_read);
am_nnet.Read(ki.Stream(), binary_read);
}
std::vector<NnetExample> validation_set; // stores validation
// frames.
{ // This block adds samples to "validation_set".
SequentialNnetExampleReader example_reader(valid_examples_rspecifier);
for (; !example_reader.Done(); example_reader.Next())
validation_set.push_back(example_reader.Value());
KALDI_LOG << "Read " << validation_set.size() << " examples from the "
<< "validation set.";
KALDI_ASSERT(validation_set.size() > 0);
}
ShrinkNnet(shrink_config, validation_set, &(am_nnet.GetNnet()));
{
Output ko(nnet_wxfilename, binary_write);
trans_model.Write(ko.Stream(), binary_write);
am_nnet.Write(ko.Stream(), binary_write);
}
KALDI_LOG << "Finished shrinking neural net, wrote model to "
<< nnet_wxfilename;
return (validation_set.size() == 0 ? 1 : 0);
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/sgmm2/estimate-am-sgmm2-test.cc<|end_filename|>
// sgmm2/estimate-am-sgmm2-test.cc
// Copyright 2009-2011 Saarland University (author: <NAME>)
// 2012-2013 Johns Hopkins University (author: <NAME>)
// <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "gmm/model-test-common.h"
#include "sgmm2/am-sgmm2.h"
#include "sgmm2/estimate-am-sgmm2.h"
#include "util/kaldi-io.h"
using kaldi::AmSgmm2;
using kaldi::MleAmSgmm2Accs;
using kaldi::int32;
using kaldi::BaseFloat;
namespace ut = kaldi::unittest;
// Tests the Read() and Write() methods for the accumulators, in both binary
// and ASCII mode, as well as Check().
void TestSgmm2AccsIO(const AmSgmm2 &sgmm,
const kaldi::Matrix<BaseFloat> &feats) {
using namespace kaldi;
kaldi::SgmmUpdateFlagsType flags =
kaldi::kSgmmAll & ~kSgmmSpeakerWeightProjections;
kaldi::Sgmm2PerFrameDerivedVars frame_vars;
kaldi::Sgmm2PerSpkDerivedVars empty;
frame_vars.Resize(sgmm.NumGauss(), sgmm.FeatureDim(), sgmm.PhoneSpaceDim());
kaldi::Sgmm2GselectConfig sgmm_config;
sgmm_config.full_gmm_nbest =
std::min(sgmm_config.full_gmm_nbest, sgmm.NumGauss());
MleAmSgmm2Accs accs(sgmm, flags, true);
BaseFloat loglike = 0.0;
for (int32 i = 0; i < feats.NumRows(); i++) {
std::vector<int32> gselect;
sgmm.GaussianSelection(sgmm_config, feats.Row(i), &gselect);
sgmm.ComputePerFrameVars(feats.Row(i), gselect, empty, &frame_vars);
loglike += accs.Accumulate(sgmm, frame_vars, 0, 1.0, &empty);
}
accs.CommitStatsForSpk(sgmm, empty);
kaldi::MleAmSgmm2Options update_opts;
update_opts.check_v = (Rand() % 2 == 0);
AmSgmm2 *sgmm1 = new AmSgmm2();
sgmm1->CopyFromSgmm2(sgmm, false, false);
kaldi::MleAmSgmm2Updater updater(update_opts);
updater.Update(accs, sgmm1, flags);
sgmm1->ComputeDerivedVars();
std::vector<int32> gselect;
Sgmm2LikelihoodCache like_cache(sgmm.NumGroups(), sgmm.NumPdfs());
sgmm1->GaussianSelection(sgmm_config, feats.Row(0), &gselect);
sgmm1->ComputePerFrameVars(feats.Row(0), gselect, empty, &frame_vars);
BaseFloat loglike1 = sgmm1->LogLikelihood(frame_vars, 0, &like_cache, &empty);
delete sgmm1;
// First, non-binary write
accs.Write(kaldi::Output("tmpf", false).Stream(), false);
bool binary_in;
MleAmSgmm2Accs *accs1 = new MleAmSgmm2Accs();
// Non-binary read
kaldi::Input ki1("tmpf", &binary_in);
accs1->Read(ki1.Stream(), binary_in, false);
accs1->Check(sgmm, true);
AmSgmm2 *sgmm2 = new AmSgmm2();
sgmm2->CopyFromSgmm2(sgmm, false, false);
updater.Update(*accs1, sgmm2, flags);
sgmm2->ComputeDerivedVars();
sgmm2->GaussianSelection(sgmm_config, feats.Row(0), &gselect);
sgmm2->ComputePerFrameVars(feats.Row(0), gselect, empty, &frame_vars);
Sgmm2LikelihoodCache like_cache2(sgmm2->NumGroups(), sgmm2->NumPdfs());
BaseFloat loglike2 =
sgmm2->LogLikelihood(frame_vars, 0, &like_cache2, &empty);
kaldi::AssertEqual(loglike1, loglike2, 1e-4);
delete accs1;
// Next, binary write
accs.Write(kaldi::Output("tmpfb", true).Stream(), true);
MleAmSgmm2Accs *accs2 = new MleAmSgmm2Accs();
// Binary read
kaldi::Input ki2("tmpfb", &binary_in);
accs2->Read(ki2.Stream(), binary_in, false);
accs2->Check(sgmm, true);
AmSgmm2 *sgmm3 = new AmSgmm2();
sgmm3->CopyFromSgmm2(sgmm, false, false);
updater.Update(*accs2, sgmm3, flags);
sgmm3->ComputeDerivedVars();
sgmm3->GaussianSelection(sgmm_config, feats.Row(0), &gselect);
sgmm3->ComputePerFrameVars(feats.Row(0), gselect, empty, &frame_vars);
Sgmm2LikelihoodCache like_cache3(sgmm3->NumGroups(), sgmm3->NumPdfs());
BaseFloat loglike3 =
sgmm3->LogLikelihood(frame_vars, 0, &like_cache3, &empty);
kaldi::AssertEqual(loglike1, loglike3, 1e-6);
// Testing the MAP update of M
update_opts.tau_map_M = 10;
update_opts.full_col_cov = (RandUniform() > 0.5) ? true : false;
update_opts.full_row_cov = (RandUniform() > 0.5) ? true : false;
kaldi::MleAmSgmm2Updater updater_map(update_opts);
sgmm3->CopyFromSgmm2(sgmm, false, false);
updater_map.Update(*accs2, sgmm3, flags);
delete accs2;
delete sgmm2;
delete sgmm3;
unlink("tmpf");
unlink("tmpfb");
}
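// For reference, the accumulator I/O round trip exercised above follows the
// usual Kaldi pattern (sketched here with a placeholder object `accs` and
// file name "stats", neither of which belongs to this test):
//
//   accs.Write(kaldi::Output("stats", binary).Stream(), binary);
//   bool binary_in;
//   kaldi::Input ki("stats", &binary_in);
//   accs.Read(ki.Stream(), binary_in, false /* not adding */);
//
// An accumulator read back this way should drive the updater to the same
// model, which is what the AssertEqual checks on loglike1/loglike2/loglike3
// above verify.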
void UnitTestEstimateSgmm2() {
int32 dim = 1 + kaldi::RandInt(0, 9); // random dimension of the gmm
int32 num_comp = 2 + kaldi::RandInt(0, 9); // random mixture size
kaldi::FullGmm full_gmm;
ut::InitRandFullGmm(dim, num_comp, &full_gmm);
AmSgmm2 sgmm;
kaldi::Sgmm2GselectConfig config;
std::vector<int32> pdf2group;
pdf2group.push_back(0);
sgmm.InitializeFromFullGmm(full_gmm, pdf2group, dim + 1, dim, false,
0.9); // TODO-- make this true!
sgmm.ComputeNormalizers();
kaldi::Matrix<BaseFloat> feats;
{ // First, generate random means and variances
int32 num_feat_comp =
num_comp + kaldi::RandInt(-num_comp / 2, num_comp / 2);
kaldi::Matrix<BaseFloat> means(num_feat_comp, dim),
vars(num_feat_comp, dim);
for (int32 m = 0; m < num_feat_comp; m++) {
for (int32 d = 0; d < dim; d++) {
means(m, d) = kaldi::RandGauss();
vars(m, d) = exp(kaldi::RandGauss()) + 1e-2;
}
}
// Now generate random features with those means and variances.
feats.Resize(num_feat_comp * 200, dim);
for (int32 m = 0; m < num_feat_comp; m++) {
kaldi::SubMatrix<BaseFloat> tmp(feats, m * 200, 200, 0, dim);
ut::RandDiagGaussFeatures(200, means.Row(m), vars.Row(m), &tmp);
}
}
sgmm.ComputeDerivedVars();
TestSgmm2AccsIO(sgmm, feats);
}
int main() {
for (int i = 0; i < 10; i++) UnitTestEstimateSgmm2();
std::cout << "Test OK.\n";
return 0;
}
<|start_filename|>tonic-suite/asr/src/hmm/posterior-test.cc<|end_filename|>
// hmm/posterior-test.cc
// Copyright 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "hmm/posterior.h"
namespace kaldi {
void TestVectorToPosteriorEntry() {
int32 n = 10 + rand() % 50, gselect = 1 + rand() % 5;
BaseFloat min_post = 0.1 + 0.8 * RandUniform();
Vector<BaseFloat> loglikes(n);
loglikes.SetRandn();
loglikes.Scale(10.0);
std::vector<std::pair<int32, BaseFloat> > post_entry;
BaseFloat ans =
VectorToPosteriorEntry(loglikes, gselect, min_post, &post_entry);
KALDI_ASSERT(post_entry.size() <= gselect);
int32 max_elem;
BaseFloat max_val = loglikes.Max(&max_elem);
KALDI_ASSERT(post_entry[0].first == max_elem);
KALDI_ASSERT(post_entry.back().second >= min_post);
KALDI_ASSERT(post_entry.back().second <= post_entry.front().second);
BaseFloat sum = 0.0;
for (size_t i = 0; i < post_entry.size(); i++) sum += post_entry[i].second;
KALDI_ASSERT(fabs(sum - 1.0) < 0.01);
KALDI_ASSERT(ans >= max_val);
}
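// A small worked example of the properties checked above (the numbers are
// illustrative only, not produced by this test): with loglikes = [0, log(3)],
// gselect = 2 and min_post = 0.1, the softmax of the input is [0.25, 0.75],
// nothing is pruned by min_post, and post_entry would come back sorted as
// {(1, 0.75), (0, 0.25)} with the weights summing to 1.0; the returned value
// is at least the largest input log-likelihood (log(3) here), matching the
// KALDI_ASSERT(ans >= max_val) check.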
}
int main() {
// repeat the test ten times
for (int i = 0; i < 10; i++) {
kaldi::TestVectorToPosteriorEntry();
}
std::cout << "Test OK.\n";
}
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-est-basis-fmllr.cc<|end_filename|>
// gmmbin/gmm-est-basis-fmllr.cc
// Copyright 2012 Carnegie Mellon University (author: <NAME>)
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <string>
using std::string;
#include <vector>
using std::vector;
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "transform/fmllr-diag-gmm.h"
#include "transform/basis-fmllr-diag-gmm.h"
#include "hmm/posterior.h"
namespace kaldi {
void AccumulateForUtterance(const Matrix<BaseFloat> &feats,
const Posterior &post,
const TransitionModel &trans_model,
const AmDiagGmm &am_gmm,
FmllrDiagGmmAccs *spk_stats) {
Posterior pdf_post;
ConvertPosteriorToPdfs(trans_model, post, &pdf_post);
for (size_t i = 0; i < post.size(); i++) {
for (size_t j = 0; j < pdf_post[i].size(); j++) {
int32 pdf_id = pdf_post[i][j].first;
spk_stats->AccumulateForGmm(am_gmm.GetPdf(pdf_id), feats.Row(i),
pdf_post[i][j].second);
}
}
}
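/*
  Reading of the accumulation above (no behavior beyond what the code already
  does): ConvertPosteriorToPdfs maps the transition-id-level posterior to a
  pdf-id-level one, and then every (pdf_id, weight) pair on frame i adds that
  frame's feature row to the fMLLR statistics with the given weight, against
  the GMM of the corresponding pdf.
*/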
}
int main(int argc, char *argv[]) {
try {
typedef kaldi::int32 int32;
using namespace kaldi;
const char *usage =
"Perform basis fMLLR adaptation in testing stage, either per utterance "
"or\n"
"for the supplied set of speakers (spk2utt option). Reads posterior "
"to\n"
"accumulate fMLLR stats for each speaker/utterance. Writes to a table "
"of\n"
"matrices.\n"
"Usage: gmm-est-basis-fmllr [options] <model-in> <basis-rspecifier> "
"<feature-rspecifier> "
"<post-rspecifier> <transform-wspecifier>\n";
ParseOptions po(usage);
BasisFmllrOptions basis_fmllr_opts;
string spk2utt_rspecifier;
string weights_out_filename;
po.Register("spk2utt", &spk2utt_rspecifier,
"Rspecifier for speaker to "
"utterance-list map");
po.Register("write-weights", &weights_out_filename,
"File to write base "
"weights to.");
basis_fmllr_opts.Register(&po);
po.Read(argc, argv);
if (po.NumArgs() != 5) {
po.PrintUsage();
exit(1);
}
string model_rxfilename = po.GetArg(1), basis_rspecifier = po.GetArg(2),
feature_rspecifier = po.GetArg(3), post_rspecifier = po.GetArg(4),
trans_wspecifier = po.GetArg(5);
TransitionModel trans_model;
AmDiagGmm am_gmm;
{
bool binary;
Input ki(model_rxfilename, &binary);
trans_model.Read(ki.Stream(), binary);
am_gmm.Read(ki.Stream(), binary);
}
BasisFmllrEstimate basis_est;
ReadKaldiObject(basis_rspecifier, &basis_est);
RandomAccessPosteriorReader post_reader(post_rspecifier);
double tot_impr = 0.0, tot_t = 0.0;
BaseFloatMatrixWriter transform_writer(trans_wspecifier);
BaseFloatVectorWriter weights_writer;
if (!weights_out_filename.empty()) {
weights_writer.Open(weights_out_filename);
}
int32 num_done = 0, num_no_post = 0, num_other_error = 0;
if (spk2utt_rspecifier != "") { // per-speaker adaptation
SequentialTokenVectorReader spk2utt_reader(spk2utt_rspecifier);
RandomAccessBaseFloatMatrixReader feature_reader(feature_rspecifier);
for (; !spk2utt_reader.Done(); spk2utt_reader.Next()) {
FmllrDiagGmmAccs spk_stats(am_gmm.Dim());
string spk = spk2utt_reader.Key();
const vector<string> &uttlist = spk2utt_reader.Value();
for (size_t i = 0; i < uttlist.size(); i++) {
std::string utt = uttlist[i];
if (!feature_reader.HasKey(utt)) {
KALDI_WARN << "Did not find features for utterance " << utt;
num_other_error++;
continue;
}
if (!post_reader.HasKey(utt)) {
KALDI_WARN << "Did not find posteriors for utterance " << utt;
num_no_post++;
continue;
}
const Matrix<BaseFloat> &feats = feature_reader.Value(utt);
const Posterior &post = post_reader.Value(utt);
if (static_cast<int32>(post.size()) != feats.NumRows()) {
KALDI_WARN << "Posterior vector has wrong size " << (post.size())
<< " vs. " << (feats.NumRows());
num_other_error++;
continue;
}
AccumulateForUtterance(feats, post, trans_model, am_gmm, &spk_stats);
num_done++;
} // end looping over all utterances of the current speaker
double impr, spk_tot_t;
int32 wgt_size;
{
// Compute the transform and write it out.
Matrix<BaseFloat> transform(am_gmm.Dim(), am_gmm.Dim() + 1);
transform.SetUnit();
Vector<BaseFloat> weights; // size will be adjusted
impr = basis_est.ComputeTransform(spk_stats, &transform, &weights,
basis_fmllr_opts);
spk_tot_t = spk_stats.beta_;
wgt_size = weights.Dim();
transform_writer.Write(spk, transform);
// Optionally write out the base weights
if (!weights_out_filename.empty() && weights.Dim() > 0)
weights_writer.Write(spk, weights);
}
KALDI_LOG << "For speaker " << spk << ", auxf-impr from Basis fMLLR is "
<< (impr / spk_tot_t) << ", over " << spk_tot_t << " frames, "
<< "the top " << wgt_size << " basis elements have been used";
tot_impr += impr;
tot_t += spk_tot_t;
} // end looping over speakers
} else { // per-utterance adaptation
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
for (; !feature_reader.Done(); feature_reader.Next()) {
string utt = feature_reader.Key();
if (!post_reader.HasKey(utt)) {
KALDI_WARN << "Did not find posts for utterance " << utt;
num_no_post++;
continue;
}
const Matrix<BaseFloat> &feats = feature_reader.Value();
const Posterior &post = post_reader.Value(utt);
if (static_cast<int32>(post.size()) != feats.NumRows()) {
KALDI_WARN << "Posterior has wrong size " << (post.size()) << " vs. "
<< (feats.NumRows());
num_other_error++;
continue;
}
FmllrDiagGmmAccs spk_stats(am_gmm.Dim());
AccumulateForUtterance(feats, post, trans_model, am_gmm, &spk_stats);
num_done++;
BaseFloat impr, utt_tot_t;
int32 wgt_size;
{ // Compute the transform and write it out.
Matrix<BaseFloat> transform(am_gmm.Dim(), am_gmm.Dim() + 1);
transform.SetUnit();
Vector<BaseFloat> weights(
am_gmm.Dim() * (am_gmm.Dim() + 1)); // size will be adjusted
impr = basis_est.ComputeTransform(spk_stats, &transform, &weights,
basis_fmllr_opts);
utt_tot_t = spk_stats.beta_;
wgt_size = weights.Dim();
transform_writer.Write(utt, transform);
// Optionally write out the base weights
if (!weights_out_filename.empty() && weights.Dim() > 0)
weights_writer.Write(utt, weights);
}
KALDI_LOG << "For utterance " << utt
<< ", auxf-impr from Basis fMLLR is " << (impr / utt_tot_t)
<< ", over " << utt_tot_t << " frames, "
<< "the top " << wgt_size << " basis elements have been used";
tot_impr += impr;
tot_t += utt_tot_t;
} // end looping over all the utterances
}
KALDI_LOG << "Done " << num_done << " files, " << num_no_post
<< " with no posts, " << num_other_error << " with other errors.";
KALDI_LOG << "Overall fMLLR auxf-impr per frame is " << (tot_impr / tot_t)
<< " over " << tot_t << " frames.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2bin/nnet-show-progress.cc<|end_filename|>
// nnet2bin/nnet-show-progress.cc
// Copyright 2012-2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/train-nnet.h"
#include "nnet2/am-nnet.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace kaldi::nnet2;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
const char *usage =
"Given an old and a new model and some training examples (possibly "
"held-out),\n"
"show the average objective function given the mean of the two "
"models,\n"
"and the breakdown by component of why this happened (computed from\n"
"derivative information). Also shows parameter differences per "
"layer.\n"
"If training examples not provided, only shows parameter differences "
"per\n"
"layer.\n"
"\n"
"Usage: nnet-show-progress [options] <old-model-in> <new-model-in> "
"[<training-examples-in>]\n"
"e.g.: nnet-show-progress 1.nnet 2.nnet ark:valid.egs\n";
ParseOptions po(usage);
int32 num_segments = 1;
int32 batch_size = 1024;
std::string use_gpu = "optional";
po.Register("num-segments", &num_segments,
"Number of line segments used for computing derivatives");
po.Register("use-gpu", &use_gpu,
"yes|no|optional, only has effect if compiled with CUDA");
po.Read(argc, argv);
if (po.NumArgs() < 2 || po.NumArgs() > 3) {
po.PrintUsage();
exit(1);
}
#if HAVE_CUDA == 1
CuDevice::Instantiate().SelectGpuId(use_gpu);
#endif
std::string nnet1_rxfilename = po.GetArg(1),
nnet2_rxfilename = po.GetArg(2),
examples_rspecifier = po.GetOptArg(3);
TransitionModel trans_model;
AmNnet am_nnet1, am_nnet2;
{
bool binary_read;
Input ki(nnet1_rxfilename, &binary_read);
trans_model.Read(ki.Stream(), binary_read);
am_nnet1.Read(ki.Stream(), binary_read);
}
{
bool binary_read;
Input ki(nnet2_rxfilename, &binary_read);
trans_model.Read(ki.Stream(), binary_read);
am_nnet2.Read(ki.Stream(), binary_read);
}
if (am_nnet1.GetNnet().GetParameterDim() !=
am_nnet2.GetNnet().GetParameterDim()) {
KALDI_WARN << "Parameter-dim mismatch, cannot show progress.";
exit(0);
}
int32 ret = 0;
if (!examples_rspecifier.empty()) {
Nnet nnet_gradient(am_nnet2.GetNnet());
const bool treat_as_gradient = true;
nnet_gradient.SetZero(treat_as_gradient);
std::vector<NnetExample> examples;
SequentialNnetExampleReader example_reader(examples_rspecifier);
for (; !example_reader.Done(); example_reader.Next())
examples.push_back(example_reader.Value());
int32 num_examples = examples.size();
int32 num_updatable = am_nnet1.GetNnet().NumUpdatableComponents();
Vector<BaseFloat> diff(num_updatable);
for (int32 s = 0; s < num_segments; s++) {
// start and end segments of the line between 0 and 1
BaseFloat start = (s + 0.0) / num_segments,
end = (s + 1.0) / num_segments, middle = 0.5 * (start + end);
Nnet interp_nnet(am_nnet2.GetNnet());
interp_nnet.Scale(middle);
interp_nnet.AddNnet(1.0 - middle, am_nnet1.GetNnet());
Nnet nnet_gradient(am_nnet2.GetNnet());
const bool treat_as_gradient = true;
nnet_gradient.SetZero(treat_as_gradient);
double objf_per_frame = ComputeNnetGradient(interp_nnet, examples,
batch_size, &nnet_gradient);
KALDI_LOG << "At position " << middle << ", objf per frame is "
<< objf_per_frame;
Vector<BaseFloat> old_dotprod(num_updatable),
new_dotprod(num_updatable);
nnet_gradient.ComponentDotProducts(am_nnet1.GetNnet(), &old_dotprod);
nnet_gradient.ComponentDotProducts(am_nnet2.GetNnet(), &new_dotprod);
old_dotprod.Scale(1.0 / num_examples);
new_dotprod.Scale(1.0 / num_examples);
diff.AddVec(1.0 / num_segments, new_dotprod);
diff.AddVec(-1.0 / num_segments, old_dotprod);
KALDI_VLOG(1) << "By segment " << s << ", objf change is " << diff;
}
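      // What `diff` approximates (a sketch of the reasoning, not extra
      // functionality): each segment contributes, per updatable component,
      //   (1 / num_segments) * g(middle) . (params_new - params_old),
      // where g is the per-example-averaged gradient at the midpoint, so the
      // accumulated sum is a piecewise (Riemann-style) estimate of how much
      // the objective changed between the old and the new model, broken down
      // by component.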
KALDI_LOG << "Total objf change per component is " << diff;
if (num_examples == 0) ret = 1;
}
{ // Get info about magnitude of parameter change.
Nnet diff_nnet(am_nnet1.GetNnet());
diff_nnet.AddNnet(-1.0, am_nnet2.GetNnet());
int32 num_updatable = diff_nnet.NumUpdatableComponents();
Vector<BaseFloat> dot_prod(num_updatable);
diff_nnet.ComponentDotProducts(diff_nnet, &dot_prod);
dot_prod.ApplyPow(0.5); // take sqrt to get l2 norm of diff
KALDI_LOG << "Parameter differences per layer are " << dot_prod;
Vector<BaseFloat> baseline_prod(num_updatable);
am_nnet1.GetNnet().ComponentDotProducts(am_nnet1.GetNnet(),
&baseline_prod);
baseline_prod.ApplyPow(0.5);
dot_prod.DivElements(baseline_prod);
KALDI_LOG << "Relative parameter differences per layer are " << dot_prod;
}
return ret;
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2bin/nnet-compare-hash-discriminative.cc<|end_filename|>
// nnet2bin/nnet-compare-hash-discriminative.cc
// Copyright 2012-2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/nnet-example-functions.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace kaldi::nnet2;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
const char *usage =
"Compares two archives of discriminative training examples and checks\n"
"that they behave the same way for purposes of discriminative "
"training.\n"
"This program was created as a way of testing "
"nnet-get-egs-discriminative\n"
"The model is only needed for its transition-model.\n"
"\n"
"Usage: nnet-compare-hash-discriminative [options] <model-rxfilename> "
"<egs-rspecifier1> <egs-rspecifier2>\n"
"\n"
"Note: options --drop-frames and --criterion should be matched with "
"the\n"
"command line of nnet-get-egs-discriminative used to get the examples\n"
"nnet-compare-hash-discriminative --drop-frames=true --criterion=mmi "
"ark:1.degs ark:2.degs\n";
std::string criterion = "smbr";
bool drop_frames = false;
BaseFloat threshold = 0.002;
BaseFloat acoustic_scale = 1.0, lm_scale = 1.0;
ParseOptions po(usage);
po.Register("acoustic-scale", &acoustic_scale,
"Scaling factor for acoustic likelihoods");
po.Register("lm-scale", &lm_scale,
"Scaling factor for \"graph costs\" (including LM costs)");
po.Register("criterion", &criterion,
"Training criterion, 'mmi'|'mpfe'|'smbr'");
po.Register("drop-frames", &drop_frames,
"If true, for MMI training, drop "
"frames where num and den do not intersect.");
po.Register("threshold", &threshold,
"Threshold for equality testing "
"(relative)");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string model_rxfilename = po.GetArg(1),
examples_rspecifier1 = po.GetArg(2),
examples_rspecifier2 = po.GetArg(3);
int64 num_done1 = 0, num_done2 = 0;
TransitionModel tmodel;
ReadKaldiObject(model_rxfilename, &tmodel);
Matrix<double> hash1, hash2;
// some additional diagnostics:
double num_weight1 = 0.0, den_weight1 = 0.0, tot_t1 = 0.0;
double num_weight2 = 0.0, den_weight2 = 0.0, tot_t2 = 0.0;
SequentialDiscriminativeNnetExampleReader example_reader1(
examples_rspecifier1),
example_reader2(examples_rspecifier2);
KALDI_LOG << "Computing first hash function";
for (; !example_reader1.Done(); example_reader1.Next(), num_done1++) {
DiscriminativeNnetExample eg = example_reader1.Value();
fst::ScaleLattice(fst::LatticeScale(lm_scale, acoustic_scale),
&(eg.den_lat));
UpdateHash(tmodel, eg, criterion, drop_frames, &hash1, &num_weight1,
&den_weight1, &tot_t1);
}
KALDI_LOG << "Processed " << num_done1 << " examples.";
KALDI_LOG << "Computing second hash function";
for (; !example_reader2.Done(); example_reader2.Next(), num_done2++) {
DiscriminativeNnetExample eg = example_reader2.Value();
fst::ScaleLattice(fst::LatticeScale(lm_scale, acoustic_scale),
&(eg.den_lat));
UpdateHash(tmodel, eg, criterion, drop_frames, &hash2, &num_weight2,
&den_weight2, &tot_t2);
}
KALDI_LOG << "Processed " << num_done2 << " examples.";
double prod1 = TraceMatMat(hash1, hash1, kTrans),
prod2 = TraceMatMat(hash2, hash2, kTrans),
cross_prod = TraceMatMat(hash1, hash2, kTrans);
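    // Note on the check below: TraceMatMat(A, B, kTrans) is the sum of the
    // elementwise products of A and B, so if the two hash matrices agree then
    // prod1, prod2 and cross_prod must all coincide, and conversely
    // prod1 + prod2 - 2 * cross_prod is the squared Frobenius distance between
    // them; requiring all three to match within `threshold` is therefore an
    // equality test on the hashes up to numerical error.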
KALDI_LOG << "Products are as follows (should be the same): prod1 = "
<< prod1 << ", prod2 = " << prod2
<< ", cross_prod = " << cross_prod;
KALDI_LOG << "Num-weight1 = " << num_weight1
<< ", den-weight1 = " << den_weight1 << ", tot_t1 = " << tot_t1;
KALDI_LOG << "Num-weight2 = " << num_weight2
<< ", den-weight2 = " << den_weight2 << ", tot_t2 = " << tot_t2;
KALDI_ASSERT(ApproxEqual(prod1, prod2, threshold) &&
ApproxEqual(prod2, cross_prod, threshold));
KALDI_ASSERT(prod1 > 0.0);
return 0;
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/feat/resample-test.cc<|end_filename|>
// feat/resample-test.cc
// Copyright 2013 <NAME>
// 2014 IMSL, PKU-HKUST (author: <NAME>)
// 2014 <NAME>, <NAME>
// 2014 Johns Hopkins University (author: <NAME>)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "feat/resample.h"
using namespace kaldi;
class TestFunction {
public:
explicit TestFunction(double frequency)
: frequency_(frequency),
sin_magnitude_(RandGauss()),
cos_magnitude_(RandGauss()) {}
double operator()(double t) const {
double omega_t = t * M_2PI * frequency_;
return sin_magnitude_ * sin(omega_t) + cos_magnitude_ * cos(omega_t);
}
private:
double frequency_;
double sin_magnitude_;
double cos_magnitude_;
};
void UnitTestArbitraryResample() {
BaseFloat samp_freq = 1000.0 * (1.0 + RandUniform());
int32 num_samp = 256 + static_cast<int32>((RandUniform() * 256));
BaseFloat time_interval = num_samp / samp_freq;
// Choose a lowpass frequency that's lower than 95% of the Nyquist.
BaseFloat lowpass_freq = samp_freq * 0.95 * 0.5 / (1.0 + RandUniform());
// Number of zeros of the sinc function that the window extends out to.
int32 num_zeros = 3 + rand() % 10;
// Resample the signal at arbitrary points within that time interval.
int32 num_resamp = 50 + rand() % 100; // Resample at around 100 points,
// anywhere in the signal.
Vector<BaseFloat> resample_points(num_resamp);
for (int32 i = 0; i < num_resamp; i++) {
// the if-statement is to make some of the resample_points
// exactly coincide with the original points, to activate
// a certain code path.
if (rand() % 2 == 0)
resample_points(i) = (rand() % num_samp) / samp_freq;
else
resample_points(i) = RandUniform() * time_interval;
}
BaseFloat window_width = num_zeros / (2.0 * lowpass_freq);
// the resampling should be quite accurate if we are further
// than filter_width away from the edges.
BaseFloat min_t = 0.0 + window_width,
max_t = time_interval - (1.0 / samp_freq) - window_width;
// window_freq gives us a rough idea of the frequency spread
// that the windowing function gives us; we want the test frequency
// to be lower than the lowpass frequency by at least this much.
// (note: the real width of the window from side to side
// is 2.0 * window_width)
BaseFloat window_freq = 1.0 / (2.0 * window_width),
freq_margin = 2.0 * window_freq;
// Choose a test-signal frequency that's lower than
// lowpass_freq - freq_margin.
BaseFloat test_signal_freq =
(lowpass_freq - freq_margin) * (1.0 / (1.0 + RandUniform()));
KALDI_ASSERT(test_signal_freq > 0.0);
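  // To make the margins above concrete (purely illustrative numbers, not the
  // random values drawn by this test): with lowpass_freq = 400 Hz and
  // num_zeros = 4, window_width = 4 / (2 * 400) = 5 ms, window_freq =
  // 1 / (2 * 0.005) = 100 Hz, so freq_margin = 200 Hz and the test signal
  // would have to stay below 400 - 200 = 200 Hz.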
ArbitraryResample resampler(num_samp, samp_freq, lowpass_freq,
resample_points, num_zeros);
TestFunction test_func(test_signal_freq);
// test with a one-row matrix equal to the test signal.
Matrix<BaseFloat> sample_values(1, num_samp);
for (int32 i = 0; i < num_samp; i++) {
BaseFloat t = i / samp_freq;
sample_values(0, i) = test_func(t);
}
Matrix<BaseFloat> resampled_values(1, num_resamp);
if (rand() % 2 == 0) {
resampler.Resample(sample_values, &resampled_values);
} else {
SubVector<BaseFloat> out(resampled_values, 0);
resampler.Resample(sample_values.Row(0), &out);
}
for (int32 i = 0; i < num_resamp; i++) {
BaseFloat t = resample_points(i), x1 = test_func(t),
x2 = resampled_values(0, i), error = fabs(x1 - x2);
if (i % 10 == 0) {
KALDI_VLOG(1) << "Error is " << error << ", t = " << t
<< ", samp_freq = " << samp_freq
<< ", lowpass_freq = " << lowpass_freq
<< ", test_freq = " << test_signal_freq << ", num-zeros is "
<< num_zeros;
}
if (t > min_t && t < max_t) {
if (num_zeros == 3) {
KALDI_ASSERT(error < 0.1);
} else {
KALDI_ASSERT(error < 0.02);
}
} else {
KALDI_VLOG(1) << "[not checking since out of bounds]";
}
}
}
void UnitTestLinearResample() {
// this test makes sure that LinearResample gives identical results to
// ArbitraryResample when set up the same way, even if the signal is broken up
// into many pieces.
int32 samp_freq = 1000.0 * (1.0 + RandUniform()),
resamp_freq = 1000.0 * (1.0 + RandUniform());
// note: these are both integers!
int32 num_samp = 256 + static_cast<int32>((RandUniform() * 256));
BaseFloat time_interval = num_samp / static_cast<BaseFloat>(samp_freq);
// Choose a lowpass frequency that's lower than 95% of the Nyquist of both
// of the frequencies..
BaseFloat lowpass_freq =
std::min(samp_freq, resamp_freq) * 0.95 * 0.5 / (1.0 + RandUniform());
// Number of zeros of the sinc function that the window extends out to.
int32 num_zeros = 3 + rand() % 10;
// compute the number of "resample" points.
int32 num_resamp = ceil(time_interval * resamp_freq);
Vector<BaseFloat> resample_points(num_resamp);
for (int32 i = 0; i < num_resamp; i++)
resample_points(i) = i / static_cast<BaseFloat>(resamp_freq);
Vector<BaseFloat> test_signal(num_samp);
test_signal.SetRandn();
ArbitraryResample resampler(num_samp, samp_freq, lowpass_freq,
resample_points, num_zeros);
// test with a one-row matrix equal to the test signal.
Matrix<BaseFloat> sample_values(1, num_samp);
sample_values.Row(0).CopyFromVec(test_signal);
Matrix<BaseFloat> resampled_values(1, num_resamp);
resampler.Resample(sample_values, &resampled_values);
LinearResample linear_resampler(samp_freq, resamp_freq, lowpass_freq,
num_zeros);
Vector<BaseFloat> resampled_vec;
linear_resampler.Resample(test_signal, true, &resampled_vec);
if (!ApproxEqual(resampled_values.Row(0), resampled_vec)) {
KALDI_LOG << "ArbitraryResample: " << resampled_values.Row(0);
KALDI_LOG << "LinearResample: " << resampled_vec;
KALDI_ERR << "Signals differ.";
}
// Check it gives the same results when the input is broken up into pieces.
Vector<BaseFloat> resampled_vec2;
int32 input_dim_seen = 0;
while (input_dim_seen < test_signal.Dim()) {
int32 dim_remaining = test_signal.Dim() - input_dim_seen;
int32 piece_size = rand() % std::min(dim_remaining + 1, 10);
KALDI_VLOG(1) << "Piece size = " << piece_size;
SubVector<BaseFloat> in_piece(test_signal, input_dim_seen, piece_size);
Vector<BaseFloat> out_piece;
bool flush = (piece_size == dim_remaining);
linear_resampler.Resample(in_piece, flush, &out_piece);
int32 old_output_dim = resampled_vec2.Dim();
resampled_vec2.Resize(old_output_dim + out_piece.Dim(), kCopyData);
resampled_vec2.Range(old_output_dim, out_piece.Dim())
.CopyFromVec(out_piece);
input_dim_seen += piece_size;
}
if (!ApproxEqual(resampled_values.Row(0), resampled_vec2)) {
KALDI_LOG << "ArbitraryResample: " << resampled_values.Row(0);
KALDI_LOG << "LinearResample[broken-up]: " << resampled_vec2;
KALDI_ERR << "Signals differ.";
}
}
int main() {
try {
for (int32 x = 0; x < 50; x++) UnitTestLinearResample();
for (int32 x = 0; x < 50; x++) UnitTestArbitraryResample();
KALDI_LOG << "Tests succeeded.\n";
return 0;
} catch (const std::exception &e) {
KALDI_ERR << e.what();
return 1;
}
}
<|start_filename|>tonic-suite/asr/src/transform/lda-estimate-test.cc<|end_filename|>
// transform/lda-estimate-test.cc
// Copyright 2009-2011 <NAME>; Saarland University
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "transform/lda-estimate.h"
#include "util/common-utils.h"
using namespace kaldi;
void rand_posdef_spmatrix(size_t dim, SpMatrix<BaseFloat> *matrix,
TpMatrix<BaseFloat> *matrix_sqrt = NULL,
BaseFloat *logdet = NULL) {
// generate random (non-singular) matrix
Matrix<BaseFloat> tmp(dim, dim);
while (1) {
tmp.SetRandn();
if (tmp.Cond() < 100) break;
std::cout << "Condition number of random matrix large "
<< static_cast<float>(tmp.Cond())
<< ", trying again (this is normal)" << '\n';
}
// tmp * tmp^T will give positive definite matrix
matrix->AddMat2(1.0, tmp, kNoTrans, 0.0);
if (matrix_sqrt != NULL) matrix_sqrt->Cholesky(*matrix);
if (logdet != NULL) *logdet = matrix->LogPosDefDet();
if ((matrix_sqrt == NULL) && (logdet == NULL)) {
TpMatrix<BaseFloat> sqrt(dim);
sqrt.Cholesky(*matrix);
}
}
void test_io(const LdaEstimate &lda_est, bool binary) {
std::cout << "Testing I/O, binary = " << binary << '\n';
size_t dim = lda_est.Dim();
lda_est.Write(Output("tmp_stats", binary).Stream(), binary);
bool binary_in;
LdaEstimate lda_est2;
lda_est2.Init(lda_est.NumClasses(), lda_est.Dim());
Input ki("tmp_stats", &binary_in);
lda_est2.Read(ki.Stream(), binary_in, false); // not adding
Input ki2("tmp_stats", &binary_in);
lda_est2.Read(ki2.Stream(), binary_in, true); // adding
lda_est2.Scale(0.5);
// 0.5 -> make it same as what it would have been if we read just once.
Matrix<BaseFloat> m1;
Matrix<BaseFloat> m2;
LdaEstimateOptions opts;
opts.dim = dim;
lda_est.Estimate(opts, &m1);
lda_est2.Estimate(opts, &m2);
m1.AddMat(-1.0, m2, kNoTrans);
KALDI_ASSERT(m1.IsZero(1.0e-02));
unlink("tmp_stats");
}
void UnitTestEstimateLda() {
// using namespace kaldi;
// dimension of the gmm
size_t dim = kaldi::RandInt(10, 20);
// number of mixtures in the data
size_t num_class = dim + kaldi::RandInt(1, 10); // must be at least dim + 1
std::cout << "Running test with " << num_class << " classes and " << dim
<< " dimensional vectors" << '\n';
// generate random feature vectors
// first, generate parameters of vectors distribution
// (mean and covariance matrices)
Matrix<BaseFloat> means_f(num_class, dim);
std::vector<SpMatrix<BaseFloat> > vars_f(num_class);
std::vector<TpMatrix<BaseFloat> > vars_f_sqrt(num_class);
for (size_t mix = 0; mix < num_class; mix++) {
vars_f[mix].Resize(dim);
vars_f_sqrt[mix].Resize(dim);
}
for (size_t m = 0; m < num_class; m++) {
for (size_t d = 0; d < dim; d++) {
means_f(m, d) = kaldi::RandGauss();
}
rand_posdef_spmatrix(dim, &vars_f[m], &vars_f_sqrt[m], NULL);
}
// second, generate X feature vectors for each of the mixture components
size_t counter = 0;
size_t vec_count = 1000;
Matrix<BaseFloat> feats(num_class * vec_count, dim);
std::vector<int32> feats_class(num_class * vec_count);
Vector<BaseFloat> rnd_vec(dim);
for (size_t m = 0; m < num_class; m++) {
for (size_t i = 0; i < vec_count; i++) {
for (size_t d = 0; d < dim; d++) {
rnd_vec(d) = RandGauss();
}
feats.Row(counter).CopyFromVec(means_f.Row(m));
feats.Row(counter).AddTpVec(1.0, vars_f_sqrt[m], kNoTrans, rnd_vec, 1.0);
feats_class[counter] = m;
++counter;
}
}
// Compute total covar and means for classes.
Vector<double> total_mean(dim);
Matrix<double> class_mean(num_class, dim);
SpMatrix<double> total_covar(dim);
Vector<double> tmp_vec_d(dim);
for (size_t i = 0; i < counter; i++) {
tmp_vec_d.CopyFromVec(feats.Row(i));
class_mean.Row(feats_class[i]).AddVec(1.0, tmp_vec_d);
total_mean.AddVec(1.0, tmp_vec_d);
total_covar.AddVec2(1.0, tmp_vec_d);
}
total_mean.Scale(1 / static_cast<double>(counter));
total_covar.Scale(1 / static_cast<double>(counter));
total_covar.AddVec2(-1.0, total_mean);
// Compute between-class covar.
SpMatrix<double> bc_covar(dim);
for (size_t c = 0; c < num_class; c++) {
class_mean.Row(c).Scale(1 / static_cast<double>(vec_count));
bc_covar.AddVec2(static_cast<double>(vec_count) / counter,
class_mean.Row(c));
}
bc_covar.AddVec2(-1.0, total_mean);
// Compute within-class covar.
SpMatrix<double> wc_covar(total_covar);
wc_covar.AddSp(-1.0, bc_covar);
// Estimate LDA transform matrix
LdaEstimate lda_est;
lda_est.Init(num_class, dim);
lda_est.ZeroAccumulators();
for (size_t i = 0; i < counter; i++) {
lda_est.Accumulate(feats.Row(i), feats_class[i]);
}
LdaEstimateOptions opts;
opts.dim = dim;
Matrix<BaseFloat> lda_mat_bf, lda_mat_bf_mean_remove;
lda_est.Estimate(opts, &lda_mat_bf);
opts.remove_offset = true;
lda_est.Estimate(opts, &lda_mat_bf_mean_remove);
{
Vector<BaseFloat> mean_ext(total_mean);
mean_ext.Resize(mean_ext.Dim() + 1, kCopyData);
mean_ext(mean_ext.Dim() - 1) = 1.0;
Vector<BaseFloat> zero(mean_ext.Dim() - 1);
zero.AddMatVec(1.0, lda_mat_bf_mean_remove, kNoTrans, mean_ext, 0.0);
KALDI_ASSERT(zero.IsZero(0.001));
}
// Check lda_mat
Matrix<double> lda_mat(lda_mat_bf);
Matrix<double> tmp_mat(dim, dim);
Matrix<double> wc_covar_mat(wc_covar);
Matrix<double> bc_covar_mat(bc_covar);
// following product should give unit matrix
tmp_mat.AddMatMatMat(1.0, lda_mat, kNoTrans, wc_covar_mat, kNoTrans, lda_mat,
kTrans, 0.0);
KALDI_ASSERT(tmp_mat.IsUnit());
// following product should give diagonal matrix with ordered diagonal (desc)
tmp_mat.AddMatMatMat(1.0, lda_mat, kNoTrans, bc_covar_mat, kNoTrans, lda_mat,
kTrans, 0.0);
KALDI_ASSERT(tmp_mat.IsDiagonal());
for (int32 i = 1; i < static_cast<int32>(dim); i++) {
if (tmp_mat(i, i) < 1.0e-10) {
tmp_mat(i, i) = 0.0;
}
KALDI_ASSERT(tmp_mat(i - 1, i - 1) >= tmp_mat(i, i));
}
// test I/O
test_io(lda_est, false);
test_io(lda_est, true);
}
int main() {
// repeat the test X times
for (int i = 0; i < 2; i++) UnitTestEstimateLda();
std::cout << "Test OK.\n";
}
<|start_filename|>tonic-suite/asr/src/fstext/prune-special.h<|end_filename|>
// fstext/prune-special.h
// Copyright 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_FSTEXT_PRUNE_SPECIAL_H_
#define KALDI_FSTEXT_PRUNE_SPECIAL_H_
#include "fst/fstlib.h"
#include "fstext/lattice-weight.h"
#include "fstext/factor.h"
namespace fst {
/**
The function PruneSpecial is like the standard OpenFst function "prune",
except it does not expand the entire "ifst"- this is useful for cases where
ifst is an on-demand FST such as a ComposeFst and we don't want to visit
it all. It supports pruning either to a specified beam (if beam is
not One()), or to a specified max_states (if max_states is > 0). One of the
two must be specified.
Requirements:
- Costs must be non-negative (equivalently, weights must not be greater
than One()).
- There must be a Compare(a, b) function that compares two weights and
returns (-1,0,1)
if (a<b, a=b, a>b). We define this in Kaldi, for TropicalWeight,
LogWeight (I think),
and LatticeWeight... also CompactLatticeWeight, but we doubt that will be
used here;
better to use PruneCompactLattice().
*/
template <class Arc>
void PruneSpecial(const Fst<Arc> &ifst, VectorFst<Arc> *ofst,
typename Arc::Weight beam, size_t max_states = 0);
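/*
   A usage sketch (hypothetical; `hclg` and `utt_fst` are placeholder FSTs, not
   defined in this header): prune an on-demand composition down to at most
   1000 states without expanding it fully.

     ComposeFst<StdArc> composed(hclg, utt_fst);
     VectorFst<StdArc> pruned;
     // Pass Weight::One() as the beam so that only max_states is active.
     PruneSpecial(composed, &pruned, TropicalWeight::One(), 1000);
*/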
} // end namespace fst
#include "fstext/prune-special-inl.h"
#endif // KALDI_FSTEXT_PRUNE_SPECIAL_H_
<|start_filename|>tonic-suite/asr/src/probe/exp-test.cc<|end_filename|>
// configuration/exp-test.cc
// Copyright 2014 Yandex LLC (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <iostream>
#include <cmath>
#include "base/timer.h"
#define SAMPLE 100000
int main() {
float dummy = 0.0;
kaldi::Timer exp_timer;
for (int i = 0; i < SAMPLE; ++i) {
dummy += exp((double)(i % 10));
}
double exp_time = exp_timer.Elapsed();
kaldi::Timer expf_timer;
for (int i = 0; i < SAMPLE; ++i) {
dummy += expf((double)(i % 10));
}
double expf_time = expf_timer.Elapsed();
// Often exp() and expf() perform very similarly,
// so we will replace expf() by exp() only if there is at least 10% difference
if (expf_time < exp_time * 1.1) {
return 0;
} else {
std::cerr << "exp() time: " << exp_time << std::endl;
std::cerr << "expf() time: " << expf_time << std::endl;
return 1;
}
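  // The exit status is the probe's verdict: 0 means expf() was not more than
  // 10% slower than exp() and can be kept; non-zero (with the timings printed
  // above) means the 10% threshold was exceeded.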
std::cerr << dummy << std::endl; // No complaint about the unused variable
}
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-align-compiled-plusphones.cc<|end_filename|>
// gmmbin/gmm-align-compiled-plusphones.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "hmm/hmm-utils.h"
#include "fstext/fstext-lib.h"
#include "decoder/faster-decoder.h"
#include "decoder/training-graph-compiler.h"
#include "gmm/decodable-am-diag-gmm.h"
#include "decoder/decodable-sum.h"
#include "decoder/decodable-mapped.h"
#include "lat/kaldi-lattice.h" // for {Compact}LatticeArc
namespace kaldi {
// This creates a model indexed by (phone-index - 1).
// Note: the object DecodableAmDiagGmmUnmapped subtracts
// one from the index it's given, this is where the -1
// will happen in test time.
void CreatePhoneModel(const TransitionModel &trans_model,
const AmDiagGmm &am_gmm,
const Vector<BaseFloat> &transition_accs,
int32 max_num_gauss, // max #gauss for each phone.
AmDiagGmm *phone_am) {
KALDI_LOG << "Creating phone-level model by clustering GMMs merged from "
"context-dependent states";
BaseFloat min_weight =
1.0e-05; // We assign this weight to transition-ids with no observations;
// this ensures that we get a model for unseen phones.
// The vector phone_weights is a list, indexed by phone-id,
// of pairs of (index into am_gmm, weight). We'll use this to
// construct the GMMs for each phone.
std::vector<std::map<int32, BaseFloat> > phone_weights;
KALDI_ASSERT(transition_accs.Dim() == trans_model.NumTransitionIds() + 1);
// +1 because transition_accs[0] is empty; transition-ids are one-based.
for (int32 tid = 1; tid <= trans_model.NumTransitionIds(); tid++) {
int32 phone = trans_model.TransitionIdToPhone(tid),
pdf_id = trans_model.TransitionIdToPdf(tid);
if (phone_weights.size() <= phone) phone_weights.resize(phone + 1);
if (phone_weights[phone].count(pdf_id) == 0)
phone_weights[phone][pdf_id] = 0.0;
BaseFloat max_weight = std::max(min_weight, transition_accs(tid));
phone_weights[phone][pdf_id] += max_weight;
}
int32 num_phones =
trans_model.GetTopo().GetPhones().back(); // #phones, assuming
// they start from 1.
int32 dim = am_gmm.Dim();
DiagGmm gmm(1, dim);
{ // give it valid values.. note: should never be accessed, but nice to
// avoid NaNs...
Matrix<BaseFloat> inv_covars(1, dim);
inv_covars.Set(1.0);
gmm.SetInvVars(inv_covars);
Vector<BaseFloat> weights(1);
weights(0) = 1.0;
gmm.SetWeights(weights);
}
phone_am->Init(gmm, num_phones);
for (int32 phone = 1; phone < static_cast<int32>(phone_weights.size());
phone++) {
if (phone_weights[phone].empty())
continue; // No GMM for this phone. Presumably
// not a valid phone.
std::vector<std::pair<BaseFloat, const DiagGmm *> > gmm_vec;
BaseFloat tot_weight = 0.0;
for (std::map<int32, BaseFloat>::const_iterator iter =
phone_weights[phone].begin();
iter != phone_weights[phone].end(); ++iter) {
int32 pdf_id = iter->first;
BaseFloat weight = iter->second;
std::pair<BaseFloat, const DiagGmm *> pr(weight,
&(am_gmm.GetPdf(pdf_id)));
gmm_vec.push_back(pr);
tot_weight += weight;
}
for (size_t i = 0; i < gmm_vec.size(); i++)
gmm_vec[i].first *= (1.0 / tot_weight);
DiagGmm gmm(gmm_vec); // Initializer creates merged GMM.
if (gmm.NumGauss() > max_num_gauss) {
ClusterKMeansOptions cfg;
cfg.verbose = false;
gmm.MergeKmeans(max_num_gauss, cfg);
}
phone_am->GetPdf(phone - 1)
.CopyFromDiagGmm(gmm); // Set this phone's GMM to the specified value.
}
KALDI_LOG << "Done.";
}
void CreatePhoneMap(const TransitionModel &trans_model,
std::vector<int32> *phone_map) {
// Set up map from transition-id to phone.
phone_map->resize(trans_model.NumTransitionIds() + 1);
// transition-ids are one based: there's nothing in index zero.
(*phone_map)[0] = 0;
for (int32 i = 1; i <= trans_model.NumTransitionIds(); i++)
(*phone_map)[i] = trans_model.TransitionIdToPhone(i);
}
}
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
using fst::SymbolTable;
using fst::VectorFst;
using fst::StdArc;
const char *usage =
"Align features given [GMM-based] models, but adds in likelihoods of "
"simple per-phone GMMs\n"
"with alpha*per-phone-like + (1-alpha)*model-like. This gives more "
"consistent alignments.\n"
"Per-phone models are obtained by K-means on weighted model states, "
"using the transition-accs\n"
"to get weights. (e.g. use the first line of text format of normal "
"accs).\n"
"Note: this program actually isn't that useful. We keep it mainly as "
"an example\n"
"of how to write a decoder with interpolated likelihoods.\n"
"Usage: gmm-align-compiled-plusphones [options] transition-accs-in "
"model-in graphs-rspecifier feature-rspecifier alignments-wspecifier\n"
"e.g.: \n"
" gmm-align-compiled-plusphones --alpha=0.2 --acoustic-scale=0.1 \\\n"
" 1.acc 1.mdl ark:graphs.fsts scp:train.scp ark:1.ali\n"
"or:\n"
" compile-train-graphs tree 1.mdl lex.fst ark:train.tra b, ark:- | \\\n"
" gmm-align-compiled-plusphones 1.acc 1.mdl ark:- scp:train.scp t, "
"ark:1.ali\n";
ParseOptions po(usage);
bool binary = true;
BaseFloat alpha = 0.2;
BaseFloat beam = 200.0;
BaseFloat retry_beam = 0.0;
BaseFloat acoustic_scale = 1.0;
BaseFloat transition_scale = 1.0;
BaseFloat self_loop_scale = 1.0;
int32 max_gauss = 10;
po.Register("binary", &binary, "Write output in binary mode");
po.Register(
"alpha", &alpha,
"Weight on simple phone model (rest of weight goes to normal model)");
po.Register(
"max-gauss", &max_gauss,
"Maximum number of Gaussians in any of the simple phone models.");
po.Register("beam", &beam, "Decoding beam");
po.Register("retry-beam", &retry_beam,
"Decoding beam for second try at alignment");
po.Register("transition-scale", &transition_scale,
"Transition-probability scale [relative to acoustics]");
po.Register("acoustic-scale", &acoustic_scale,
"Scaling factor for acoustic likelihoods");
po.Register("self-loop-scale", &self_loop_scale,
"Scale of self-loop versus non-self-loop log probs [relative "
"to acoustics]");
po.Read(argc, argv);
if (po.NumArgs() < 5 || po.NumArgs() > 6) {
po.PrintUsage();
exit(1);
}
if (retry_beam != 0 && retry_beam <= beam)
KALDI_WARN << "Beams do not make sense: beam " << beam << ", retry-beam "
<< retry_beam;
FasterDecoderOptions decode_opts;
decode_opts.beam = beam; // Don't set the other options.
std::string trans_accs_in_filename = po.GetArg(1),
model_in_filename = po.GetArg(2), fst_rspecifier = po.GetArg(3),
feature_rspecifier = po.GetArg(4),
alignment_wspecifier = po.GetArg(5),
scores_wspecifier = po.GetOptArg(6);
TransitionModel trans_model;
AmDiagGmm am_gmm;
{
bool binary;
Input ki(model_in_filename, &binary);
trans_model.Read(ki.Stream(), binary);
am_gmm.Read(ki.Stream(), binary);
}
Vector<BaseFloat> trans_accs; // Transition accs.
{
bool binary;
Input ki(trans_accs_in_filename, &binary);
trans_accs.Read(ki.Stream(), binary);
KALDI_ASSERT(trans_accs.Dim() == trans_model.NumTransitionIds() + 1);
}
AmDiagGmm phone_am;
CreatePhoneModel(trans_model, am_gmm, trans_accs, max_gauss, &phone_am);
std::vector<int32> tid_to_phone_map;
CreatePhoneMap(trans_model, &tid_to_phone_map);
SequentialTableReader<fst::VectorFstHolder> fst_reader(fst_rspecifier);
RandomAccessBaseFloatMatrixReader feature_reader(feature_rspecifier);
Int32VectorWriter alignment_writer(alignment_wspecifier);
BaseFloatWriter scores_writer(scores_wspecifier);
int num_success = 0, num_no_feat = 0, num_other_error = 0;
BaseFloat tot_like = 0.0;
kaldi::int64 frame_count = 0;
for (; !fst_reader.Done(); fst_reader.Next()) {
std::string key = fst_reader.Key();
if (!feature_reader.HasKey(key)) {
num_no_feat++;
KALDI_WARN << "No features for utterance " << key;
} else {
const Matrix<BaseFloat> &features = feature_reader.Value(key);
VectorFst<StdArc> decode_fst(fst_reader.Value());
fst_reader.FreeCurrent(); // this stops copy-on-write of the fst
// by deleting the fst inside the reader, since we're about to mutate
// the fst by adding transition probs.
if (features.NumRows() == 0) {
KALDI_WARN << "Zero-length utterance: " << key;
num_other_error++;
continue;
}
if (decode_fst.Start() == fst::kNoStateId) {
KALDI_WARN << "Empty decoding graph for " << key;
num_other_error++;
continue;
}
{ // Add transition-probs to the FST.
std::vector<int32> disambig_syms; // empty.
AddTransitionProbs(trans_model, disambig_syms, transition_scale,
self_loop_scale, &decode_fst);
}
// SimpleDecoder decoder(decode_fst, beam);
FasterDecoder decoder(decode_fst, decode_opts);
// makes it a bit faster: 37 sec -> 26 sec on 1000 RM utterances @ beam
// 200.
DecodableAmDiagGmm gmm_decodable(am_gmm, trans_model, features);
BaseFloat log_sum_exp_prune = 0.0;
DecodableAmDiagGmmUnmapped phone_decodable(phone_am, features,
log_sum_exp_prune);
DecodableMapped phone_decodable_mapped(tid_to_phone_map,
&phone_decodable);
// indexed by transition-ids.
DecodableSum sum_decodable(
&gmm_decodable, acoustic_scale * (1.0 - alpha),
&phone_decodable_mapped, acoustic_scale * alpha);
decoder.Decode(&sum_decodable);
VectorFst<LatticeArc> decoded; // linear FST.
bool ans = decoder.ReachedFinal() // consider only final states.
&& decoder.GetBestPath(&decoded);
if (!ans && retry_beam != 0.0) {
KALDI_WARN << "Retrying utterance " << key << " with beam "
<< retry_beam;
decode_opts.beam = retry_beam;
decoder.SetOptions(decode_opts);
decoder.Decode(&sum_decodable);
ans = decoder.ReachedFinal() // consider only final states.
&& decoder.GetBestPath(&decoded);
decode_opts.beam = beam;
decoder.SetOptions(decode_opts);
}
if (ans) {
std::vector<int32> alignment;
std::vector<int32> words;
LatticeWeight weight;
frame_count += features.NumRows();
GetLinearSymbolSequence(decoded, &alignment, &words, &weight);
BaseFloat like =
-(weight.Value1() + weight.Value2()) / acoustic_scale;
tot_like += like;
if (scores_writer.IsOpen())
scores_writer.Write(key, -(weight.Value1() + weight.Value2()));
alignment_writer.Write(key, alignment);
num_success++;
if (num_success % 50 == 0) {
KALDI_LOG << "Processed " << num_success << " utterances, "
<< "log-like per frame for " << key << " is "
<< (like / features.NumRows()) << " over "
<< features.NumRows() << " frames.";
}
} else {
KALDI_WARN << "Did not successfully decode file " << key
<< ", len = " << (features.NumRows());
num_other_error++;
}
}
}
KALDI_LOG << "Overall log-likelihood per frame is "
<< (tot_like / frame_count) << " over " << frame_count
<< " frames.";
KALDI_LOG << "Done " << num_success << ", could not find features for "
<< num_no_feat << ", other errors on " << num_other_error;
if (num_success != 0)
return 0;
else
return 1;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/featbin/compute-cmvn-stats-two-channel.cc<|end_filename|>
// featbin/compute-cmvn-stats-two-channel.cc
// Copyright 2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/kaldi-matrix.h"
#include "transform/cmvn.h"
namespace kaldi {
/*
This function gets the utterances that are the first field of the
contents of the file reco2file_and_channel_rxfilename, and sorts
them into pairs corresponding to A/B sides, or singletons in case
we get one without the other.
*/
void GetUtterancePairs(const std::string &reco2file_and_channel_rxfilename,
std::vector<std::vector<std::string> > *utt_pairs) {
Input ki(reco2file_and_channel_rxfilename);
std::string line;
std::map<std::string, std::vector<std::string> > call_to_uttlist;
while (std::getline(ki.Stream(), line)) {
std::vector<std::string> split_line;
SplitStringToVector(line, " \t\r", true, &split_line);
if (split_line.size() != 3) {
KALDI_ERR << "Expecting 3 fields per line of reco2file_and_channel file "
<< PrintableRxfilename(reco2file_and_channel_rxfilename)
<< ", got: " << line;
}
// lines like: sw02001-A sw02001 A
std::string utt = split_line[0], call = split_line[1];
call_to_uttlist[call].push_back(utt);
}
for (std::map<std::string, std::vector<std::string> >::const_iterator iter =
call_to_uttlist.begin();
iter != call_to_uttlist.end(); ++iter) {
const std::vector<std::string> &uttlist = iter->second;
if (uttlist.size() == 2) {
utt_pairs->push_back(uttlist);
} else {
KALDI_WARN << "Call " << iter->first << " has " << uttlist.size()
<< " utterances, expected two; treating them singly.";
for (size_t i = 0; i < uttlist.size(); i++) {
std::vector<std::string> singleton_list;
singleton_list.push_back(uttlist[i]);
utt_pairs->push_back(singleton_list);
}
}
}
}
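/*
  A worked example of the pairing above (illustrative; the utterance ids are
  the same ones shown in the usage message below, not data read here): given a
  reco2file_and_channel file containing

    sw02001-A sw02001 A
    sw02001-B sw02001 B
    sw02005-A sw02005 A

  utt_pairs would end up holding the pair {sw02001-A, sw02001-B} plus the
  singleton {sw02005-A}, since call sw02005 has only one side listed.
*/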
void AccCmvnStatsForPair(const std::string &utt1, const std::string &utt2,
const MatrixBase<BaseFloat> &feats1,
const MatrixBase<BaseFloat> &feats2,
BaseFloat quieter_channel_weight,
MatrixBase<double> *cmvn_stats1,
MatrixBase<double> *cmvn_stats2) {
KALDI_ASSERT(feats1.NumCols() == feats2.NumCols()); // same dim.
if (feats1.NumRows() != feats2.NumRows()) {
KALDI_WARN << "Number of frames differ between " << utt1 << " and " << utt2
<< ": " << feats1.NumRows() << " vs. " << feats2.NumRows()
<< ", treating them separately.";
AccCmvnStats(feats1, NULL, cmvn_stats1);
AccCmvnStats(feats2, NULL, cmvn_stats2);
return;
}
for (int32 i = 0; i < feats1.NumRows(); i++) {
if (feats1(i, 0) > feats2(i, 0)) {
AccCmvnStats(feats1.Row(i), 1.0, cmvn_stats1);
AccCmvnStats(feats2.Row(i), quieter_channel_weight, cmvn_stats2);
} else {
AccCmvnStats(feats2.Row(i), 1.0, cmvn_stats2);
AccCmvnStats(feats1.Row(i), quieter_channel_weight, cmvn_stats1);
}
}
}
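/*
  Frame-level illustration of the weighting above (hypothetical numbers): if
  on some frame the first feature component (energy or c0) is 12.3 for channel
  A and 9.8 for channel B, that frame goes into A's stats with weight 1.0 and
  into B's stats with weight quieter_channel_weight (0.01 by default; see the
  option registered in main below).
*/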
}
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using kaldi::int32;
const char *usage =
"Compute cepstral mean and variance normalization statistics\n"
"Specialized for two-sided telephone data where we only accumulate\n"
"the louder of the two channels at each frame (and add it to that\n"
"side's stats). Reads a 'reco2file_and_channel' file, normally like\n"
"sw02001-A sw02001 A\n"
"sw02001-B sw02001 B\n"
"sw02005-A sw02005 A\n"
"sw02005-B sw02005 B\n"
"interpreted as <utterance-id> <call-id> <side> and for each "
"<call-id>\n"
"that has two sides, does the 'only-the-louder' computation, else "
"doesn\n"
"per-utterance stats in the normal way.\n"
"Note: loudness is judged by the first feature component, either "
"energy or c0;\n"
"only applicable to MFCCs or PLPs (this code could be modified to "
"handle filterbanks).\n"
"\n"
"Usage: compute-cmvn-stats-two-channel [options] "
"<reco2file-and-channel> <feats-rspecifier> <stats-wspecifier>\n"
"e.g.: compute-cmvn-stats-two-channel "
"data/train_unseg/reco2file_and_channel scp:data/train_unseg/feats.scp "
"ark,t:-\n";
ParseOptions po(usage);
BaseFloat quieter_channel_weight = 0.01;
po.Register("quieter-channel-weight", &quieter_channel_weight,
"For the quieter channel, apply this weight to the stats, so "
"that we still get stats if one channel always dominates.");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
int32 num_done = 0, num_err = 0;
std::string reco2file_and_channel_rxfilename = po.GetArg(1),
feats_rspecifier = po.GetArg(2),
stats_wspecifier = po.GetArg(3);
std::vector<std::vector<std::string> > utt_pairs;
GetUtterancePairs(reco2file_and_channel_rxfilename, &utt_pairs);
RandomAccessBaseFloatMatrixReader feat_reader(feats_rspecifier);
DoubleMatrixWriter writer(stats_wspecifier);
for (size_t i = 0; i < utt_pairs.size(); i++) {
std::vector<std::string> this_pair(utt_pairs[i]);
KALDI_ASSERT(this_pair.size() == 2 || this_pair.size() == 1);
if (this_pair.size() == 2) {
std::string utt1 = this_pair[0], utt2 = this_pair[1];
if (!feat_reader.HasKey(utt1)) {
KALDI_WARN << "No feature data for utterance " << utt1;
num_err++;
this_pair[0] = utt2;
this_pair.pop_back();
// and fall through to the singleton code below.
} else if (!feat_reader.HasKey(utt2)) {
KALDI_WARN << "No feature data for utterance " << utt2;
num_err++;
this_pair.pop_back();
// and fall through to the singleton code below.
} else {
Matrix<BaseFloat> feats1 = feat_reader.Value(utt1),
feats2 = feat_reader.Value(utt2);
int32 dim = feats1.NumCols();
Matrix<double> cmvn_stats1(2, dim + 1), cmvn_stats2(2, dim + 1);
AccCmvnStatsForPair(utt1, utt2, feats1, feats2,
quieter_channel_weight, &cmvn_stats1,
&cmvn_stats2);
writer.Write(utt1, cmvn_stats1);
writer.Write(utt2, cmvn_stats2);
num_done += 2;
continue; // continue so we don't go to the singleton-processing code
// below.
}
}
// process singletons.
std::string utt = this_pair[0];
if (!feat_reader.HasKey(utt)) {
KALDI_WARN << "No feature data for utterance " << utt;
num_err++;
continue;
}
const Matrix<BaseFloat> &feats = feat_reader.Value(utt);
Matrix<double> cmvn_stats(2, feats.NumCols() + 1);
AccCmvnStats(feats, NULL, &cmvn_stats);
writer.Write(utt, cmvn_stats);
num_done++;
}
KALDI_LOG << "Done accumulating CMVN stats for " << num_done
<< " utterances; " << num_err << " had errors.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-init-model.cc<|end_filename|>
// gmmbin/gmm-init-model.cc
// Copyright 2009-2012 Microsoft Corporation Johns Hopkins University (Author:
// <NAME>)
// Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "gmm/mle-am-diag-gmm.h"
#include "tree/build-tree-utils.h"
#include "tree/context-dep.h"
#include "tree/clusterable-classes.h"
#include "util/text-utils.h"
namespace kaldi {
/// InitAmGmm initializes the GMM with one Gaussian per state.
void InitAmGmm(const BuildTreeStatsType &stats, const EventMap &to_pdf_map,
AmDiagGmm *am_gmm, const TransitionModel &trans_model,
BaseFloat var_floor) {
// Get stats split by tree-leaf ( == pdf):
std::vector<BuildTreeStatsType> split_stats;
SplitStatsByMap(stats, to_pdf_map, &split_stats);
split_stats.resize(to_pdf_map.MaxResult() + 1); // ensure that
// if the last leaf had no stats, this vector still has the right size.
// Make sure each leaf has stats.
for (size_t i = 0; i < split_stats.size(); i++) {
if (split_stats[i].empty()) {
std::vector<int32> bad_pdfs(1, i), bad_phones;
GetPhonesForPdfs(trans_model, bad_pdfs, &bad_phones);
std::ostringstream ss;
for (int32 idx = 0; idx < bad_phones.size(); idx++)
ss << bad_phones[idx] << ' ';
KALDI_WARN << "Tree has pdf-id " << i
<< " with no stats; corresponding phone list: " << ss.str();
/*
This probably means you have phones that were unseen in training
and were not shared with other phones in the roots file.
You should modify your roots file as necessary to fix this.
(i.e. share that phone with a similar but seen phone on one line
of the roots file). Be sure to regenerate roots.int from roots.txt,
if using s5 scripts. To work out the phone, search for
pdf-id i in the output of show-transitions (for this model). */
}
}
std::vector<Clusterable *> summed_stats;
SumStatsVec(split_stats, &summed_stats);
Clusterable *avg_stats = SumClusterable(summed_stats);
KALDI_ASSERT(avg_stats != NULL && "No stats available in gmm-init-model.");
for (size_t i = 0; i < summed_stats.size(); i++) {
GaussClusterable *c = static_cast<GaussClusterable *>(
summed_stats[i] != NULL ? summed_stats[i] : avg_stats);
DiagGmm gmm(*c, var_floor);
am_gmm->AddPdf(gmm);
BaseFloat count = c->count();
if (count < 100) {
std::vector<int32> bad_pdfs(1, i), bad_phones;
GetPhonesForPdfs(trans_model, bad_pdfs, &bad_phones);
std::ostringstream ss;
for (int32 idx = 0; idx < bad_phones.size(); idx++)
ss << bad_phones[idx] << ' ';
KALDI_WARN << "Very small count for state " << i << ": " << count
<< "; corresponding phone list: " << ss.str();
}
}
DeletePointers(&summed_stats);
delete avg_stats;
}
/// Get state occupation counts.
void GetOccs(const BuildTreeStatsType &stats, const EventMap &to_pdf_map,
Vector<BaseFloat> *occs) {
// Get stats split by tree-leaf ( == pdf):
std::vector<BuildTreeStatsType> split_stats;
SplitStatsByMap(stats, to_pdf_map, &split_stats);
if (split_stats.size() != to_pdf_map.MaxResult() + 1) {
KALDI_ASSERT(split_stats.size() < to_pdf_map.MaxResult() + 1);
split_stats.resize(to_pdf_map.MaxResult() + 1);
}
occs->Resize(split_stats.size());
for (int32 pdf = 0; pdf < occs->Dim(); pdf++)
(*occs)(pdf) = SumNormalizer(split_stats[pdf]);
}
/// InitAmGmmFromOld initializes the GMM based on a previously trained
/// model and tree, which must require no more phonetic context than
/// the current tree. It does this by finding the closest PDF in the
/// old model, to any given PDF in the new model. Here, "closest" is
/// defined as: has the largest count in common from the tree stats.
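/// As a rough illustration (numbers are hypothetical, not taken from the code
/// below): if the tree stats mapping to a new pdf came from frames the old
/// tree assigned to old pdf 3 (40 frames) and old pdf 7 (260 frames), the new
/// pdf is seeded with a copy of old pdf 7, the one with the largest shared
/// count; new leaves with no stats at all fall back to a GMM built from the
/// globally averaged stats (see avg_gmm below).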
void InitAmGmmFromOld(const BuildTreeStatsType &stats,
const EventMap &to_pdf_map,
int32 N, // context-width
int32 P, // central-position
const std::string &old_tree_rxfilename,
const std::string &old_model_rxfilename,
BaseFloat var_floor, AmDiagGmm *am_gmm) {
AmDiagGmm old_am_gmm;
ContextDependency old_tree;
{ // Read old_gm_gmm
bool binary_in;
TransitionModel old_trans_model;
Input ki(old_model_rxfilename, &binary_in);
old_trans_model.Read(ki.Stream(), binary_in);
old_am_gmm.Read(ki.Stream(), binary_in);
}
{ // Read tree.
bool binary_in;
Input ki(old_tree_rxfilename, &binary_in);
old_tree.Read(ki.Stream(), binary_in);
}
// Get stats split by (new) tree-leaf ( == pdf):
std::vector<BuildTreeStatsType> split_stats;
SplitStatsByMap(stats, to_pdf_map, &split_stats);
// Make sure each leaf has stats.
for (size_t i = 0; i < split_stats.size(); i++) {
if (split_stats[i].empty()) {
KALDI_WARN << "Leaf " << i << " of new tree has no stats.";
}
}
if (static_cast<int32>(split_stats.size()) != to_pdf_map.MaxResult() + 1) {
KALDI_ASSERT(static_cast<int32>(split_stats.size()) <
to_pdf_map.MaxResult() + 1);
KALDI_WARN << "Tree may have final leaf with no stats.";
split_stats.resize(to_pdf_map.MaxResult() + 1);
// avoid indexing errors later.
}
int32 oldN = old_tree.ContextWidth(), oldP = old_tree.CentralPosition();
// avg_stats will be used for leaves that have no stats.
Clusterable *avg_stats = SumStats(stats);
GaussClusterable *avg_stats_gc = dynamic_cast<GaussClusterable *>(avg_stats);
KALDI_ASSERT(avg_stats_gc != NULL && "Empty stats input.");
DiagGmm avg_gmm(*avg_stats_gc, var_floor);
delete avg_stats;
avg_stats = NULL;
avg_stats_gc = NULL;
const EventMap &old_map = old_tree.ToPdfMap();
KALDI_ASSERT(am_gmm->NumPdfs() == 0);
int32 num_pdfs = static_cast<int32>(split_stats.size());
for (int32 pdf = 0; pdf < num_pdfs; pdf++) {
BuildTreeStatsType &my_stats = split_stats[pdf];
// The next statement converts the stats to a possibly narrower older
// context-width (e.g. triphone -> monophone).
// note: don't get confused by the "old" and "new" in the parameters
// to ConvertStats. The next line is correct.
bool ret = ConvertStats(N, P, oldN, oldP, &my_stats);
if (!ret)
KALDI_ERR << "InitAmGmmFromOld: old system has wider context "
"so cannot convert stats.";
// oldpdf_to_count works out a map from old pdf-id to count (for stats
// that align to this "new" pdf... we'll use it to work out the old pdf-id
// that's "closest" in stats overlap to this new pdf ("pdf").
std::map<int32, BaseFloat> oldpdf_to_count;
for (size_t i = 0; i < my_stats.size(); i++) {
EventType evec = my_stats[i].first;
EventAnswerType ans;
bool ret = old_map.Map(evec, &ans);
if (!ret) {
KALDI_ERR << "Could not map context using old tree.";
}
KALDI_ASSERT(my_stats[i].second != NULL);
BaseFloat stats_count = my_stats[i].second->Normalizer();
if (oldpdf_to_count.count(ans) == 0)
oldpdf_to_count[ans] = stats_count;
else
oldpdf_to_count[ans] += stats_count;
}
BaseFloat max_count = 0;
int32 max_old_pdf = -1;
for (std::map<int32, BaseFloat>::const_iterator iter =
oldpdf_to_count.begin();
iter != oldpdf_to_count.end(); ++iter) {
if (iter->second > max_count) {
max_count = iter->second;
max_old_pdf = iter->first;
}
}
if (max_count == 0) { // no overlap - probably a leaf with no stats at all.
KALDI_WARN << "Leaf " << pdf << " of new tree being initialized with "
<< "globally averaged stats.";
am_gmm->AddPdf(avg_gmm);
} else {
am_gmm->AddPdf(old_am_gmm.GetPdf(
max_old_pdf)); // Here is where we copy the relevant old PDF.
}
}
}
}
int main(int argc, char *argv[]) {
using namespace kaldi;
try {
using namespace kaldi;
typedef kaldi::int32 int32;
const char *usage =
"Initialize GMM from decision tree and tree stats\n"
"Usage: gmm-init-model [options] <tree-in> <tree-stats-in> "
"<topo-file> <model-out> [<old-tree> <old-model>]\n"
"e.g.: \n"
" gmm-init-model tree treeacc topo 1.mdl\n"
"or (initializing GMMs with old model):\n"
" gmm-init-model tree treeacc topo 1.mdl prev/tree prev/30.mdl\n";
bool binary = true;
double var_floor = 0.01;
std::string occs_out_filename;
ParseOptions po(usage);
po.Register("binary", &binary, "Write output in binary mode");
po.Register("write-occs", &occs_out_filename,
"File to write state "
"occupancies to.");
po.Register("var-floor", &var_floor,
"Variance floor used while "
"initializing Gaussians");
po.Read(argc, argv);
if (po.NumArgs() != 4 && po.NumArgs() != 6) {
po.PrintUsage();
exit(1);
}
std::string tree_filename = po.GetArg(1), stats_filename = po.GetArg(2),
topo_filename = po.GetArg(3), model_out_filename = po.GetArg(4),
old_tree_filename = po.GetOptArg(5),
old_model_filename = po.GetOptArg(6);
ContextDependency ctx_dep;
ReadKaldiObject(tree_filename, &ctx_dep);
BuildTreeStatsType stats;
{
bool binary_in;
GaussClusterable gc; // dummy needed to provide type.
Input ki(stats_filename, &binary_in);
ReadBuildTreeStats(ki.Stream(), binary_in, gc, &stats);
}
KALDI_LOG << "Number of separate statistics is " << stats.size();
HmmTopology topo;
ReadKaldiObject(topo_filename, &topo);
const EventMap &to_pdf = ctx_dep.ToPdfMap(); // not owned here.
TransitionModel trans_model(ctx_dep, topo);
// Now, the summed_stats will be used to initialize the GMM.
AmDiagGmm am_gmm;
if (old_tree_filename.empty())
InitAmGmm(
stats, to_pdf, &am_gmm, trans_model,
var_floor); // Normal case: initialize 1 Gauss/model from tree stats.
else {
InitAmGmmFromOld(stats, to_pdf, ctx_dep.ContextWidth(),
ctx_dep.CentralPosition(), old_tree_filename,
old_model_filename, var_floor, &am_gmm);
}
if (!occs_out_filename.empty()) { // write state occs
Vector<BaseFloat> occs;
GetOccs(stats, to_pdf, &occs);
Output ko(occs_out_filename, binary);
occs.Write(ko.Stream(), binary);
}
{
Output ko(model_out_filename, binary);
trans_model.Write(ko.Stream(), binary);
am_gmm.Write(ko.Stream(), binary);
}
KALDI_LOG << "Wrote model.";
DeleteBuildTreeStats(&stats);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/lat/minimize-lattice.cc<|end_filename|>
// lat/minimize-lattice.cc
// Copyright 2009-2011 Saarland University (Author: <NAME>)
// 2012-2013 Johns Hopkins University (Author: <NAME>); Chao Weng;
// <NAME>
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "lat/minimize-lattice.h"
#include "hmm/transition-model.h"
#include "util/stl-utils.h"
namespace fst {
/*
Process the states in reverse topological order.
For each state, compute a hash-value that will be the same for states
that can be combined. Then for each pair of states with the
same hash value, check that the "to-states" map to the
same equivalence class and that the weights are sufficiently similar.
*/
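// A condensed sketch of the flow implemented by this class (the method names
// below are the ones defined further down; this is only an outline):
//
//   Minimize():
//     TopSort(clat_) if not already topologically sorted;
//     ComputeStateHashValues();  // hash each state in reverse topological order
//     ComputeStateMap();         // bucket states by hash, map each state to an
//                                // equivalent representative via Equivalent()
//     ModifyModel();             // redirect arcs/start state to representatives,
//                                // then fst::Connect() to drop unreachable states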
template <class Weight, class IntType>
class CompactLatticeMinimizer {
public:
typedef CompactLatticeWeightTpl<Weight, IntType> CompactWeight;
typedef ArcTpl<CompactWeight> CompactArc;
typedef typename CompactArc::StateId StateId;
typedef typename CompactArc::Label Label;
typedef size_t HashType;
CompactLatticeMinimizer(MutableFst<CompactArc> *clat,
float delta = fst::kDelta)
: clat_(clat), delta_(delta) {}
bool Minimize() {
if (clat_->Properties(kTopSorted, true) == 0) {
if (!TopSort(clat_)) {
KALDI_WARN << "Topological sorting of state-level lattice failed "
"(probably your lexicon has empty words or your LM has "
"epsilon cycles; this "
" is a bad idea.)";
return false;
}
}
ComputeStateHashValues();
ComputeStateMap();
ModifyModel();
return true;
}
static HashType ConvertStringToHashValue(const std::vector<IntType> &vec) {
const HashType prime = 53281;
kaldi::VectorHasher<IntType> h;
HashType ans = static_cast<HashType>(h(vec));
if (ans == 0) ans = prime;
// We don't allow a zero answer, as this can cause too many values to be the
// same.
return ans;
}
static void InitHashValue(const CompactWeight &final_weight, HashType *h) {
const HashType prime1 = 33317, prime2 = 607; // it's pretty random.
if (final_weight == CompactWeight::Zero())
*h = prime1;
else
*h = prime2 * ConvertStringToHashValue(final_weight.String());
}
// It's important that this function and UpdateHashValueForFinalProb be
// insensitive to the order in which it's called, as the order of the arcs
// won't necessarily be the same for different equivalent states.
static void UpdateHashValueForTransition(const CompactWeight &weight,
Label label,
HashType &next_state_hash,
HashType *h) {
const HashType prime1 = 1447, prime2 = 51907;
if (label == 0) label = prime2; // Zeros will cause problems.
*h += prime1 * label *
(1 + ConvertStringToHashValue(weight.String()) * next_state_hash);
// Above, the "1 +" is to ensure that if somehow we get zeros due to
// weird word sequences, they don't propagate.
}
void ComputeStateHashValues() {
// Note: clat_ is topologically sorted, and StateId is
// signed. Each state's hash value is only a function of topologically-later
// states' hash values.
state_hashes_.resize(clat_->NumStates());
for (StateId s = clat_->NumStates() - 1; s >= 0; s--) {
HashType this_hash;
InitHashValue(clat_->Final(s), &this_hash);
for (ArcIterator<MutableFst<CompactArc> > aiter(*clat_, s); !aiter.Done();
aiter.Next()) {
const CompactArc &arc = aiter.Value();
HashType next_hash;
if (arc.nextstate > s) {
next_hash = state_hashes_[arc.nextstate];
} else {
KALDI_ASSERT(s == arc.nextstate &&
"Lattice not topologically sorted [code error]");
next_hash = 1;
KALDI_WARN << "Minimizing lattice with self-loops "
"(lattices should not have self-loops)";
}
UpdateHashValueForTransition(arc.weight, arc.ilabel, next_hash,
&this_hash);
}
state_hashes_[s] = this_hash;
}
}
struct EquivalenceSorter {
// This struct has an operator () which you can interpret as a less-than (<)
// operator for arcs. We sort on ilabel; since the lattice is supposed to
// be deterministic, this should completely determine the ordering (there
// should not be more than one arc with the same ilabel, out of the same
// state). For identical ilabels we next sort on the nextstate, simply to
// better handle non-deterministic input (we do our best on this, without
// guaranteeing full minimization). We could sort on the strings next, but
// this would be an unnecessary hassle as we only really need good
// performance on deterministic input.
bool operator()(const CompactArc &a, const CompactArc &b) const {
if (a.ilabel < b.ilabel)
return true;
else if (a.ilabel > b.ilabel)
return false;
else if (a.nextstate < b.nextstate)
return true;
else
return false;
}
};
// This function works out whether s and t are equivalent, assuming
// we have already partitioned all topologically-later states into
// equivalence classes (i.e. set up state_map_).
bool Equivalent(StateId s, StateId t) const {
if (!ApproxEqual(clat_->Final(s), clat_->Final(t), delta_)) return false;
if (clat_->NumArcs(s) != clat_->NumArcs(t)) return false;
std::vector<CompactArc> s_arcs;
std::vector<CompactArc> t_arcs;
for (int32 iter = 0; iter <= 1; iter++) {
StateId state = (iter == 0 ? s : t);
std::vector<CompactArc> &arcs = (iter == 0 ? s_arcs : t_arcs);
arcs.reserve(clat_->NumArcs(s));
for (ArcIterator<MutableFst<CompactArc> > aiter(*clat_, state);
!aiter.Done(); aiter.Next()) {
CompactArc arc = aiter.Value();
if (arc.nextstate == state) {
// This is a special case for states that have self-loops. If two
// states have an identical self-loop arc, they may be equivalent.
arc.nextstate = kNoStateId;
} else {
KALDI_ASSERT(arc.nextstate > state);
// while (state_map_[arc.nextstate] != arc.nextstate)
arc.nextstate = state_map_[arc.nextstate];
arcs.push_back(arc);
}
}
EquivalenceSorter s;
std::sort(arcs.begin(), arcs.end(), s);
}
KALDI_ASSERT(s_arcs.size() == t_arcs.size());
for (size_t i = 0; i < s_arcs.size(); i++) {
if (s_arcs[i].nextstate != t_arcs[i].nextstate) return false;
KALDI_ASSERT(s_arcs[i].ilabel ==
s_arcs[i].olabel); // CompactLattices are
// supposed to be
// acceptors.
if (s_arcs[i].ilabel != t_arcs[i].ilabel) return false;
// We've already mapped to equivalence classes.
if (s_arcs[i].nextstate != t_arcs[i].nextstate) return false;
if (!ApproxEqual(s_arcs[i].weight, t_arcs[i].weight)) return false;
}
return true;
}
void ComputeStateMap() {
// We have to compute the state mapping in reverse topological order also,
// since the equivalence test relies on later states being already sorted
// out into equivalence classes (by state_map_).
StateId num_states = clat_->NumStates();
unordered_map<HashType, std::vector<StateId> > hash_groups_;
for (StateId s = 0; s < num_states; s++)
hash_groups_[state_hashes_[s]].push_back(s);
state_map_.resize(num_states);
for (StateId s = 0; s < num_states; s++)
state_map_[s] = s; // Default mapping.
{ // This block is just diagnostic.
typedef typename unordered_map<
HashType, std::vector<StateId> >::const_iterator HashIter;
size_t max_size = 0;
for (HashIter iter = hash_groups_.begin(); iter != hash_groups_.end();
++iter)
max_size = std::max(max_size, iter->second.size());
if (max_size > 1000) {
KALDI_WARN << "Largest equivalence group (using hash) is " << max_size
<< ", minimization might be slow.";
}
}
for (StateId s = num_states - 1; s >= 0; s--) {
HashType hash = state_hashes_[s];
const std::vector<StateId> &equivalence_class = hash_groups_[hash];
KALDI_ASSERT(!equivalence_class.empty());
for (size_t i = 0; i < equivalence_class.size(); i++) {
StateId t = equivalence_class[i];
// Below, there is no point doing the test if state_map_[t] != t, because
// in that case we will, before or after this, be comparing with another
// state that is equivalent to t.
if (t > s && state_map_[t] == t && Equivalent(s, t)) {
state_map_[s] = t;
break;
}
}
}
}
void ModifyModel() {
// Modifies the model according to state_map_;
StateId num_removed = 0;
StateId num_states = clat_->NumStates();
for (StateId s = 0; s < num_states; s++)
if (state_map_[s] != s) num_removed++;
KALDI_VLOG(3) << "Removing " << num_removed << " of " << num_states
<< " states.";
if (num_removed == 0) return; // Nothing to do.
clat_->SetStart(state_map_[clat_->Start()]);
for (StateId s = 0; s < num_states; s++) {
if (state_map_[s] != s)
continue; // There is no point modifying states we're removing.
for (MutableArcIterator<MutableFst<CompactArc> > aiter(clat_, s);
!aiter.Done(); aiter.Next()) {
CompactArc arc = aiter.Value();
StateId mapped_nextstate = state_map_[arc.nextstate];
if (mapped_nextstate != arc.nextstate) {
arc.nextstate = mapped_nextstate;
aiter.SetValue(arc);
}
}
}
fst::Connect(clat_);
}
private:
MutableFst<ArcTpl<CompactLatticeWeightTpl<Weight, IntType> > > *clat_;
float delta_;
std::vector<HashType> state_hashes_;
std::vector<StateId> state_map_; // maps each state to itself or to some
// equivalent state. Within each equivalence
// class, we pick one arbitrarily.
};
template <class Weight, class IntType>
bool MinimizeCompactLattice(
MutableFst<ArcTpl<CompactLatticeWeightTpl<Weight, IntType> > > *clat,
float delta) {
CompactLatticeMinimizer<Weight, IntType> minimizer(clat, delta);
return minimizer.Minimize();
}
// Instantiate for CompactLattice type.
template bool MinimizeCompactLattice<kaldi::LatticeWeight, kaldi::int32>(
MutableFst<kaldi::CompactLatticeArc> *clat, float delta);
} // namespace fst
<|start_filename|>tonic-suite/asr/src/nnet2/decodable-am-nnet.h<|end_filename|>
// nnet2/decodable-am-nnet.h
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_NNET2_DECODABLE_AM_NNET_H_
#define KALDI_NNET2_DECODABLE_AM_NNET_H_
#include <vector>
#include "base/kaldi-common.h"
#include "gmm/am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "itf/decodable-itf.h"
#include "nnet2/am-nnet.h"
#include "nnet2/nnet-compute.h"
namespace kaldi {
namespace nnet2 {
/// DecodableAmNnet is a decodable object that decodes
/// with a neural net acoustic model of type AmNnet.
class DecodableAmNnet : public DecodableInterface {
public:
DecodableAmNnet(const TransitionModel &trans_model, const AmNnet &am_nnet,
const CuMatrixBase<BaseFloat> &feats,
bool pad_input = true, // if !pad_input, the NumIndices()
// will be < feats.NumRows().
BaseFloat prob_scale = 1.0)
: trans_model_(trans_model) {
// Note: we could make this more memory-efficient by doing the
// computation in smaller chunks than the whole utterance, and not
// storing the whole thing. We'll leave this for later.
int32 num_rows = feats.NumRows() -
(pad_input ? 0 : am_nnet.GetNnet().LeftContext() +
am_nnet.GetNnet().RightContext());
if (num_rows <= 0) {
KALDI_WARN << "Input with " << feats.NumRows() << " rows will produce "
<< "empty output.";
return;
}
CuMatrix<BaseFloat> log_probs(num_rows, trans_model.NumPdfs());
// the following function is declared in nnet-compute.h
NnetComputation(am_nnet.GetNnet(), feats, pad_input, &log_probs);
log_probs.ApplyFloor(1.0e-20); // Avoid log of zero which leads to NaN.
log_probs.ApplyLog();
CuVector<BaseFloat> priors(am_nnet.Priors());
KALDI_ASSERT(priors.Dim() == trans_model.NumPdfs() &&
"Priors in neural network not set up.");
priors.ApplyLog();
// subtract log-prior (divide by prior)
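// (Dividing the posterior p(pdf|x) by the prior p(pdf) gives a quantity
// proportional to the likelihood p(x|pdf), which is what the decoder expects;
// this is the standard "hybrid" scaled-likelihood trick.)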
log_probs.AddVecToRows(-1.0, priors);
// apply probability scale.
log_probs.Scale(prob_scale);
// Transfer the log-probs to the CPU for faster access by the
// decoding process.
log_probs_.Swap(&log_probs);
}
// Note, frames are numbered from zero. But state_index is numbered
// from one (this routine is called by FSTs).
virtual BaseFloat LogLikelihood(int32 frame, int32 transition_id) {
return log_probs_(frame, trans_model_.TransitionIdToPdf(transition_id));
}
virtual int32 NumFramesReady() const { return log_probs_.NumRows(); }
// Indices are one-based! This is for compatibility with OpenFst.
virtual int32 NumIndices() const { return trans_model_.NumTransitionIds(); }
virtual bool IsLastFrame(int32 frame) const {
KALDI_ASSERT(frame < NumFramesReady());
return (frame == NumFramesReady() - 1);
}
protected:
const TransitionModel &trans_model_;
Matrix<BaseFloat>
log_probs_; // actually not really probabilities, since we divide
// by the prior -> they won't sum to one.
KALDI_DISALLOW_COPY_AND_ASSIGN(DecodableAmNnet);
};
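// A minimal usage sketch (illustrative only; 'trans_model', 'am_nnet', 'feats'
// and 'decoder' are assumed to exist in the caller and are not defined here):
//
//   CuMatrix<BaseFloat> cu_feats(feats);
//   DecodableAmNnet decodable(trans_model, am_nnet, cu_feats,
//                             true /* pad_input */, 1.0 /* prob_scale */);
//   decoder.Decode(&decodable);  // any decoder taking a DecodableInterface*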
/// This version of DecodableAmNnet is intended for a version of the decoder
/// that processes different utterances with multiple threads. It needs to do
/// the computation in a different place than the initializer, since the
/// initializer gets called in the main thread of the program.
class DecodableAmNnetParallel : public DecodableInterface {
public:
DecodableAmNnetParallel(const TransitionModel &trans_model,
const AmNnet &am_nnet,
const CuMatrix<BaseFloat> *feats,
bool pad_input = true, BaseFloat prob_scale = 1.0)
: trans_model_(trans_model),
am_nnet_(am_nnet),
feats_(feats),
pad_input_(pad_input),
prob_scale_(prob_scale) {
KALDI_ASSERT(feats_ != NULL);
}
void Compute() {
log_probs_.Resize(feats_->NumRows(), trans_model_.NumPdfs());
// the following function is declared in nnet-compute.h
NnetComputation(am_nnet_.GetNnet(), *feats_, pad_input_, &log_probs_);
log_probs_.ApplyFloor(1.0e-20); // Avoid log of zero which leads to NaN.
log_probs_.ApplyLog();
CuVector<BaseFloat> priors(am_nnet_.Priors());
KALDI_ASSERT(priors.Dim() == trans_model_.NumPdfs() &&
"Priors in neural network not set up.");
priors.ApplyLog();
// subtract log-prior (divide by prior)
log_probs_.AddVecToRows(-1.0, priors);
// apply probability scale.
log_probs_.Scale(prob_scale_);
delete feats_;
feats_ = NULL;
}
// Note, frames are numbered from zero. But state_index is numbered
// from one (this routine is called by FSTs).
virtual BaseFloat LogLikelihood(int32 frame, int32 transition_id) {
if (feats_) Compute(); // this function sets feats_ to NULL.
return log_probs_(frame, trans_model_.TransitionIdToPdf(transition_id));
}
int32 NumFrames() const {
if (feats_) {
if (pad_input_)
return feats_->NumRows();
else {
int32 ans = feats_->NumRows() - am_nnet_.GetNnet().LeftContext() -
am_nnet_.GetNnet().RightContext();
if (ans < 0) ans = 0;
return ans;
}
} else {
return log_probs_.NumRows();
}
}
// Indices are one-based! This is for compatibility with OpenFst.
virtual int32 NumIndices() const { return trans_model_.NumTransitionIds(); }
virtual bool IsLastFrame(int32 frame) const {
KALDI_ASSERT(frame < NumFrames());
return (frame == NumFrames() - 1);
}
~DecodableAmNnetParallel() {
if (feats_) delete feats_;
}
protected:
const TransitionModel &trans_model_;
const AmNnet &am_nnet_;
CuMatrix<BaseFloat>
log_probs_; // actually not really probabilities, since we divide
// by the prior -> they won't sum to one.
const CuMatrix<BaseFloat> *feats_;
bool pad_input_;
BaseFloat prob_scale_;
KALDI_DISALLOW_COPY_AND_ASSIGN(DecodableAmNnetParallel);
};
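// Sketch of the intended multi-threaded pattern (illustrative only; names such
// as 'decoder' are assumptions): the object is constructed cheaply, and the
// first LogLikelihood() call triggers Compute(), so the expensive forward pass
// runs in whichever worker thread does the decoding:
//
//   // main thread:
//   DecodableAmNnetParallel d(trans_model, am_nnet,
//                             new CuMatrix<BaseFloat>(feats));
//   // worker thread:
//   decoder.Decode(&d);  // first LogLikelihood() call runs Compute()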
} // namespace nnet2
} // namespace kaldi
#endif // KALDI_NNET2_DECODABLE_AM_NNET_H_
<|start_filename|>tonic-suite/asr/src/util/simple-options.h<|end_filename|>
// util/simple-options.h
// Copyright 2013 <NAME>, Tallinn University of Technology
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_UTIL_SIMPLE_OPTIONS_H_
#define KALDI_UTIL_SIMPLE_OPTIONS_H_
#include <map>
#include <string>
#include <vector>
#include "base/kaldi-common.h"
#include "itf/options-itf.h"
namespace kaldi {
/// The class SimpleOptions is an implementation of OptionsItf that allows
/// setting and getting option values programmatically, i.e., via getter
/// and setter methods. It doesn't provide any command line parsing
/// functionality.
/// The class ParseOptions should be used for command-line options.
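/// A minimal usage sketch (illustrative; the variable names below are not part
/// of this header):
///
///   SimpleOptions opts;
///   int32 num_iters = 10;
///   opts.Register("num-iters", &num_iters, "Number of iterations");
///   opts.SetOption("num-iters", 20);      // returns false if the key is unknown
///   int32 value;
///   opts.GetOption("num-iters", &value);  // value == 20, num_iters == 20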
class SimpleOptions : public OptionsItf {
public:
SimpleOptions() {}
virtual ~SimpleOptions() {}
// Methods from the interface
void Register(const std::string &name, bool *ptr, const std::string &doc);
void Register(const std::string &name, int32 *ptr, const std::string &doc);
void Register(const std::string &name, uint32 *ptr, const std::string &doc);
void Register(const std::string &name, float *ptr, const std::string &doc);
void Register(const std::string &name, double *ptr, const std::string &doc);
void Register(const std::string &name, std::string *ptr,
const std::string &doc);
// set option with the specified key, return true if successful
bool SetOption(const std::string &key, const bool &value);
bool SetOption(const std::string &key, const int32 &value);
bool SetOption(const std::string &key, const uint32 &value);
bool SetOption(const std::string &key, const float &value);
bool SetOption(const std::string &key, const double &value);
bool SetOption(const std::string &key, const std::string &value);
bool SetOption(const std::string &key, const char *value);
// get option with the specified key and put to 'value',
// return true if successful
bool GetOption(const std::string &key, bool *value);
bool GetOption(const std::string &key, int32 *value);
bool GetOption(const std::string &key, uint32 *value);
bool GetOption(const std::string &key, float *value);
bool GetOption(const std::string &key, double *value);
bool GetOption(const std::string &key, std::string *value);
enum OptionType { kBool, kInt32, kUint32, kFloat, kDouble, kString };
struct OptionInfo {
OptionInfo(const std::string &doc, OptionType type)
: doc(doc), type(type) {}
std::string doc;
OptionType type;
};
std::vector<std::pair<std::string, OptionInfo> > GetOptionInfoList();
/*
* Puts the type of the option with name 'key' in the argument 'type'.
* Return true if such option is found, false otherwise.
*/
bool GetOptionType(const std::string &key, OptionType *type);
private:
std::vector<std::pair<std::string, OptionInfo> > option_info_list_;
// maps for option variables
std::map<std::string, bool *> bool_map_;
std::map<std::string, int32 *> int_map_;
std::map<std::string, uint32 *> uint_map_;
std::map<std::string, float *> float_map_;
std::map<std::string, double *> double_map_;
std::map<std::string, std::string *> string_map_;
};
} // namespace kaldi
#endif // KALDI_UTIL_SIMPLE_OPTIONS_H_
<|start_filename|>tonic-suite/asr/src/cudamatrix/cu-randkernels-ansi.h<|end_filename|>
// cudamatrix/cu-randkernels-ansi.h
// Copyright 2012 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_CUDAMATRIX_CU_RANDKERNELS_ANSI_H_
#define KALDI_CUDAMATRIX_CU_RANDKERNELS_ANSI_H_
#include "cudamatrix/cu-matrixdim.h"
#include "cudamatrix/cu-kernels-ansi.h"
#if HAVE_CUDA == 1
extern "C" {
/*********************************************************
* float CUDA kernel calls
*/
void cudaF_rand(dim3 Gr, dim3 Bl, float *mat, uint32_cuda *z1, uint32_cuda *z2,
uint32_cuda *z3, uint32_cuda *z4, MatrixDim d);
void cudaF_gauss_rand(dim3 Gr, dim3 Bl, float *mat, uint32_cuda *z1,
uint32_cuda *z2, uint32_cuda *z3, uint32_cuda *z4,
MatrixDim d);
void cudaF_vec_gauss_rand(int Gr, int Bl, float *v, uint32_cuda *z1,
uint32_cuda *z2, uint32_cuda *z3, uint32_cuda *z4,
int dim);
void cudaF_binarize_probs(dim3 Gr, dim3 Bl, float *states, const float *probs,
float *rand, MatrixDim d);
/*********************************************************
* double CUDA kernel calls
*/
void cudaD_rand(dim3 Gr, dim3 Bl, double *mat, uint32_cuda *z1, uint32_cuda *z2,
uint32_cuda *z3, uint32_cuda *z4, MatrixDim d);
void cudaD_gauss_rand(dim3 Gr, dim3 Bl, double *mat, uint32_cuda *z1,
uint32_cuda *z2, uint32_cuda *z3, uint32_cuda *z4,
MatrixDim d);
void cudaD_vec_gauss_rand(int Gr, int Bl, double *v, uint32_cuda *z1,
uint32_cuda *z2, uint32_cuda *z3, uint32_cuda *z4,
int dim);
void cudaD_binarize_probs(dim3 Gr, dim3 Bl, double *states, const double *probs,
double *rand, MatrixDim d);
}
#endif // HAVE_CUDA
#endif
<|start_filename|>tonic-suite/asr/src/cudamatrix/cu-block-matrix-test.cc<|end_filename|>
// cudamatrix/cu-block-matrix-test.cc
// Copyright 2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <iostream>
#include <vector>
#include <cstdlib>
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "cudamatrix/cu-matrix-lib.h"
using namespace kaldi;
namespace kaldi {
template <typename Real>
static bool ApproxEqual(const CuBlockMatrix<Real> &A,
const CuBlockMatrix<Real> &B, float tol = 0.001) {
CuMatrix<Real> Acopy(A), Bcopy(B);
return Acopy.ApproxEqual(Bcopy, tol);
}
template <class Real>
static void UnitTestCuBlockMatrixIO() {
for (int32 i = 0; i < 10; i++) {
int32 num_blocks = Rand() % 5;
std::vector<CuMatrix<Real> > data(num_blocks);
for (int32 b = 0; b < num_blocks; b++) {
int32 dimM = 100 + Rand() % 255, dimN = 10 + Rand() % 20;
if (b % 2 == 0) std::swap(dimM, dimN);
data[b].Resize(dimM, dimN);
data[b].SetRandn();
}
CuBlockMatrix<Real> B(data);
std::ostringstream os;
bool binary = (i % 4 < 2);
B.Write(os, binary);
CuBlockMatrix<Real> B2;
std::istringstream is(os.str());
B2.Read(is, binary);
CuMatrix<Real> mat(B), mat2(B2);
AssertEqual(mat, mat2);
if (!data.empty()) KALDI_ASSERT(mat.Sum() != 0.0);
}
}
template <class Real>
static void UnitTestCuBlockMatrixAddMatBlock() {
for (int32 i = 0; i < 20; i++) {
int32 num_blocks = Rand() % 5;
std::vector<CuMatrix<Real> > data(num_blocks);
for (int32 b = 0; b < num_blocks; b++) {
int32 dimM = 100 + Rand() % 255, dimN = 10 + Rand() % 20;
// early failures will have small dim for easier eyeballing.
if (b % 2 == 0) std::swap(dimM, dimN);
data[b].Resize(dimM, dimN);
data[b].SetRandn();
}
CuBlockMatrix<Real> B(data);
int32 B_num_rows = B.NumRows(), B_num_cols = B.NumCols();
// will do X += A B
MatrixTransposeType transB = (i % 2 == 1 ? kTrans : kNoTrans),
transA = (i % 3 == 1 ? kTrans : kNoTrans);
if (transB == kTrans) std::swap(B_num_rows, B_num_cols);
int32 X_num_rows = 100 + Rand() % 255, X_num_cols = B_num_cols,
A_num_rows = X_num_rows, A_num_cols = B_num_rows;
if (data.size() == 0) {
X_num_rows = 0;
A_num_rows = 0;
}
if (transA == kTrans) std::swap(A_num_rows, A_num_cols);
Real alpha = 2.0, beta = -1.0;
CuMatrix<Real> X(X_num_rows, X_num_cols);
X.SetRandn();
CuMatrix<Real> A(A_num_rows, A_num_cols);
A.SetRandn();
CuMatrix<Real> Xcopy(X), Bcopy(B), Xorig(X), Aorig(A);
Xcopy.AddMatMat(alpha, A, transA, Bcopy, transB, beta);
X.AddMatBlock(alpha, A, transA, B, transB, beta);
AssertEqual(X, Xcopy);
}
}
template <class Real>
static void UnitTestCuBlockMatrixAddMatMat() {
for (int32 i = 0; i < 20; i++) {
int32 num_blocks = Rand() % 5;
std::vector<CuMatrix<Real> > data(num_blocks);
for (int32 b = 0; b < num_blocks; b++) {
int32 dimM = 100 + Rand() % 255, dimN = 10 + Rand() % 20;
if (i == 0) {
dimM = 1;
dimN = 1;
}
// early failures will have small dim for easier eyeballing.
if (b % 2 == 0) std::swap(dimM, dimN);
data[b].Resize(dimM, dimN);
data[b].SetRandn();
}
CuBlockMatrix<Real> B(data);
int32 B_num_rows = B.NumRows(), B_num_cols = B.NumCols();
// will do B += C D
int32 C_num_rows = B_num_rows, C_num_cols = 100 + Rand() % 255;
if (C_num_rows == 0) C_num_cols = 0;
int32 D_num_rows = C_num_cols, D_num_cols = B_num_cols;
MatrixTransposeType transC = (i % 2 == 1 ? kTrans : kNoTrans),
transD = (i % 3 == 1 ? kTrans : kNoTrans);
if (transC == kTrans) std::swap(C_num_rows, C_num_cols);
if (transD == kTrans) std::swap(D_num_rows, D_num_cols);
CuMatrix<Real> C(C_num_rows, C_num_cols), D(D_num_rows, D_num_cols);
C.SetRandn();
D.SetRandn();
CuMatrix<Real> Bmat(B);
Real alpha = 2.0, beta = -1.0;
CuBlockMatrix<Real> Bcopy(B);
B.AddMatMat(alpha, C, transC, D, transD, beta);
Bmat.AddMatMat(alpha, C, transC, D, transD, beta);
// Now check that the block-structured part of Bmat is the
// same as B.
Bcopy.CopyFromMat(Bmat); // copy block-structured part from Bmat to Bcopy.
if (!ApproxEqual(B, Bcopy)) {
KALDI_WARN
<< "CuBlockMatrixTest failure, please report to maintainers: Bcopy = "
<< Bcopy << ", B = " << B << ", C = " << C << ", D = " << D
<< ", Bmat = " << B << " transD = " << transD
<< ", transC = " << transC;
KALDI_ERR << "Please give this log to the maintainers.";
}
KALDI_ASSERT(Bmat.Sum() != 0 || B_num_rows == 0);
}
}
template <typename Real>
void CuBlockMatrixUnitTest() {
UnitTestCuBlockMatrixIO<Real>();
UnitTestCuBlockMatrixAddMatBlock<Real>();
UnitTestCuBlockMatrixAddMatMat<Real>();
}
} // namespace kaldi
int main() {
for (int32 loop = 0; loop < 2; loop++) {
#if HAVE_CUDA == 1
if (loop == 0)
CuDevice::Instantiate().SelectGpuId("no"); // -1 means no GPU
else
CuDevice::Instantiate().SelectGpuId("yes"); // -2 .. automatic selection
#endif
kaldi::CuBlockMatrixUnitTest<float>();
#if HAVE_CUDA == 1
if (CuDevice::Instantiate().DoublePrecisionSupported()) {
kaldi::CuBlockMatrixUnitTest<double>();
} else {
KALDI_WARN << "Double precision not supported";
}
#else
kaldi::CuBlockMatrixUnitTest<double>();
#endif
if (loop == 0)
KALDI_LOG << "Tests without GPU use succeeded.";
else
KALDI_LOG << "Tests with GPU use (if available) succeeded.";
}
#if HAVE_CUDA == 1
CuDevice::Instantiate().PrintProfile();
#endif
return 0;
}
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-global-get-post.cc<|end_filename|>
// gmmbin/gmm-global-get-post.cc
// Copyright 2009-2011 Saarland University; Microsoft Corporation
// 2013-2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/diag-gmm.h"
#include "hmm/posterior.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using std::vector;
typedef kaldi::int32 int32;
const char *usage =
"Precompute Gaussian indices and convert immediately to top-n\n"
"posteriors (useful in iVector extraction with diagonal UBMs)\n"
"See also: gmm-gselect, fgmm-gselect, fgmm-global-gselect-to-post\n"
" (e.g. in training UBMs, SGMMs, tied-mixture systems)\n"
" For each frame, gives a list of the n best Gaussian indices,\n"
" sorted from best to worst.\n"
"Usage: \n"
" gmm-global-get-post [options] <model-in> <feature-rspecifier> "
"<post-wspecifier>\n"
"e.g.: gmm-global-get-post --n=20 1.gmm \"ark:feature-command |\" "
"\"ark,t:|gzip -c >post.1.gz\"\n";
ParseOptions po(usage);
int32 num_post = 50;
BaseFloat min_post = 0.0;
po.Register("n", &num_post, "Number of Gaussians to keep per frame\n");
po.Register("min-post", &min_post,
"Minimum posterior we will output "
"before pruning and renormalizing (e.g. 0.01)");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string model_filename = po.GetArg(1),
feature_rspecifier = po.GetArg(2),
post_wspecifier = po.GetArg(3);
DiagGmm gmm;
ReadKaldiObject(model_filename, &gmm);
KALDI_ASSERT(num_post > 0);
KALDI_ASSERT(min_post < 1.0);
int32 num_gauss = gmm.NumGauss();
if (num_post > num_gauss) {
KALDI_WARN << "You asked for " << num_post << " Gaussians but GMM "
<< "only has " << num_gauss << ", returning this many. ";
num_post = num_gauss;
}
double tot_like = 0.0;
kaldi::int64 tot_t = 0;
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
PosteriorWriter post_writer(post_wspecifier);
int32 num_done = 0, num_err = 0;
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string utt = feature_reader.Key();
const Matrix<BaseFloat> &feats = feature_reader.Value();
int32 T = feats.NumRows();
if (T == 0) {
KALDI_WARN << "Empty features for utterance " << utt;
num_err++;
continue;
}
if (feats.NumCols() != gmm.Dim()) {
KALDI_WARN << "Dimension mismatch for utterance " << utt << ": got "
<< feats.NumCols() << ", expected " << gmm.Dim();
num_err++;
continue;
}
vector<vector<int32> > gselect(T);
Matrix<BaseFloat> loglikes;
gmm.LogLikelihoods(feats, &loglikes);
Posterior post(T);
double log_like_this_file = 0.0;
for (int32 t = 0; t < T; t++) {
log_like_this_file += VectorToPosteriorEntry(loglikes.Row(t), num_post,
min_post, &(post[t]));
}
KALDI_VLOG(1) << "Processed utterance " << utt << ", average likelihood "
<< (log_like_this_file / T) << " over " << T << " frames";
tot_like += log_like_this_file;
tot_t += T;
post_writer.Write(utt, post);
num_done++;
}
KALDI_LOG << "Done " << num_done << " files, " << num_err
<< " with errors, average UBM log-likelihood is "
<< (tot_like / tot_t) << " over " << tot_t << " frames.";
if (num_done != 0)
return 0;
else
return 1;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2bin/nnet-combine-fast.cc<|end_filename|>
// nnet2bin/nnet-combine-fast.cc
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/combine-nnet-fast.h"
#include "nnet2/am-nnet.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace kaldi::nnet2;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
const char *usage =
"Using a validation set, compute an optimal combination of a number "
"of\n"
"neural nets (the combination weights are separate for each layer and\n"
"do not have to sum to one). The optimization is BFGS, which is "
"initialized\n"
"from the best of the individual input neural nets (or as specified "
"by\n"
"--initial-model)\n"
"\n"
"Usage: nnet-combine-fast [options] <model-in1> <model-in2> ... "
"<model-inN> <valid-examples-in> <model-out>\n"
"\n"
"e.g.:\n"
" nnet-combine-fast 1.1.nnet 1.2.nnet 1.3.nnet ark:valid.egs 2.nnet\n"
"Caution: the first input neural net must not be a gradient.\n";
bool binary_write = true;
NnetCombineFastConfig combine_config;
std::string use_gpu = "yes";
ParseOptions po(usage);
po.Register("binary", &binary_write, "Write output in binary mode");
po.Register("use-gpu", &use_gpu,
"yes|no|optional, only has effect if compiled with CUDA");
combine_config.Register(&po);
po.Read(argc, argv);
if (po.NumArgs() < 3) {
po.PrintUsage();
exit(1);
}
std::string nnet1_rxfilename = po.GetArg(1),
valid_examples_rspecifier = po.GetArg(po.NumArgs() - 1),
nnet_wxfilename = po.GetArg(po.NumArgs());
#if HAVE_CUDA == 1
if (combine_config.num_threads == 1)
CuDevice::Instantiate().SelectGpuId(use_gpu);
#endif
TransitionModel trans_model;
AmNnet am_nnet1;
{
bool binary_read;
Input ki(nnet1_rxfilename, &binary_read);
trans_model.Read(ki.Stream(), binary_read);
am_nnet1.Read(ki.Stream(), binary_read);
}
int32 num_nnets = po.NumArgs() - 2;
std::vector<Nnet> nnets(num_nnets);
nnets[0] = am_nnet1.GetNnet();
am_nnet1.GetNnet() = Nnet(); // Clear it to save memory.
for (int32 n = 1; n < num_nnets; n++) {
TransitionModel trans_model;
AmNnet am_nnet;
bool binary_read;
Input ki(po.GetArg(1 + n), &binary_read);
trans_model.Read(ki.Stream(), binary_read);
am_nnet.Read(ki.Stream(), binary_read);
nnets[n] = am_nnet.GetNnet();
}
std::vector<NnetExample> validation_set; // stores validation
// frames.
{ // This block adds samples to "validation_set".
SequentialNnetExampleReader example_reader(valid_examples_rspecifier);
for (; !example_reader.Done(); example_reader.Next())
validation_set.push_back(example_reader.Value());
KALDI_LOG << "Read " << validation_set.size() << " examples from the "
<< "validation set.";
KALDI_ASSERT(validation_set.size() > 0);
}
CombineNnetsFast(combine_config, validation_set, nnets,
&(am_nnet1.GetNnet()));
{
Output ko(nnet_wxfilename, binary_write);
trans_model.Write(ko.Stream(), binary_write);
am_nnet1.Write(ko.Stream(), binary_write);
}
KALDI_LOG << "Finished combining neural nets, wrote model to "
<< nnet_wxfilename;
return (validation_set.size() == 0 ? 1 : 0);
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/featbin/copy-feats-to-sphinx.cc<|end_filename|>
// featbin/copy-feats-to-sphinx.cc
// Copyright 2013 <NAME>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/kaldi-matrix.h"
#include "matrix/matrix-common.h"
#include "matrix/matrix-lib.h"
#include <sys/stat.h>
#if defined(_MSC_VER)
#include <direct.h>
#else
#include <unistd.h>
#endif
#include <stdio.h>
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Save features as Sphinx files:\n"
"Each utterance will be stored as a unique Sphinx file in a specified "
"directory.\n"
"The Sphinx filename will correspond to the utterance-id (key) in the "
"input table, with the specified extension.\n"
"Usage: copy-feats-to-sphinx [options] in-rspecifier\n"
"Example: copy-feats-to-sphinx --output-dir=/tmp/sphinx-features "
"--output-ext=fea scp:feats.scp\n";
ParseOptions po(usage);
std::string dir_out = "./";
std::string ext_out = "mfc";
po.Register("output-ext", &ext_out, "Output extension of sphinx files");
po.Register("output-dir", &dir_out, "Output directory");
po.Read(argc, argv);
if (po.NumArgs() != 1) {
po.PrintUsage();
exit(1);
}
std::string rspecifier = po.GetArg(1);
// check or create output dir:
const char *c = dir_out.c_str();
if (access(c, 0) != 0) {
#if defined(_MSC_VER)
if (_mkdir(c) != 0)
#else
if (mkdir(c, S_IRWXU | S_IRGRP | S_IXGRP) != 0)
#endif
KALDI_ERR << "Could not create output directory: " << dir_out;
}
// write to the sphinx files
int32 num_frames, dim, num_done = 0;
SequentialBaseFloatMatrixReader feats_reader(rspecifier);
for (; !feats_reader.Done(); feats_reader.Next()) {
std::string utt = feats_reader.Key();
const Matrix<BaseFloat> &feats = feats_reader.Value();
num_frames = feats.NumRows(), dim = feats.NumCols();
Matrix<BaseFloat> output(num_frames, dim, kUndefined);
std::stringstream ss;
ss << dir_out << "/" << utt << "." << ext_out;
output.Range(0, num_frames, 0, dim)
.CopyFromMat(feats.Range(0, num_frames, 0, dim));
std::ofstream os(ss.str().c_str(), std::ios::out | std::ios::binary);
WriteSphinx(os, output);
num_done++;
}
KALDI_LOG << num_done
<< " Sphinx feature files generated in the direcory: " << dir_out;
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/tree/build-tree-questions.cc<|end_filename|>
// tree/build-tree-questions.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-math.h"
#include "util/stl-utils.h"
#include "tree/build-tree-questions.h"
#include "tree/build-tree-utils.h"
namespace kaldi {
void QuestionsForKey::Write(std::ostream &os, bool binary) const {
WriteToken(os, binary, "<QuestionsForKey>");
int32 size = initial_questions.size();
WriteBasicType(os, binary, size);
for (int32 i = 0; i < size; i++)
WriteIntegerVector(os, binary, initial_questions[i]);
refine_opts.Write(os, binary);
WriteToken(os, binary, "</QuestionsForKey>");
}
void QuestionsForKey::Read(std::istream &is, bool binary) {
int32 size;
ExpectToken(is, binary, "<QuestionsForKey>");
ReadBasicType(is, binary, &size);
initial_questions.resize(size);
for (int32 i = 0; i < size; i++)
ReadIntegerVector(is, binary, &(initial_questions[i]));
refine_opts.Read(is, binary);
ExpectToken(is, binary, "</QuestionsForKey>");
}
void Questions::Write(std::ostream &os, bool binary) const {
WriteToken(os, binary, "<Questions>");
std::vector<EventKeyType> keys_with_options;
this->GetKeysWithQuestions(&keys_with_options);
for (size_t i = 0; i < keys_with_options.size(); i++) {
EventKeyType key = keys_with_options[i];
WriteToken(os, binary, "<Key>");
WriteBasicType(os, binary, key);
const QuestionsForKey &opts = GetQuestionsOf(key);
opts.Write(os, binary);
}
WriteToken(os, binary, "</Questions>");
}
void Questions::Read(std::istream &is, bool binary) {
// First, clear anything present.
DeletePointers(&key_options_);
key_options_.clear();
key_idx_.clear();
ExpectToken(is, binary, "<Questions>");
std::vector<EventKeyType> keys_with_options;
while (1) {
std::string token;
ReadToken(is, binary, &token);
if (token == "</Questions>")
return;
else {
if (token != "<Key>")
KALDI_ERR << "Questions::Read, expecting <Key>, got " << token;
EventKeyType key;
ReadBasicType(is, binary, &key);
QuestionsForKey opts;
opts.Read(is, binary);
SetQuestionsOf(key, opts);
}
}
}
void Questions::InitRand(const BuildTreeStatsType &stats, int32 num_quest,
int32 num_iters_refine, AllKeysType all_keys_type) {
std::vector<EventKeyType> all_keys;
FindAllKeys(stats, all_keys_type, &all_keys); // get all keys.
if (all_keys_type == kAllKeysUnion) {
KALDI_WARN << "Questions::InitRand(), using union of all keys. This may "
"work depending on how you are building the tree but may "
"crash (depends if you have already ensured that stats "
"currently on the same leaf all share the same set of keys.)";
}
for (size_t i = 0; i < all_keys.size(); i++) {
EventKeyType key = all_keys[i];
std::vector<EventValueType> all_values;
bool b = PossibleValues(key, stats, &all_values); // get possible values.
if (all_keys_type != kAllKeysUnion) KALDI_ASSERT(b);
KALDI_ASSERT(
all_values.size() !=
0); // since key exists in stats, must have some value defined.
QuestionsForKey q_for_key;
q_for_key.refine_opts.num_iters = num_iters_refine;
q_for_key.initial_questions.clear(); // Make sure empty.
if (all_values.size() == 1) { // can have no meaningful questions because
// only 1 possible value. use empty set of
// questions.
; // Do nothing. No questions.
} else {
q_for_key.initial_questions.resize((size_t)num_quest);
for (size_t i = 0; i < (size_t)num_quest; i++) {
std::vector<EventValueType> &this_quest =
q_for_key.initial_questions[i];
for (size_t j = 0; j < all_values.size() / 2; j++)
this_quest.push_back(all_values[RandInt(0, all_values.size() - 1)]);
SortAndUniq(&this_quest);
KALDI_ASSERT(!this_quest.empty());
}
SortAndUniq(&q_for_key.initial_questions); // Ensure unique questions.
}
q_for_key.Check();
SetQuestionsOf(key, q_for_key);
}
}
} // end namespace kaldi
<|start_filename|>tonic-suite/asr/src/featbin/modify-cmvn-stats.cc<|end_filename|>
// featbin/modify-cmvn-stats.cc
// Copyright 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/kaldi-matrix.h"
#include "transform/cmvn.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Copy cepstral mean/variance stats so that some dimensions have 'fake' "
"stats\n"
"that will skip normalization\n"
"Copy features [and possibly change format]\n"
"Usage: modify-cmvn-stats <fake-dims> <in-rspecifier> "
"<out-wspecifier>\n"
"e.g.: modify-cmvn-stats 13:14:15 ark:- ark:-\n"
"See also: compute-cmvn-stats\n";
ParseOptions po(usage);
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
int32 num_done = 0;
std::string skip_dims_str = po.GetArg(1), rspecifier = po.GetArg(2),
wspecifier = po.GetArg(3);
std::vector<int32> skip_dims;
if (!SplitStringToIntegers(skip_dims_str, ":", false, &skip_dims)) {
KALDI_ERR << "Bad first argument (should be colon-separated list of "
<< "integers)";
}
SequentialDoubleMatrixReader reader(rspecifier);
DoubleMatrixWriter writer(wspecifier);
for (; !reader.Done(); reader.Next()) {
Matrix<double> mat(reader.Value());
if (mat.NumRows() != 2)
KALDI_ERR << "Expected input to be CMVN stats (should have two rows)";
int32 dim = mat.NumCols() - 1;
double count = mat(0, dim);
for (size_t i = 0; i < skip_dims.size(); i++) {
int32 d = skip_dims[i];
if (!(d >= 0 && d < dim))
KALDI_ERR << "Bad entry " << d << " in list of fake dims; "
<< "feature dim is " << dim;
mat(0, d) = 0.0; // zero 'x' stats.
mat(1, d) = count;  // 'x^2' stats equal to count, implying unit variance.
}
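// Sanity check on the 'fake' stats above: for a faked dimension d the implied
// per-dim statistics are mean = x / count = 0 and
// var = x^2 / count - mean^2 = count / count - 0 = 1, so CMVN maps
// feat -> (feat - 0) / sqrt(1) = feat and that dimension passes through
// unchanged.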
writer.Write(reader.Key(), mat);
num_done++;
}
KALDI_LOG << "Modified " << num_done << " sets of stats.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-fix.h<|end_filename|>
// nnet2/nnet-fix.h
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_NNET2_NNET_FIX_H_
#define KALDI_NNET2_NNET_FIX_H_
#include "nnet2/nnet-nnet.h"
namespace kaldi {
namespace nnet2 {
/* This header provides a function FixNnet(), and associated config, which
is responsible for fixing certain pathologies in a neural network during
training.
For Sigmoid/Tanh units: it identifies neurons whose parameters are getting
so large that they are maxing out the sigmoid, and scales down those
parameters by a specified factor. It also identifies neurons with the
opposite pathology, i.e. that they are just in the linear part of the
sigmoid, and scales up their parameters.
For ReLU (rectified linear) units, it identifies neurons that are always
zero or close to zero, re-randomizes the corresponding parameters, and
increases the bias.
*/
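// A minimal usage sketch (illustrative; 'po' (a ParseOptions) and 'nnet' (an
// Nnet) are assumed to exist in the caller):
//
//   NnetFixConfig fix_config;
//   fix_config.Register(&po);   // exposes --min-average-deriv, --max-average-deriv,
//                               // --parameter-factor, --relu-bias-change
//   po.Read(argc, argv);
//   FixNnet(fix_config, &nnet); // rescale saturated/overly-linear sigmoid/tanh
//                               // units, bump the bias of dead ReLUs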
struct NnetFixConfig {
BaseFloat min_average_deriv; // Minimum average derivative that we allow,
// as a proportion of the maximum derivative of the nonlinearity (1.0 for
// tanh, 0.25 for sigmoid).
// If average derivative is less, we scale up the parameters.
BaseFloat max_average_deriv; // Maximum average derivative that we allow,
// also expressed relative to the maximum derivative of the nonlinearity.
BaseFloat
parameter_factor; // Factor (>1.0) by which we change the parameters if
                         // they exceed the bounds above.
BaseFloat relu_bias_change; // Change in bias for relus that are usually
// close to zero.
NnetFixConfig()
: min_average_deriv(0.1),
max_average_deriv(0.75),
parameter_factor(2.0),
relu_bias_change(1.0) {}
void Register(OptionsItf *po) {
po->Register(
"min-average-deriv", &min_average_deriv,
"Miniumum derivative, "
"averaged over the training data, that we allow for a nonlinearity,"
"expressed relative to the maximum derivative of the nonlinearity,"
"i.e. 1.0 for tanh or 0.25 for sigmoid, 1.0 for rectified linear.");
po->Register(
"max-average-deriv", &max_average_deriv,
"Maximum derivative, "
"averaged over the training data, that we allow for the nonlinearity "
"associated with one neuron.");
po->Register("parameter-factor", ¶meter_factor,
"Maximum factor by which we change "
"the set of parameters associated with a neuron.");
po->Register("relu-bias-change", &relu_bias_change,
"For ReLUs, change in bias when "
"we identify one that's too frequently zero.");
}
};
void FixNnet(const NnetFixConfig &config, Nnet *nnet);
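// Illustrative sketch of how FixNnet() might be called during training; the
// surrounding model handling is an assumption for illustration, not something
// defined in this header:
//
//   NnetFixConfig fix_config;   // typically filled in via Register()/ParseOptions
//   Nnet *nnet = ...;           // network obtained elsewhere, e.g. from an AmNnet
//   FixNnet(fix_config, nnet);  // rescales / re-randomizes pathological neurons in place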
} // namespace nnet2
} // namespace kaldi
#endif // KALDI_NNET2_NNET_FIX_H_
<|start_filename|>tonic-suite/asr/src/bin/duplicate-matrix.cc<|end_filename|>
// bin/duplicate-matrix.cc
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/kaldi-matrix.h"
#include "transform/transform-common.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Copy tables of BaseFloat matrices, from one input to possibly "
"multiple outputs,\n"
"with each element of the input written too all outputs.\n"
"\n"
"Usage: duplicate-matrix [options] <matrix-rspecifier> "
"<matrix-wspecifier1> [<matrix-wspecifier2> ...]\n";
ParseOptions po(usage);
po.Read(argc, argv);
if (po.NumArgs() < 2) {
po.PrintUsage();
exit(1);
}
std::string matrix_rspecifier = po.GetArg(1);
SequentialBaseFloatMatrixReader matrix_reader(matrix_rspecifier);
std::vector<BaseFloatMatrixWriter> writers(po.NumArgs() - 1);
for (size_t i = 0; i < writers.size(); i++)
      if (!writers[i].Open(po.GetArg(i + 2)))  // wspecifiers start at arg 2.
        KALDI_ERR << "Error opening table for writing with wspecifier \""
                  << po.GetArg(i + 2) << '"';
int32 num_done = 0;
for (; !matrix_reader.Done(); matrix_reader.Next(), num_done++)
for (size_t i = 0; i < writers.size(); i++)
writers[i].Write(matrix_reader.Key(), matrix_reader.Value());
KALDI_LOG << "Copied " << num_done << " matrices to " << writers.size()
<< " outputs.";
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/bin/build-tree.cc<|end_filename|>
// bin/build-tree.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/hmm-topology.h"
#include "tree/context-dep.h"
#include "tree/build-tree.h"
#include "tree/build-tree-utils.h"
#include "tree/clusterable-classes.h"
#include "util/text-utils.h"
int main(int argc, char *argv[]) {
using namespace kaldi;
try {
using namespace kaldi;
typedef kaldi::int32 int32;
const char *usage =
"Train decision tree\n"
"Usage: build-tree [options] <tree-stats-in> <roots-file> "
"<questions-file> <topo-file> <tree-out>\n"
"e.g.: \n"
" build-tree treeacc roots.txt 1.qst topo tree\n";
bool binary = true;
int32 P = 1, N = 3;
BaseFloat thresh = 300.0;
BaseFloat cluster_thresh = -1.0; // negative means use smallest split in
// splitting phase as thresh.
int32 max_leaves = 0;
std::string occs_out_filename;
ParseOptions po(usage);
po.Register("binary", &binary, "Write output in binary mode");
po.Register("context-width", &N,
"Context window size [must match "
"acc-tree-stats]");
po.Register("central-position", &P,
"Central position in context window "
"[must match acc-tree-stats]");
po.Register("max-leaves", &max_leaves,
"Maximum number of leaves to be "
"used in tree-buliding (if positive)");
po.Register("thresh", &thresh,
"Log-likelihood change threshold for "
"tree-building");
po.Register("cluster-thresh", &cluster_thresh,
"Log-likelihood change "
"threshold for clustering after tree-building. 0 means "
"no clustering; -1 means use as a clustering threshold the "
"likelihood change of the final split.");
po.Read(argc, argv);
if (po.NumArgs() != 5) {
po.PrintUsage();
exit(1);
}
std::string stats_filename = po.GetArg(1), roots_filename = po.GetArg(2),
questions_filename = po.GetArg(3), topo_filename = po.GetArg(4),
tree_out_filename = po.GetArg(5);
// Following 2 variables derived from roots file.
// phone_sets is sets of phones that share their roots.
// Just one phone each for normal systems.
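    // (For illustration, a roots-file line typically looks like
    // "shared split 1 2 3", meaning phones 1, 2 and 3 share a root that may be
    // split; see ReadRootsFile for the exact format.)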
std::vector<std::vector<int32> > phone_sets;
std::vector<bool> is_shared_root;
std::vector<bool> is_split_root;
{
Input ki(roots_filename.c_str());
ReadRootsFile(ki.Stream(), &phone_sets, &is_shared_root, &is_split_root);
}
HmmTopology topo;
ReadKaldiObject(topo_filename, &topo);
BuildTreeStatsType stats;
{
bool binary_in;
GaussClusterable gc; // dummy needed to provide type.
Input ki(stats_filename, &binary_in);
ReadBuildTreeStats(ki.Stream(), binary_in, gc, &stats);
}
KALDI_LOG << "Number of separate statistics is " << stats.size();
Questions qo;
{
bool binary_in;
try {
Input ki(questions_filename, &binary_in);
qo.Read(ki.Stream(), binary_in);
} catch (const std::exception &e) {
KALDI_ERR << "Error reading questions file " << questions_filename
<< ", error is: " << e.what();
}
}
std::vector<int32> phone2num_pdf_classes;
topo.GetPhoneToNumPdfClasses(&phone2num_pdf_classes);
EventMap *to_pdf = NULL;
//////// Build the tree. ////////////
to_pdf =
BuildTree(qo, phone_sets, phone2num_pdf_classes, is_shared_root,
is_split_root, stats, thresh, max_leaves, cluster_thresh, P);
{ // This block is to warn about low counts.
std::vector<BuildTreeStatsType> split_stats;
SplitStatsByMap(stats, *to_pdf, &split_stats);
for (size_t i = 0; i < split_stats.size(); i++)
if (SumNormalizer(split_stats[i]) < 100.0)
KALDI_VLOG(1) << "For pdf-id " << i << ", low count "
<< SumNormalizer(split_stats[i]);
}
ContextDependency ctx_dep(N, P, to_pdf); // takes ownership
// of pointer "to_pdf", so set it NULL.
to_pdf = NULL;
WriteKaldiObject(ctx_dep, tree_out_filename, binary);
{ // This block is just doing some checks.
std::vector<int32> all_phones;
for (size_t i = 0; i < phone_sets.size(); i++)
all_phones.insert(all_phones.end(), phone_sets[i].begin(),
phone_sets[i].end());
SortAndUniq(&all_phones);
if (all_phones != topo.GetPhones()) {
std::ostringstream ss;
WriteIntegerVector(ss, false, all_phones);
ss << " vs. ";
WriteIntegerVector(ss, false, topo.GetPhones());
KALDI_WARN << "Mismatch between phone sets provided in roots file, and "
"those in topology: " << ss.str();
}
std::vector<int32> phones_vec; // phones we saw.
PossibleValues(P, stats, &phones_vec); // function in build-tree-utils.h
std::vector<int32> unseen_phones; // diagnostic.
for (size_t i = 0; i < all_phones.size(); i++)
if (!std::binary_search(phones_vec.begin(), phones_vec.end(),
all_phones[i]))
unseen_phones.push_back(all_phones[i]);
for (size_t i = 0; i < phones_vec.size(); i++)
if (!std::binary_search(all_phones.begin(), all_phones.end(),
phones_vec[i]))
KALDI_ERR << "Phone " << (phones_vec[i])
<< " appears in stats but is not listed in roots file.";
if (!unseen_phones.empty()) {
std::ostringstream ss;
for (size_t i = 0; i < unseen_phones.size(); i++)
ss << unseen_phones[i] << ' ';
// Note, unseen phones is just a warning as in certain kinds of
// systems, this can be OK (e.g. where phone encodes position and
// stress information).
KALDI_WARN << "Saw no stats for following phones: " << ss.str();
}
}
KALDI_LOG << "Wrote tree";
DeleteBuildTreeStats(&stats);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-component.h<|end_filename|>
// nnet2/nnet-component.h
// Copyright 2011-2013 <NAME>
// Johns Hopkins University (author: <NAME>)
// 2013 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_NNET2_NNET_COMPONENT_H_
#define KALDI_NNET2_NNET_COMPONENT_H_
#include "base/kaldi-common.h"
#include "itf/options-itf.h"
#include "matrix/matrix-lib.h"
#include "cudamatrix/cu-matrix-lib.h"
#include "thread/kaldi-mutex.h"
#include "nnet2/nnet-precondition-online.h"
#include <iostream>
namespace kaldi {
namespace nnet2 {
/**
* Abstract class, basic element of the network,
* it is a box with defined inputs, outputs,
 * and transformation function interfaces.
 *
 * It is able to propagate and backpropagate;
 * the exact implementation is provided by descendants.
*
*/
class Component {
public:
Component() : index_(-1) {}
virtual std::string Type()
const = 0; // each type should return a string such as
// "SigmoidComponent".
/// Returns the index in the sequence of layers in the neural net; intended
/// only
/// to be used in debugging information.
virtual int32 Index() const { return index_; }
virtual void SetIndex(int32 index) { index_ = index; }
/// Initialize, typically from a line of a config file. The "args" will
/// contain any parameters that need to be passed to the Component, e.g.
/// dimensions.
virtual void InitFromString(std::string args) = 0;
/// Get size of input vectors
virtual int32 InputDim() const = 0;
/// Get size of output vectors
virtual int32 OutputDim() const = 0;
/// Number of left-context frames the component sees for each output frame;
/// nonzero only for splicing layers.
virtual int32 LeftContext() const { return 0; }
/// Number of right-context frames the component sees for each output frame;
/// nonzero only for splicing layers.
virtual int32 RightContext() const { return 0; }
/// Perform forward pass propagation Input->Output. Each row is
/// one frame or training example. Interpreted as "num_chunks"
/// equally sized chunks of frames; this only matters for layers
/// that do things like context splicing. Typically this variable
/// will either be 1 (when we're processing a single contiguous
/// chunk of data) or will be the same as in.NumFrames(), but
/// other values are possible if some layers do splicing.
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const = 0;
/// Perform backward pass propagation of the derivative, and
/// also either update the model (if to_update == this) or
/// update another model or compute the model derivative (otherwise).
/// Note: in_value and out_value are the values of the input and output
/// of the component, and these may be dummy variables if respectively
/// BackpropNeedsInput() or BackpropNeedsOutput() return false for
/// that component (not all components need these).
///
/// num_chunks lets us treat the input matrix as n contiguous-in-time
/// chunks of equal size; it only matters if splicing is involved.
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const = 0;
virtual bool BackpropNeedsInput() const {
return true;
} // if this returns false,
// the "in_value" to Backprop may be a dummy variable.
virtual bool BackpropNeedsOutput() const {
return true;
} // if this returns false,
// the "out_value" to Backprop may be a dummy variable.
/// Read component from stream
static Component *ReadNew(std::istream &is, bool binary);
/// Copy component (deep copy).
virtual Component *Copy() const = 0;
/// Initialize the Component from one line that will contain
/// first the type, e.g. SigmoidComponent, and then
/// a number of tokens (typically integers or floats) that will
/// be used to initialize the component.
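  /// (For illustration, an initializer line might look like
  /// "SigmoidComponent dim=1024"; the exact options accepted are whatever the
  /// component's InitFromString() parses.)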
static Component *NewFromString(const std::string &initializer_line);
/// Return a new Component of the given type e.g. "SoftmaxComponent",
/// or NULL if no such type exists.
static Component *NewComponentOfType(const std::string &type);
virtual void Read(std::istream &is, bool binary) = 0; // This Read function
// requires that the Component has the correct type.
/// Write component to stream
virtual void Write(std::ostream &os, bool binary) const = 0;
virtual std::string Info() const;
virtual ~Component() {}
private:
int32 index_;
KALDI_DISALLOW_COPY_AND_ASSIGN(Component);
};
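// Illustrative sketch of the Propagate()/Backprop() calling convention
// described above; the component choice and sizes here are just an example:
//
//   SigmoidComponent sigmoid(100);                    // 100-dim nonlinearity
//   CuMatrix<BaseFloat> in(8, 100), out;              // 8 frames, one chunk
//   sigmoid.Propagate(in, 1 /* num_chunks */, &out);  // out becomes 8 x 100
//   CuMatrix<BaseFloat> out_deriv(out), in_deriv;     // objf derivative w.r.t. output
//   sigmoid.Backprop(in, out, out_deriv, 1, &sigmoid, &in_deriv);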
/**
* Class UpdatableComponent is a Component which has
* trainable parameters and contains some global
* parameters for stochastic gradient descent
* (learning rate, L2 regularization constant).
* This is a base-class for Components with parameters.
*/
class UpdatableComponent : public Component {
public:
UpdatableComponent(const UpdatableComponent &other)
: learning_rate_(other.learning_rate_) {}
void Init(BaseFloat learning_rate) { learning_rate_ = learning_rate; }
UpdatableComponent(BaseFloat learning_rate) { Init(learning_rate); }
/// Set parameters to zero, and if treat_as_gradient is true, we'll be
/// treating this as a gradient so set the learning rate to 1 and make any
/// other changes necessary (there's a variable we have to set for the
/// MixtureProbComponent).
virtual void SetZero(bool treat_as_gradient) = 0;
UpdatableComponent() : learning_rate_(0.001) {}
virtual ~UpdatableComponent() {}
/// Here, "other" is a component of the same specific type. This
/// function computes the dot product in parameters, and is computed while
/// automatically adjusting learning rates; typically, one of the two will
/// actually contain the gradient.
virtual BaseFloat DotProduct(const UpdatableComponent &other) const = 0;
/// We introduce a new virtual function that only applies to
/// class UpdatableComponent. This is used in testing.
virtual void PerturbParams(BaseFloat stddev) = 0;
/// This new virtual function scales the parameters
/// by this amount.
virtual void Scale(BaseFloat scale) = 0;
/// This new virtual function adds the parameters of another
/// updatable component, times some constant, to the current
/// parameters.
virtual void Add(BaseFloat alpha, const UpdatableComponent &other) = 0;
/// Sets the learning rate of gradient descent
void SetLearningRate(BaseFloat lrate) { learning_rate_ = lrate; }
/// Gets the learning rate of gradient descent
BaseFloat LearningRate() const { return learning_rate_; }
virtual std::string Info() const;
// The next few functions are not implemented everywhere; they are
// intended for use by L-BFGS code, and we won't implement them
// for all child classes.
/// The following new virtual function returns the total dimension of
/// the parameters in this class. E.g. used for L-BFGS update
virtual int32 GetParameterDim() const {
KALDI_ASSERT(0);
return 0;
}
/// Turns the parameters into vector form. We put the vector form on the CPU,
/// because in the kinds of situations where we do this, we'll tend to use
/// too much memory for the GPU.
virtual void Vectorize(VectorBase<BaseFloat> *params) const {
KALDI_ASSERT(0);
}
/// Converts the parameters from vector form.
  virtual void UnVectorize(const VectorBase<BaseFloat> &params) {
KALDI_ASSERT(0);
}
protected:
BaseFloat learning_rate_; ///< learning rate (0.0..0.01)
private:
const UpdatableComponent &operator=(
const UpdatableComponent &other); // Disallow.
};
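// Illustrative sketch of the UpdatableComponent interface above (an example,
// not code from this library): accumulating a gradient into a zeroed copy.
//
//   // given some UpdatableComponent *comp:
//   UpdatableComponent *grad =
//       dynamic_cast<UpdatableComponent*>(comp->Copy());
//   grad->SetZero(true);                      // treat_as_gradient == true
//   // ... Backprop() calls with to_update == grad accumulate into it ...
//   BaseFloat dot = comp->DotProduct(*grad);  // e.g. for diagnostics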
/// Augments a scalar variable with powers of itself, e.g. x => {x, x^2}.
class PowerExpandComponent : public Component {
public:
void Init(int32 dim, int32 max_power = 2, BaseFloat higher_power_scale = 1.0);
explicit PowerExpandComponent(int32 dim, int32 max_power = 2,
BaseFloat higher_power_scale = 1.0) {
Init(dim, max_power, higher_power_scale);
}
PowerExpandComponent()
: input_dim_(0), max_power_(2), higher_power_scale_(1.0) {}
virtual std::string Type() const { return "PowerExpandComponent"; }
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return input_dim_; }
virtual int32 OutputDim() const { return max_power_ * input_dim_; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &, // out_value
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual Component *Copy() const {
return new PowerExpandComponent(input_dim_, max_power_,
higher_power_scale_);
}
virtual void Read(std::istream &is, bool binary); // This Read function
// requires that the Component has the correct type.
/// Write component to stream
virtual void Write(std::ostream &os, bool binary) const;
virtual std::string Info() const;
private:
int32 input_dim_;
int32 max_power_;
BaseFloat higher_power_scale_; // Scale put on all powers
// except the first one.
};
/// This kind of Component is a base-class for things like
/// sigmoid and softmax.
class NonlinearComponent : public Component {
public:
void Init(int32 dim) {
dim_ = dim;
count_ = 0.0;
}
explicit NonlinearComponent(int32 dim) { Init(dim); }
NonlinearComponent() : dim_(0) {} // e.g. prior to Read().
explicit NonlinearComponent(const NonlinearComponent &other);
virtual int32 InputDim() const { return dim_; }
virtual int32 OutputDim() const { return dim_; }
/// We implement InitFromString at this level.
virtual void InitFromString(std::string args);
/// We implement Read at this level as it just needs the Type().
virtual void Read(std::istream &is, bool binary);
/// Write component to stream.
virtual void Write(std::ostream &os, bool binary) const;
void Scale(BaseFloat scale); // relates to scaling stats, not parameters.
void Add(BaseFloat alpha, const NonlinearComponent &other); // relates to
// adding stats
// The following functions are unique to NonlinearComponent.
// They mostly relate to diagnostics.
const CuVector<double> &ValueSum() const { return value_sum_; }
const CuVector<double> &DerivSum() const { return deriv_sum_; }
double Count() const { return count_; }
// The following function is used when "widening" neural networks.
void SetDim(int32 dim);
protected:
friend class NormalizationComponent;
friend class SigmoidComponent;
friend class TanhComponent;
friend class SoftmaxComponent;
friend class RectifiedLinearComponent;
friend class SoftHingeComponent;
// This function updates the stats "value_sum_", "deriv_sum_", and
// count_. (If deriv == NULL, it won't update "deriv_sum_").
// It will be called from the Backprop function of child classes.
void UpdateStats(const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> *deriv = NULL);
const NonlinearComponent &operator=(
const NonlinearComponent &other); // Disallow.
int32 dim_;
CuVector<double> value_sum_; // stats at the output.
CuVector<double>
deriv_sum_; // stats of the derivative of the nonlinearity (only
// applicable to element-by-element nonlinearities, not Softmax.
double count_;
};
class MaxoutComponent : public Component {
public:
void Init(int32 input_dim, int32 output_dim);
explicit MaxoutComponent(int32 input_dim, int32 output_dim) {
Init(input_dim, output_dim);
}
MaxoutComponent() : input_dim_(0), output_dim_(0) {}
virtual std::string Type() const { return "MaxoutComponent"; }
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return input_dim_; }
virtual int32 OutputDim() const { return output_dim_; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &, // out_value
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual Component *Copy() const {
return new MaxoutComponent(input_dim_, output_dim_);
}
virtual void Read(std::istream &is, bool binary); // This Read function
// requires that the Component has the correct type.
/// Write component to stream
virtual void Write(std::ostream &os, bool binary) const;
virtual std::string Info() const;
protected:
int32 input_dim_;
int32 output_dim_;
};
class PnormComponent : public Component {
public:
void Init(int32 input_dim, int32 output_dim, BaseFloat p);
explicit PnormComponent(int32 input_dim, int32 output_dim, BaseFloat p) {
Init(input_dim, output_dim, p);
}
PnormComponent() : input_dim_(0), output_dim_(0), p_(0) {}
virtual std::string Type() const { return "PnormComponent"; }
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return input_dim_; }
virtual int32 OutputDim() const { return output_dim_; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &, // out_value
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual Component *Copy() const {
return new PnormComponent(input_dim_, output_dim_, p_);
}
virtual void Read(std::istream &is, bool binary); // This Read function
// requires that the Component has the correct type.
/// Write component to stream
virtual void Write(std::ostream &os, bool binary) const;
virtual std::string Info() const;
protected:
int32 input_dim_;
int32 output_dim_;
BaseFloat p_;
};
class NormalizeComponent : public NonlinearComponent {
public:
explicit NormalizeComponent(int32 dim) : NonlinearComponent(dim) {}
explicit NormalizeComponent(const NormalizeComponent &other)
: NonlinearComponent(other) {}
NormalizeComponent() {}
virtual std::string Type() const { return "NormalizeComponent"; }
virtual Component *Copy() const { return new NormalizeComponent(*this); }
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
private:
NormalizeComponent &operator=(const NormalizeComponent &other); // Disallow.
static const BaseFloat kNormFloor;
// about 0.7e-20. We need a value that's exactly representable in
// float and whose inverse square root is also exactly representable
// in float (hence, an even power of two).
};
class SigmoidComponent : public NonlinearComponent {
public:
explicit SigmoidComponent(int32 dim) : NonlinearComponent(dim) {}
explicit SigmoidComponent(const SigmoidComponent &other)
: NonlinearComponent(other) {}
SigmoidComponent() {}
virtual std::string Type() const { return "SigmoidComponent"; }
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual Component *Copy() const { return new SigmoidComponent(*this); }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
private:
SigmoidComponent &operator=(const SigmoidComponent &other); // Disallow.
};
class TanhComponent : public NonlinearComponent {
public:
explicit TanhComponent(int32 dim) : NonlinearComponent(dim) {}
explicit TanhComponent(const TanhComponent &other)
: NonlinearComponent(other) {}
TanhComponent() {}
virtual std::string Type() const { return "TanhComponent"; }
virtual Component *Copy() const { return new TanhComponent(*this); }
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &, // in_value
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
private:
TanhComponent &operator=(const TanhComponent &other); // Disallow.
};
/// Take the absolute values of an input vector to a power.
/// The derivative for zero input will be treated as zero.
class PowerComponent : public NonlinearComponent {
public:
void Init(int32 dim, BaseFloat power = 2);
explicit PowerComponent(int32 dim, BaseFloat power = 2) { Init(dim, power); }
PowerComponent() : dim_(0), power_(2) {}
virtual std::string Type() const { return "PowerComponent"; }
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return dim_; }
virtual int32 OutputDim() const { return dim_; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &, // out_value
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual Component *Copy() const { return new PowerComponent(dim_, power_); }
virtual void Read(std::istream &is, bool binary); // This Read function
// requires that the Component has the correct type.
/// Write component to stream
virtual void Write(std::ostream &os, bool binary) const;
virtual std::string Info() const;
private:
int32 dim_;
BaseFloat power_;
};
class RectifiedLinearComponent : public NonlinearComponent {
public:
explicit RectifiedLinearComponent(int32 dim) : NonlinearComponent(dim) {}
explicit RectifiedLinearComponent(const RectifiedLinearComponent &other)
: NonlinearComponent(other) {}
RectifiedLinearComponent() {}
virtual std::string Type() const { return "RectifiedLinearComponent"; }
virtual Component *Copy() const {
return new RectifiedLinearComponent(*this);
}
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
private:
RectifiedLinearComponent &operator=(
const RectifiedLinearComponent &other); // Disallow.
};
class SoftHingeComponent : public NonlinearComponent {
public:
explicit SoftHingeComponent(int32 dim) : NonlinearComponent(dim) {}
explicit SoftHingeComponent(const SoftHingeComponent &other)
: NonlinearComponent(other) {}
SoftHingeComponent() {}
virtual std::string Type() const { return "SoftHingeComponent"; }
virtual Component *Copy() const { return new SoftHingeComponent(*this); }
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
private:
SoftHingeComponent &operator=(const SoftHingeComponent &other); // Disallow.
};
// This class scales the input by a specified constant. This is, of course,
// useless, but we use it when we want to change how fast the next layer learns.
// (e.g. a smaller scale will make the next layer learn slower.)
class ScaleComponent : public Component {
public:
explicit ScaleComponent(int32 dim, BaseFloat scale)
: dim_(dim), scale_(scale) {}
explicit ScaleComponent(const ScaleComponent &other)
: dim_(other.dim_), scale_(other.scale_) {}
ScaleComponent() : dim_(0), scale_(0.0) {}
virtual std::string Type() const { return "ScaleComponent"; }
virtual Component *Copy() const { return new ScaleComponent(*this); }
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks, Component *to_update,
CuMatrix<BaseFloat> *in_deriv) const;
virtual int32 InputDim() const { return dim_; }
virtual int32 OutputDim() const { return dim_; }
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
void Init(int32 dim, BaseFloat scale);
virtual void InitFromString(std::string args);
virtual std::string Info() const;
private:
int32 dim_;
BaseFloat scale_;
ScaleComponent &operator=(const ScaleComponent &other); // Disallow.
};
class SumGroupComponent; // Forward declaration.
class AffineComponent; // Forward declaration.
class SoftmaxComponent : public NonlinearComponent {
public:
explicit SoftmaxComponent(int32 dim) : NonlinearComponent(dim) {}
explicit SoftmaxComponent(const SoftmaxComponent &other)
: NonlinearComponent(other) {}
SoftmaxComponent() {}
virtual std::string Type() const {
return "SoftmaxComponent";
} // Make it lower case
// because each type of Component needs a different first letter.
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
void MixUp(int32 num_mixtures, BaseFloat power, BaseFloat min_count,
BaseFloat perturb_stddev, AffineComponent *ac,
SumGroupComponent *sc);
virtual Component *Copy() const { return new SoftmaxComponent(*this); }
private:
SoftmaxComponent &operator=(const SoftmaxComponent &other); // Disallow.
};
class FixedAffineComponent;
// Affine means a linear function plus an offset.
// Note: although this class can be instantiated, it also
// functions as a base-class for more specialized versions of
// AffineComponent.
class AffineComponent : public UpdatableComponent {
friend class SoftmaxComponent; // Friend declaration relates to mixing up.
public:
explicit AffineComponent(const AffineComponent &other);
// The next constructor is used in converting from nnet1.
AffineComponent(const CuMatrixBase<BaseFloat> &linear_params,
const CuVectorBase<BaseFloat> &bias_params,
BaseFloat learning_rate);
virtual int32 InputDim() const { return linear_params_.NumCols(); }
virtual int32 OutputDim() const { return linear_params_.NumRows(); }
void Init(BaseFloat learning_rate, int32 input_dim, int32 output_dim,
BaseFloat param_stddev, BaseFloat bias_stddev);
void Init(BaseFloat learning_rate, std::string matrix_filename);
// The following functions are used for collapsing multiple layers
// together. They return a pointer to a new Component equivalent to
// the sequence of two components. We haven't implemented this for
// FixedLinearComponent yet.
Component *CollapseWithNext(const AffineComponent &next) const;
Component *CollapseWithNext(const FixedAffineComponent &next) const;
Component *CollapseWithPrevious(const FixedAffineComponent &prev) const;
virtual std::string Info() const;
virtual void InitFromString(std::string args);
AffineComponent() : is_gradient_(false) {} // use Init to really initialize.
virtual std::string Type() const { return "AffineComponent"; }
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Scale(BaseFloat scale);
virtual void Add(BaseFloat alpha, const UpdatableComponent &other);
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value, // dummy
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual void SetZero(bool treat_as_gradient);
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
virtual BaseFloat DotProduct(const UpdatableComponent &other) const;
virtual Component *Copy() const;
virtual void PerturbParams(BaseFloat stddev);
// This new function is used when mixing up:
virtual void SetParams(const VectorBase<BaseFloat> &bias,
const MatrixBase<BaseFloat> &linear);
const CuVector<BaseFloat> &BiasParams() { return bias_params_; }
const CuMatrix<BaseFloat> &LinearParams() { return linear_params_; }
virtual int32 GetParameterDim() const;
virtual void Vectorize(VectorBase<BaseFloat> *params) const;
  virtual void UnVectorize(const VectorBase<BaseFloat> &params);
/// This function is for getting a low-rank approximations of this
/// AffineComponent by two AffineComponents.
virtual void LimitRank(int32 dimension, AffineComponent **a,
AffineComponent **b) const;
/// This function is implemented in widen-nnet.cc
void Widen(
int32 new_dimension, BaseFloat param_stddev, BaseFloat bias_stddev,
std::vector<NonlinearComponent *> c2, // will usually have just one
// element.
AffineComponent *c3);
protected:
friend class AffineComponentPreconditionedOnline;
friend class AffineComponentA;
// This function Update() is for extensibility; child classes may override
// this.
virtual void Update(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_deriv) {
UpdateSimple(in_value, out_deriv);
}
// UpdateSimple is used when *this is a gradient. Child classes may
// or may not override this.
virtual void UpdateSimple(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_deriv);
const AffineComponent &operator=(const AffineComponent &other); // Disallow.
CuMatrix<BaseFloat> linear_params_;
CuVector<BaseFloat> bias_params_;
bool is_gradient_; // If true, treat this as just a gradient.
};
/// PiecewiseLinearComponent is a kind of trainable version of the
/// RectifiedLinearComponent, in which each dimension of the nonlinearity has a
/// number of parameters that can be trained. It's of the form
/// alpha + beta x + gamma_1 |x - c_1| + gamma_2 |x - c_2| + ... + gamma_N |x -
/// c_N|
/// where c_1 ... c_N are constants (by default, equally
/// spaced between -1 and 1), and the alpha, beta and gamma quantities are
/// trainable.
/// (Each dimension has separate alpha, beta and gamma quantities).
/// We require that N be odd so that the "middle" gamma quantity corresponds
/// to zero; this is for convenience of initialization so that it corresponds
/// to ReLus.
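/// (For instance, with alpha = 0, beta = 0.5, the middle gamma (at c = 0) set
/// to 0.5 and all other gammas zero, the function is 0.5 x + 0.5 |x| =
/// max(0, x), i.e. exactly a ReLU.)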
class PiecewiseLinearComponent : public UpdatableComponent {
public:
explicit PiecewiseLinearComponent(const PiecewiseLinearComponent &other);
virtual int32 InputDim() const { return params_.NumRows(); }
virtual int32 OutputDim() const { return params_.NumRows(); }
void Init(int32 dim, int32 N, BaseFloat learning_rate, BaseFloat max_change);
virtual std::string Info() const;
virtual void InitFromString(std::string args);
PiecewiseLinearComponent()
: is_gradient_(false),
max_change_(0.0) {} // use Init to really initialize.
virtual std::string Type() const { return "PiecewiseLinearComponent"; }
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Scale(BaseFloat scale);
virtual void Add(BaseFloat alpha, const UpdatableComponent &other);
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value, // dummy
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual void SetZero(bool treat_as_gradient);
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
virtual BaseFloat DotProduct(const UpdatableComponent &other) const;
virtual Component *Copy() const;
virtual void PerturbParams(BaseFloat stddev);
const CuMatrix<BaseFloat> &Params() { return params_; }
virtual int32 GetParameterDim() const;
virtual void Vectorize(VectorBase<BaseFloat> *params) const;
  virtual void UnVectorize(const VectorBase<BaseFloat> &params);
protected:
const PiecewiseLinearComponent &operator=(
const PiecewiseLinearComponent &other); // Disallow.
CuMatrix<BaseFloat> params_;
bool is_gradient_; // If true, treat this as just a gradient.
BaseFloat max_change_; // If nonzero, maximum change allowed per individual
// parameter per minibatch.
};
// This is an idea Dan is trying out, a little bit like
// preconditioning the update with the Fisher matrix, but the
// Fisher matrix has a special structure.
// [note: it is currently used in the standard recipe].
class AffineComponentPreconditioned : public AffineComponent {
public:
virtual std::string Type() const { return "AffineComponentPreconditioned"; }
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
void Init(BaseFloat learning_rate, int32 input_dim, int32 output_dim,
BaseFloat param_stddev, BaseFloat bias_stddev, BaseFloat alpha,
BaseFloat max_change);
void Init(BaseFloat learning_rate, BaseFloat alpha, BaseFloat max_change,
std::string matrix_filename);
virtual void InitFromString(std::string args);
virtual std::string Info() const;
virtual Component *Copy() const;
AffineComponentPreconditioned() : alpha_(1.0), max_change_(0.0) {}
void SetMaxChange(BaseFloat max_change) { max_change_ = max_change; }
protected:
KALDI_DISALLOW_COPY_AND_ASSIGN(AffineComponentPreconditioned);
BaseFloat alpha_;
BaseFloat max_change_; // If > 0, this is the maximum amount of parameter
// change (in L2 norm)
// that we allow per minibatch. This was introduced in order to
// control instability. Instead of the exact L2 parameter change,
// for efficiency purposes we limit a bound on the exact change.
// The limit is applied via a constant <= 1.0 for each minibatch,
// A suitable value might be, for example, 10 or so; larger if there are
// more parameters.
/// The following function is only called if max_change_ > 0. It returns the
/// greatest value alpha <= 1.0 such that (alpha times the sum over the
  /// row-index of the two matrices of the product of the l2 norms of the two rows
/// times learning_rate_)
/// is <= max_change.
BaseFloat GetScalingFactor(const CuMatrix<BaseFloat> &in_value_precon,
const CuMatrix<BaseFloat> &out_deriv_precon);
virtual void Update(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_deriv);
};
/// AffineComponentPreconditionedOnline is, like AffineComponentPreconditioned,
/// a version of AffineComponent that has a non-(multiple of unit) learning-rate
/// matrix. See nnet-precondition-online.h for a description of the technique.
/// This method maintains an orthogonal matrix N with a small number of rows,
/// actually two (for input and output dims) which gets modified each time;
/// we maintain a mutex for access to this (we just use it to copy it when
/// we need it and write to it when we change it). For multi-threaded use,
/// the parallelization method is to lock a mutex whenever we want to
/// read N or change it, but just quickly make a copy and release the mutex;
/// this is to ensure operations on N are atomic.
class AffineComponentPreconditionedOnline : public AffineComponent {
public:
virtual std::string Type() const {
return "AffineComponentPreconditionedOnline";
}
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
void Init(BaseFloat learning_rate, int32 input_dim, int32 output_dim,
BaseFloat param_stddev, BaseFloat bias_stddev, int32 rank_in,
int32 rank_out, int32 update_period, BaseFloat num_samples_history,
BaseFloat alpha, BaseFloat max_change_per_sample);
void Init(BaseFloat learning_rate, int32 rank_in, int32 rank_out,
int32 update_period, BaseFloat num_samples_history, BaseFloat alpha,
BaseFloat max_change_per_sample, std::string matrix_filename);
// This constructor is used when converting neural networks partway through
// training, from AffineComponent or AffineComponentPreconditioned to
// AffineComponentPreconditionedOnline.
AffineComponentPreconditionedOnline(const AffineComponent &orig,
int32 rank_in, int32 rank_out,
int32 update_period, BaseFloat eta,
BaseFloat alpha);
virtual void InitFromString(std::string args);
virtual std::string Info() const;
virtual Component *Copy() const;
AffineComponentPreconditionedOnline() : max_change_per_sample_(0.0) {}
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(AffineComponentPreconditionedOnline);
// Configs for preconditioner. The input side tends to be better conditioned
// ->
// smaller rank needed, so make them separately configurable.
int32 rank_in_;
int32 rank_out_;
int32 update_period_;
BaseFloat num_samples_history_;
BaseFloat alpha_;
OnlinePreconditioner preconditioner_in_;
OnlinePreconditioner preconditioner_out_;
BaseFloat max_change_per_sample_;
  // If > 0, max_change_per_sample_ is the maximum amount of parameter
// change (in L2 norm) that we allow per sample, averaged over the minibatch.
// This was introduced in order to control instability.
// Instead of the exact L2 parameter change, for
// efficiency purposes we limit a bound on the exact
// change. The limit is applied via a constant <= 1.0
// for each minibatch, A suitable value might be, for
// example, 10 or so; larger if there are more
// parameters.
/// The following function is only called if max_change_per_sample_ > 0, it
/// returns a
/// scaling factor alpha <= 1.0 (1.0 in the normal case) that enforces the
/// "max-change" constraint. "in_products" is the inner product with itself
/// of each row of the matrix of preconditioned input features; "out_products"
/// is the same for the output derivatives. gamma_prod is a product of two
/// scalars that are output by the preconditioning code (for the input and
/// output), which we will need to multiply into the learning rate.
/// out_products is a pointer because we modify it in-place.
BaseFloat GetScalingFactor(const CuVectorBase<BaseFloat> &in_products,
BaseFloat gamma_prod,
CuVectorBase<BaseFloat> *out_products);
// Sets the configs rank, alpha and eta in the preconditioner objects,
// from the class variables.
void SetPreconditionerConfigs();
virtual void Update(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_deriv);
};
/// AffineComponentModified is like AffineComponent, but we are careful about
/// the lengths of rows of the parameter matrix, when we do the update.
/// That means, for a given row, we first do an update along the direction of
/// the existing vector; we then take the update orthogonal to that direction,
/// but keep the length of the vector fixed.
class AffineComponentModified : public AffineComponent {
public:
virtual std::string Type() const { return "AffineComponentModified"; }
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
void Init(BaseFloat learning_rate, int32 input_dim, int32 output_dim,
BaseFloat param_stddev, BaseFloat bias_stddev,
BaseFloat cutoff_length, BaseFloat max_change);
void Init(BaseFloat learning_rate, BaseFloat cutoff_length,
BaseFloat max_change, std::string matrix_filename);
virtual void InitFromString(std::string args);
virtual std::string Info() const;
virtual Component *Copy() const;
AffineComponentModified() : cutoff_length_(10.0), max_change_(0.1) {}
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(AffineComponentModified);
BaseFloat cutoff_length_; /// If the length of the vector corresponding to
/// this row of the parameter matrix is less than this, we just do a regular
/// gradient descent update. This would typically be less than
/// sqrt(InputDim())-- a value smaller than the expected length of the
/// parameter vector.
BaseFloat max_change_; /// [if above the cutoff], this is the maximum
/// change allowed in the vector per minibatch,
/// as a proportion of the previous value. We separately
/// apply this constraint to both the length and direction. Should
/// be less than one, e.g. 0.1 or 0.01.
virtual void Update(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_deriv);
};
class RandomComponent : public Component {
public:
// This function is required in testing code and in other places we need
// consistency in the random number generation (e.g. when optimizing
  // validation-set performance), but check where else we call srand(). You'll
  // need to call srand() as well as making this call.
void ResetGenerator() { random_generator_.SeedGpu(0); }
protected:
CuRand<BaseFloat> random_generator_;
};
struct PreconditionConfig { // relates to AffineComponentA
BaseFloat alpha;
bool do_precondition;
bool renormalize;
PreconditionConfig() : alpha(0.1), do_precondition(true), renormalize(true) {}
void Register(OptionsItf *po) {
po->Register("alpha", &alpha,
"Smoothing constant used in "
"preconditioning of updates.");
po->Register("do-precondition", &do_precondition,
"Controls whether "
"or not preconditioning is applied in the L-BFGS update.");
po->Register(
"renormalize", &renormalize,
"If true, in the preconditioning "
"we renormalize with a scalar so the projected scatter has the "
"same trace as before preconditioning.");
}
};
/**
AffineComponentA is a special type of AffineComponent, that
stores matrices for preconditioning similar to those used
in the update function of AffineComponentPreconditioned. This is
intended for use as a preconditioner in L-BFGS updates.
In this case we optionally store the preconditioning
information with the gradient information, in a separate
copy of the component.
*/
class AffineComponentA : public AffineComponent {
public:
AffineComponentA() {}
virtual std::string Type() const { return "AffineComponentA"; }
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
// There is no Init function for now; we only have the
// ability to initialize from another AffineComponent (or child
// class). This is because we imagine that the L-BFGS training
// will be initialized from a system trained with SGD, for which
// something like AffineComponentPreconditioned will be more
// appropriate; we'll then convert the model.
AffineComponentA(const AffineComponent &component);
// We're not supporting initializing as this type.
virtual void InitFromString(std::string args) { KALDI_ASSERT(0); }
virtual Component *Copy() const;
virtual void Scale(BaseFloat scale);
virtual void Add(BaseFloat alpha, const UpdatableComponent &other);
// Some functions that are specific to this class:
void InitializeScatter(); // Lets the class
// know that it should accumulate the scatter matrix; sets
// up input_scatter_ and output_scatter_.
// This function uses the input_scatter_ and output_scatter_ variables of the
// current class to transform the linear_params_ and bias_params_ variables of
// "component". If forward == true then we transform to the preconditioned
// space; otherwise we transform back from the preconditioned to the canonical
  // space. This is done differently depending on whether component->is_gradient_
  // == true, because gradients and parameters transform differently. The alpha
// value relates to smoothing with the unit matrix; it's not defined in quite
// the same way as for AffineComponentPreconditioned. See the code for
// details.
void Transform(const PreconditionConfig &config, bool forward,
AffineComponent *component);
  // This function uses the input_scatter_ and output_scatter_ variables of the
  // current class to transform the linear_params_ and bias_params_ variables of
// "component". It is equivalent to multiplying by the inverse Fisher,
// or approximate inverse Hessian. It's the operation that you need
// in optimization methods like L-BFGS, to transform from "gradient space"
// into "model space".
// Note: it's not const in this object, because we may cache stuff with the
// model.
// See also the function "PreconditionNnet" in nnet-lbfgs.h, which
// does this at the whole-neural-net level (by calling this function).
void Precondition(const PreconditionConfig &config,
AffineComponent *component);
private:
// The following variables are not used for the actual neural net, but
// only when is_gradient_ == true (when it's being used to store gradients),
CuSpMatrix<double>
input_scatter_; // scatter of (input vectors extended with 1.)
// This is only set up if this->is_gradient = true, and InitializeScatter()
// has been called.
CuSpMatrix<double> output_scatter_;
// The following four quantities may be cached by the function "Transform",
// to avoid duplicating work.
CuTpMatrix<double> in_C_;
CuTpMatrix<double> in_C_inv_;
CuTpMatrix<double> out_C_;
CuTpMatrix<double> out_C_inv_;
// The following two quantities may be cached by the function "Precondition",
// to avoid duplicating work.
CuSpMatrix<double> inv_fisher_in_;
CuSpMatrix<double> inv_fisher_out_;
// This function computes the matrix (and corresponding transpose-ness) that
// we'd left-multiply a vector by when transforming the parameter/gradient
// space.
static void ComputeTransforms(const CuSpMatrix<double> &scatter,
const PreconditionConfig &config,
double tot_count, CuTpMatrix<double> *C,
CuTpMatrix<double> *C_inv);
// This function is called by "Precondition"; it pre-computes
// certain quantities we'll need.
static void ComputePreconditioner(const CuSpMatrix<double> &scatter,
const PreconditionConfig &config,
double tot_count,
CuSpMatrix<double> *inv_fisher);
void ClearPrecomputedQuantities();
// The following update function is called when *this is
// a gradient. We only override this one.
virtual void UpdateSimple(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_deriv);
};
/// Splices a context window of frames together [over time]
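/// (For example, with left_context = 4, right_context = 4 and a 40-dimensional
/// input (const_component_dim = 0), each output frame is the concatenation of
/// 9 input frames, i.e. (4 + 4 + 1) * 40 = 360-dimensional.)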
class SpliceComponent : public Component {
public:
SpliceComponent() {} // called only prior to Read() or Init().
void Init(int32 input_dim, int32 left_context, int32 right_context,
int32 const_component_dim = 0);
virtual std::string Type() const { return "SpliceComponent"; }
virtual std::string Info() const;
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return input_dim_; }
virtual int32 OutputDim() const;
virtual int32 LeftContext() const { return left_context_; }
virtual int32 RightContext() const { return right_context_; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual Component *Copy() const;
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(SpliceComponent);
int32 input_dim_;
int32 left_context_;
int32 right_context_;
int32 const_component_dim_;
};
/// This is as SpliceComponent but outputs the max of
/// any of the inputs (taking the max across time).
class SpliceMaxComponent : public Component {
public:
SpliceMaxComponent() {} // called only prior to Read() or Init().
void Init(int32 dim, int32 left_context, int32 right_context);
virtual std::string Type() const { return "SpliceMaxComponent"; }
virtual std::string Info() const;
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return dim_; }
virtual int32 OutputDim() const { return dim_; }
virtual int32 LeftContext() const { return left_context_; }
virtual int32 RightContext() const { return right_context_; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual Component *Copy() const;
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(SpliceMaxComponent);
int32 dim_;
int32 left_context_;
int32 right_context_;
};
// Affine means a linear function plus an offset. PreconInput means we
// precondition using the inverse of the variance of each dimension of the input
// data. Note that this doesn't take into account any scaling of the samples,
// but this doesn't really matter. This has some relation to AdaGrad, except
// it's being done per input dimension rather than per parameter, and also
// we multiply by a separately supplied and updated learning rate which will
// typically vary with time. Note: avg_samples is the number of samples over
// which we average the variance of the input data.
class AffinePreconInputComponent : public AffineComponent {
public:
void Init(BaseFloat learning_rate, int32 input_dim, int32 output_dim,
BaseFloat param_stddev, BaseFloat bias_stddev,
BaseFloat avg_samples);
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value, // dummy
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
AffinePreconInputComponent() {} // use Init to really initialize.
virtual std::string Type() const { return "AffinePreconInputComponent"; }
virtual void InitFromString(std::string args);
virtual void SetZero(bool treat_as_gradient);
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
virtual Component *Copy() const;
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(AffinePreconInputComponent);
BaseFloat avg_samples_; // Config parameter; determines how many samples
// we average the input feature variance over during training
bool is_gradient_; // Set this to true if we consider this as a gradient.
// In this case we don't do the input preconditioning.
// Note: linear_params_ and bias_params_ are inherited from
// AffineComponent.
CuVector<BaseFloat>
input_precision_; // Inverse variance of input features; used
// to precondition the update.
};
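// Illustrative sketch of what the preconditioning above amounts to (the
// exact update lives in the .cc file): the update to the weights fed by
// input dimension d is effectively scaled by input_precision_(d), i.e. by
// roughly 1 / var(d), where var(d) is the variance of input dimension d
// averaged over approximately avg_samples_ recent samples.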
// Affine means a linear function plus an offset. "Block" means
// here that we support a number of equal-sized blocks of parameters,
// in the linear part, so e.g. 2 x 500 would mean 2 blocks of 500 each.
class BlockAffineComponent : public UpdatableComponent {
public:
virtual int32 InputDim() const {
return linear_params_.NumCols() * num_blocks_;
}
virtual int32 OutputDim() const { return linear_params_.NumRows(); }
virtual int32 GetParameterDim() const;
virtual void Vectorize(VectorBase<BaseFloat> *params) const;
virtual void UnVectorize(const VectorBase<BaseFloat> ¶ms);
// Note: num_blocks must divide input_dim.
void Init(BaseFloat learning_rate, int32 input_dim, int32 output_dim,
BaseFloat param_stddev, BaseFloat bias_stddev, int32 num_blocks);
virtual void InitFromString(std::string args);
BlockAffineComponent() {} // use Init to really initialize.
virtual std::string Type() const { return "BlockAffineComponent"; }
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual void SetZero(bool treat_as_gradient);
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
virtual BaseFloat DotProduct(const UpdatableComponent &other) const;
virtual Component *Copy() const;
virtual void PerturbParams(BaseFloat stddev);
virtual void Scale(BaseFloat scale);
virtual void Add(BaseFloat alpha, const UpdatableComponent &other);
protected:
virtual void Update(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_deriv) {
UpdateSimple(in_value, out_deriv);
}
// UpdateSimple is used when *this is a gradient. Child classes may
// override this.
virtual void UpdateSimple(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_deriv);
// The matrix linear_params_ has a block structure, with num_blocks_ blocks of
// equal size. The blocks are stored in linear_params_ as
// [ M
// N
// O ] but we actually treat it as the matrix:
// [ M 0 0
// 0 N 0
// 0 0 O ]
CuMatrix<BaseFloat> linear_params_;
CuVector<BaseFloat> bias_params_;
int32 num_blocks_;
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(BlockAffineComponent);
};
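// Illustrative worked example of the BlockAffineComponent dimensions: with
// num_blocks_ = 2, input_dim = 1000 and output_dim = 600, linear_params_ is
// stored as a 600 x 500 matrix (two 300 x 500 blocks stacked vertically),
// so InputDim() = 500 * 2 = 1000 and OutputDim() = 600; the effective
// transform is block-diagonal, with rows 0..299 acting on input dims 0..499
// and rows 300..599 acting on input dims 500..999.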
// Affine means a linear function plus an offset. "Block" means
// here that we support a number of equal-sized blocks of parameters,
// in the linear part, so e.g. 2 x 500 would mean 2 blocks of 500 each.
class BlockAffineComponentPreconditioned : public BlockAffineComponent {
public:
// Note: num_blocks must divide input_dim.
void Init(BaseFloat learning_rate, int32 input_dim, int32 output_dim,
BaseFloat param_stddev, BaseFloat bias_stddev, int32 num_blocks,
BaseFloat alpha);
virtual void InitFromString(std::string args);
BlockAffineComponentPreconditioned() {} // use Init to really initialize.
virtual std::string Type() const {
return "BlockAffineComponentPreconditioned";
}
virtual void SetZero(bool treat_as_gradient);
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
virtual Component *Copy() const;
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(BlockAffineComponentPreconditioned);
virtual void Update(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_deriv);
bool is_gradient_;
BaseFloat alpha_;
};
// MixtureProbComponent is a linear transform, but it's kind of a special case.
// It's used to transform probabilities while retaining the sum-to-one
// constraint (after the softmax), so we require nonnegative
// elements that sum to one for each column. In addition, this component
// implements a linear transformation that's a block matrix... not quite
// block diagonal, because the component matrices aren't necessarily square.
// They start off square, but as we mix up, they may get non-square.
//
// From its external interface, i.e. DotProduct(), Scale(), and Backprop(), if
// you use this class in the expected way (e.g. only calling DotProduct()
// between a gradient and the parameters), it behaves as if the parameters were
// stored as unnormalized log-prob and the gradients were taken w.r.t. that
// representation. This is the only way for the Scale() function to make sense.
// In reality, the parameters are stored as probabilities (normalized to sum to
// one for each row).
class MixtureProbComponent : public UpdatableComponent {
public:
virtual int32 InputDim() const { return input_dim_; }
virtual int32 OutputDim() const { return output_dim_; }
void Init(BaseFloat learning_rate, BaseFloat diag_element,
const std::vector<int32> &sizes);
virtual void InitFromString(std::string args);
MixtureProbComponent() {}
virtual void SetZero(bool treat_as_gradient);
virtual std::string Type() const { return "MixtureProbComponent"; }
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
// Note: in_value and out_value are both dummy variables.
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual Component *Copy() const;
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
virtual BaseFloat DotProduct(const UpdatableComponent &other) const;
virtual void Scale(BaseFloat scale);
virtual void Add(BaseFloat alpha, const UpdatableComponent &other);
virtual void PerturbParams(BaseFloat stddev);
virtual int32 GetParameterDim() const;
virtual void Vectorize(VectorBase<BaseFloat> *params) const;
virtual void UnVectorize(const VectorBase<BaseFloat> ¶ms);
private:
void Refresh(); // Refreshes params_ from log_params_.
KALDI_DISALLOW_COPY_AND_ASSIGN(MixtureProbComponent);
std::vector<CuMatrix<BaseFloat> > log_params_; // these are the
// underlying parameters that are subject to gradient descent.
std::vector<CuMatrix<BaseFloat> > params_; // these are derived from
// log_params_.
int32 input_dim_;
int32 output_dim_;
};
// SumGroupComponent is used to sum up groups of posteriors.
// It's used to introduce a kind of Gaussian-mixture-model-like
// idea into neural nets. This is basically a degenerate case of
// MixtureProbComponent; we had to implement it separately to
// be efficient for CUDA (we can use this one regardless of whether
// we have CUDA or not; it's the normal case we want anyway).
class SumGroupComponent : public Component {
public:
virtual int32 InputDim() const { return input_dim_; }
virtual int32 OutputDim() const { return output_dim_; }
void Init(const std::vector<int32> &sizes); // the vector gives, for each
// output dim, the number of inputs (>= 1) that are summed over.
void GetSizes(std::vector<int32> *sizes) const; // Get a vector saying, for
// each output-dim, how many
// inputs were summed over.
virtual void InitFromString(std::string args);
SumGroupComponent() {}
virtual std::string Type() const { return "SumGroupComponent"; }
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
// Note: in_value and out_value are both dummy variables.
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual Component *Copy() const;
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(SumGroupComponent);
// Note: Int32Pair is just struct{ int32 first; int32 second }; it's defined
// in cu-matrixdim.h as extern "C" which is needed for the CUDA interface.
CuArray<Int32Pair> indexes_; // for each output index, the (start, end) input
// index.
CuArray<int32> reverse_indexes_; // for each input index, the output index.
int32 input_dim_;
int32 output_dim_;
};
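// Illustrative example of SumGroupComponent: Init() with sizes = {3, 2}
// gives InputDim() = 5 and OutputDim() = 2, with out[0] = in[0] + in[1] +
// in[2] and out[1] = in[3] + in[4]. Assuming the "end" index is exclusive,
// indexes_ would hold {(0,3), (3,5)} and reverse_indexes_ would hold
// {0, 0, 0, 1, 1}.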
/// PermuteComponent does a permutation of the dimensions (by default, a fixed
/// random permutation, but it may be specified). Useful in conjunction with
/// block-diagonal transforms.
class PermuteComponent : public Component {
public:
void Init(int32 dim);
void Init(const std::vector<int32> &reorder);
PermuteComponent(int32 dim) { Init(dim); }
PermuteComponent(const std::vector<int32> &reorder) { Init(reorder); }
PermuteComponent() {} // e.g. prior to Read() or Init()
virtual int32 InputDim() const { return reorder_.size(); }
virtual int32 OutputDim() const { return reorder_.size(); }
virtual Component *Copy() const;
virtual void InitFromString(std::string args);
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
virtual std::string Type() const { return "PermuteComponent"; }
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &,
const CuMatrixBase<BaseFloat> &,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks, Component *,
CuMatrix<BaseFloat> *in_deriv) const;
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(PermuteComponent);
std::vector<int32> reorder_; // This class sends input dimension i to
// output dimension reorder_[i].
};
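// Illustrative example of PermuteComponent: reorder_ = {2, 0, 1} sends input
// dim 0 to output dim 2, input dim 1 to output dim 0 and input dim 2 to
// output dim 1, so an input row (a, b, c) becomes the output row (b, c, a).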
/// Discrete cosine transform.
/// TODO: modify this Component so that it supports keeping only a subset of
/// the DCT components.
class DctComponent : public Component {
public:
DctComponent() { dim_ = 0; }
virtual std::string Type() const { return "DctComponent"; }
virtual std::string Info() const;
// dim = dimension of vector being processed
// dct_dim = effective length of the DCT, i.e. how many components will be kept
void Init(int32 dim, int32 dct_dim, bool reorder, int32 keep_dct_dim = 0);
// InitFromString takes numeric options
// dim, dct-dim, and (optionally) reorder={true,false}, keep-dct-dim
// Note: reorder defaults to false. keep-dct-dim defaults to dct-dim
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return dim_; }
virtual int32 OutputDim() const {
return dct_mat_.NumRows() * (dim_ / dct_mat_.NumCols());
}
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual Component *Copy() const;
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
private:
void Reorder(CuMatrixBase<BaseFloat> *mat, bool reverse) const;
int32 dim_; // The input dimension of the (sub)vector.
bool reorder_; // If true, transformation matrix we use is not
// block diagonal but is block diagonal after reordering-- so
// effectively we transform with the Kronecker product D x I,
// rather than a matrix with D's on the diagonal (i.e. I x D,
// where x is the Kronecker product). We'll set reorder_ to
// true if we want to use this to transform in the time domain,
// because the SpliceComponent splices blocks of e.g. MFCCs
// together so each time is a dimension of the block.
CuMatrix<BaseFloat> dct_mat_;
KALDI_DISALLOW_COPY_AND_ASSIGN(DctComponent);
};
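// Illustrative worked example of the DctComponent dimensions (a sketch,
// assuming dct_mat_ has keep_dct_dim rows and dct_dim columns): with
// dim = 130 (e.g. 13 coefficients spliced over 10 frames), dct_dim = 10 and
// keep_dct_dim = 5, OutputDim() = 5 * (130 / 10) = 65. Setting reorder=true
// is what makes the DCT act across the 10 frames of each coefficient rather
// than within a single frame.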
/// FixedLinearComponent is a linear transform that is supplied
/// at network initialization time and is not trainable.
class FixedLinearComponent : public Component {
public:
FixedLinearComponent() {}
virtual std::string Type() const { return "FixedLinearComponent"; }
virtual std::string Info() const;
void Init(const CuMatrixBase<BaseFloat> &matrix) { mat_ = matrix; }
// InitFromString takes only the option matrix=<string>,
// where the string is the filename of a Kaldi-format matrix to read.
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return mat_.NumCols(); }
virtual int32 OutputDim() const { return mat_.NumRows(); }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual Component *Copy() const;
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
protected:
friend class AffineComponent;
CuMatrix<BaseFloat> mat_;
KALDI_DISALLOW_COPY_AND_ASSIGN(FixedLinearComponent);
};
/// FixedAffineComponent is an affine transform that is supplied
/// at network initialization time and is not trainable.
class FixedAffineComponent : public Component {
public:
FixedAffineComponent() {}
virtual std::string Type() const { return "FixedAffineComponent"; }
virtual std::string Info() const;
/// The matrix should be of size output-dim by (input-dim + 1); the last
/// column is the offset term.
void Init(const CuMatrixBase<BaseFloat> &matrix);
// InitFromString takes only the option matrix=<string>,
// where the string is the filename of a Kaldi-format matrix to read.
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return linear_params_.NumCols(); }
virtual int32 OutputDim() const { return linear_params_.NumRows(); }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual Component *Copy() const;
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
// Function to provide access to linear_params_.
const CuMatrix<BaseFloat> &LinearParams() const { return linear_params_; }
protected:
friend class AffineComponent;
CuMatrix<BaseFloat> linear_params_;
CuVector<BaseFloat> bias_params_;
KALDI_DISALLOW_COPY_AND_ASSIGN(FixedAffineComponent);
};
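// Illustrative sketch of how FixedAffineComponent::Init() is expected to
// split its argument: given a 100 x 41 matrix, linear_params_ becomes the
// first 40 columns (100 x 40) and bias_params_ the last column (dim 100),
// giving InputDim() = 40 and OutputDim() = 100, so each input vector x of
// dimension 40 is mapped to linear_params_ * x + bias_params_.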
/// FixedScaleComponent applies a fixed per-element scale; it's similar
/// to the Rescale component in the nnet1 setup (and only needed for nnet1
/// model conversion).
class FixedScaleComponent : public Component {
public:
FixedScaleComponent() {}
virtual std::string Type() const { return "FixedScaleComponent"; }
virtual std::string Info() const;
void Init(const CuVectorBase<BaseFloat> &scales);
// InitFromString takes only the option scales=<string>,
// where the string is the filename of a Kaldi-format matrix to read.
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return scales_.Dim(); }
virtual int32 OutputDim() const { return scales_.Dim(); }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual Component *Copy() const;
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
protected:
CuVector<BaseFloat> scales_;
KALDI_DISALLOW_COPY_AND_ASSIGN(FixedScaleComponent);
};
/// FixedBiasComponent applies a fixed per-element bias; it's similar
/// to the AddShift component in the nnet1 setup (and only needed for nnet1
/// model conversion).
class FixedBiasComponent : public Component {
public:
FixedBiasComponent() {}
virtual std::string Type() const { return "FixedBiasComponent"; }
virtual std::string Info() const;
void Init(const CuVectorBase<BaseFloat> &scales);
// InitFromString takes only the option bias=<string>,
// where the string is the filename of a Kaldi-format matrix to read.
virtual void InitFromString(std::string args);
virtual int32 InputDim() const { return bias_.Dim(); }
virtual int32 OutputDim() const { return bias_.Dim(); }
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual Component *Copy() const;
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
protected:
CuVector<BaseFloat> bias_;
KALDI_DISALLOW_COPY_AND_ASSIGN(FixedBiasComponent);
};
/// This Component, if present, randomly zeroes half of
/// the inputs and multiplies the other half by two.
/// Typically you would use this in training but not in
/// test or when computing validation-set objective functions.
class DropoutComponent : public RandomComponent {
public:
/// dropout-proportion is the proportion that is dropped out,
/// e.g. if 0.1, we set 10% to a low value. [note, in
/// some older code it was interpreted as the value not dropped
/// out, so be careful.] The low scale-value
/// is equal to dropout_scale. The high scale-value is chosen
/// such that the expected scale-value is one.
void Init(int32 dim, BaseFloat dropout_proportion = 0.5,
BaseFloat dropout_scale = 0.0);
DropoutComponent(int32 dim, BaseFloat dp = 0.5, BaseFloat sc = 0.0) {
Init(dim, dp, sc);
}
DropoutComponent() : dim_(0), dropout_proportion_(0.5) {}
virtual int32 InputDim() const { return dim_; }
virtual int32 OutputDim() const { return dim_; }
virtual void InitFromString(std::string args);
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
virtual std::string Type() const { return "DropoutComponent"; }
void SetDropoutScale(BaseFloat scale) { dropout_scale_ = scale; }
virtual bool BackpropNeedsInput() const { return true; }
virtual bool BackpropNeedsOutput() const { return true; }
virtual Component *Copy() const;
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const;
virtual std::string Info() const;
private:
int32 dim_;
BaseFloat dropout_proportion_;
BaseFloat dropout_scale_; // The scale applied to the "dropout_proportion_"
// fraction of the neurons (default 0.0, but it can be set arbitrarily close
// to 1.0).
};
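// Illustrative derivation of the "high" scale-value mentioned above: if a
// proportion p = dropout_proportion_ of the inputs is scaled by
// s = dropout_scale_ and the rest by t, requiring an expected scale of one
// gives p * s + (1 - p) * t = 1, i.e. t = (1 - p * s) / (1 - p). With the
// defaults p = 0.5, s = 0.0 this gives t = 2, matching the description of
// zeroing half the inputs and multiplying the other half by two.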
/// This is a bit similar to dropout but adding (not multiplying) Gaussian
/// noise with a given standard deviation.
class AdditiveNoiseComponent : public RandomComponent {
public:
void Init(int32 dim, BaseFloat noise_stddev);
AdditiveNoiseComponent(int32 dim, BaseFloat stddev) { Init(dim, stddev); }
AdditiveNoiseComponent() : dim_(0), stddev_(1.0) {}
virtual int32 InputDim() const { return dim_; }
virtual int32 OutputDim() const { return dim_; }
virtual void InitFromString(std::string args);
virtual void Read(std::istream &is, bool binary);
virtual void Write(std::ostream &os, bool binary) const;
virtual std::string Type() const { return "AdditiveNoiseComponent"; }
virtual bool BackpropNeedsInput() const { return false; }
virtual bool BackpropNeedsOutput() const { return false; }
virtual Component *Copy() const {
return new AdditiveNoiseComponent(dim_, stddev_);
}
virtual void Propagate(const CuMatrixBase<BaseFloat> &in, int32 num_chunks,
CuMatrix<BaseFloat> *out) const;
virtual void Backprop(const CuMatrixBase<BaseFloat> &in_value,
const CuMatrixBase<BaseFloat> &out_value,
const CuMatrixBase<BaseFloat> &out_deriv,
int32 num_chunks,
Component *to_update, // may be identical to "this".
CuMatrix<BaseFloat> *in_deriv) const {
*in_deriv = out_deriv;
}
private:
int32 dim_;
BaseFloat stddev_;
};
/// Functions used in Init routines. Suppose name=="foo", if "string" has a
/// field like foo=12, this function will set "param" to 12 and remove that
/// element from "string". It returns true if the parameter was read.
bool ParseFromString(const std::string &name, std::string *string,
int32 *param);
/// This version is for parameters of type BaseFloat.
bool ParseFromString(const std::string &name, std::string *string,
BaseFloat *param);
/// This version is for parameters of type std::vector<int32>; it expects
/// them as a colon-separated list, without spaces.
bool ParseFromString(const std::string &name, std::string *string,
std::vector<int32> *param);
/// This version is for parameters of type bool, which can appear
/// as any string beginning with f, F, t or T.
bool ParseFromString(const std::string &name, std::string *string, bool *param);
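// Illustrative usage sketch of ParseFromString() (the option names below are
// hypothetical, chosen only for the example):
inline void ExampleParseFromStringUsage() {
  std::string args = "dim=40 param-stddev=0.1";
  int32 dim = 0;
  BaseFloat param_stddev = 0.0;
  bool got_dim = ParseFromString("dim", &args, &dim);
  // Now dim == 40, got_dim == true, and the "dim=40" field has been removed
  // from args, leaving (up to whitespace) "param-stddev=0.1".
  bool got_stddev = ParseFromString("param-stddev", &args, &param_stddev);
  // Now param_stddev == 0.1 and args is empty; Init routines typically treat
  // any leftover text in args as an error.
  (void)got_dim;
  (void)got_stddev;  // silence unused-variable warnings.
}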
} // namespace nnet2
} // namespace kaldi
#endif
<|start_filename|>tonic-suite/asr/src/online2/online-gmm-decoding.cc<|end_filename|>
// online2/online-gmm-decoding.cc
// Copyright 2013-2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "online2/online-gmm-decoding.h"
#include "lat/lattice-functions.h"
#include "lat/determinize-lattice-pruned.h"
namespace kaldi {
SingleUtteranceGmmDecoder::SingleUtteranceGmmDecoder(
const OnlineGmmDecodingConfig &config,
const OnlineGmmDecodingModels &models,
const OnlineFeaturePipeline &feature_prototype,
const fst::Fst<fst::StdArc> &fst,
const OnlineGmmAdaptationState &adaptation_state)
: config_(config),
models_(models),
feature_pipeline_(feature_prototype.New()),
orig_adaptation_state_(adaptation_state),
adaptation_state_(adaptation_state),
decoder_(fst, config.faster_decoder_opts) {
if (!SplitStringToIntegers(config_.silence_phones, ":", false,
&silence_phones_))
KALDI_ERR << "Bad --silence-phones option '" << config_.silence_phones
<< "'";
SortAndUniq(&silence_phones_);
feature_pipeline_->SetTransform(adaptation_state_.transform);
decoder_.InitDecoding();
}
// Advance the decoding as far as we can, and possibly estimate fMLLR.
void SingleUtteranceGmmDecoder::AdvanceDecoding() {
const AmDiagGmm &am_gmm =
(HaveTransform() ? models_.GetModel()
: models_.GetOnlineAlignmentModel());
// The decodable object is lightweight, we lose nothing
// from constructing it each time we want to decode more of the
// input.
DecodableDiagGmmScaledOnline decodable(am_gmm, models_.GetTransitionModel(),
config_.acoustic_scale,
feature_pipeline_);
int32 old_frames = decoder_.NumFramesDecoded();
// This will decode as many frames as are currently available.
decoder_.AdvanceDecoding(&decodable);
{ // possibly estimate fMLLR.
int32 new_frames = decoder_.NumFramesDecoded();
BaseFloat frame_shift = feature_pipeline_->FrameShiftInSeconds();
// if the original adaptation state (at utterance-start) had no transform,
// then this means it's the first utt of the speaker... even if not, if we
// don't have a transform it probably makes sense to treat it as the 1st utt
// of the speaker, i.e. to do fMLLR adaptation sooner.
bool is_first_utterance_of_speaker =
(orig_adaptation_state_.transform.NumRows() == 0);
bool end_of_utterance = false;
if (config_.adaptation_policy_opts.DoAdapt(old_frames * frame_shift,
new_frames * frame_shift,
is_first_utterance_of_speaker))
this->EstimateFmllr(end_of_utterance);
}
}
// gets Gaussian posteriors for purposes of fMLLR estimation.
// We exclude the silence phones from the Gaussian posteriors.
bool SingleUtteranceGmmDecoder::GetGaussianPosteriors(bool end_of_utterance,
GaussPost *gpost) {
// Gets the Gaussian-level posteriors for this utterance, using whatever
// features and model we are currently decoding with. We'll use these
// to estimate basis-fMLLR with.
if (decoder_.NumFramesDecoded() == 0) {
KALDI_WARN << "You have decoded no data so cannot estimate fMLLR.";
return false;
}
KALDI_ASSERT(config_.fmllr_lattice_beam > 0.0);
// Note: we'll just use whatever acoustic scaling factor we were decoding
// with. This is in the lattice that we get from decoder_.GetRawLattice().
Lattice raw_lat;
decoder_.GetRawLatticePruned(&raw_lat, end_of_utterance,
config_.fmllr_lattice_beam);
// At this point we could rescore the lattice if we wanted, and
// this might improve the accuracy on long utterances that were
// the first utterance of that speaker, if we had already
// estimated the fMLLR by the time we reach this code (e.g. this
// was the second call). We don't do this right now.
PruneLattice(config_.fmllr_lattice_beam, &raw_lat);
#if 1 // Do determinization.
Lattice det_lat; // lattice-determinized lattice-- represent this as Lattice
// not CompactLattice, as LatticeForwardBackward() does not
// accept CompactLattice.
fst::Invert(&raw_lat); // want to determinize on words.
fst::ILabelCompare<kaldi::LatticeArc> ilabel_comp;
fst::ArcSort(&raw_lat,
ilabel_comp); // improves efficiency of determinization
fst::DeterminizeLatticePruned(raw_lat, double(config_.fmllr_lattice_beam),
&det_lat);
fst::Invert(&det_lat); // invert back.
if (det_lat.NumStates() == 0) {
// Do nothing if the lattice is empty. This should not happen.
KALDI_WARN << "Got empty lattice. Not estimating fMLLR.";
return false;
}
#else
Lattice &det_lat = raw_lat; // Don't determinize.
#endif
TopSortLatticeIfNeeded(&det_lat);
// Note: the acoustic scale we use here is whatever we decoded with.
Posterior post;
BaseFloat tot_fb_like = LatticeForwardBackward(det_lat, &post);
KALDI_VLOG(3) << "Lattice forward-backward likelihood was "
<< (tot_fb_like / post.size()) << " per frame over "
<< post.size() << " frames.";
ConstIntegerSet<int32> silence_set(silence_phones_); // faster lookup
const TransitionModel &trans_model = models_.GetTransitionModel();
WeightSilencePost(trans_model, silence_set, config_.silence_weight, &post);
const AmDiagGmm &am_gmm =
(HaveTransform() ? models_.GetModel()
: models_.GetOnlineAlignmentModel());
Posterior pdf_post;
ConvertPosteriorToPdfs(trans_model, post, &pdf_post);
Vector<BaseFloat> feat(feature_pipeline_->Dim());
double tot_like = 0.0, tot_weight = 0.0;
gpost->resize(pdf_post.size());
for (size_t i = 0; i < pdf_post.size(); i++) {
feature_pipeline_->GetFrame(i, &feat);
for (size_t j = 0; j < pdf_post[i].size(); j++) {
int32 pdf_id = pdf_post[i][j].first;
BaseFloat weight = pdf_post[i][j].second;
const DiagGmm &gmm = am_gmm.GetPdf(pdf_id);
Vector<BaseFloat> this_post_vec;
BaseFloat like = gmm.ComponentPosteriors(feat, &this_post_vec);
this_post_vec.Scale(weight);
tot_like += like * weight;
tot_weight += weight;
(*gpost)[i].push_back(std::make_pair(pdf_id, this_post_vec));
}
}
KALDI_VLOG(3) << "Average likelihood weighted by posterior was "
<< (tot_like / tot_weight) << " over " << tot_weight
<< " frames (after downweighting silence).";
return true;
}
void SingleUtteranceGmmDecoder::EstimateFmllr(bool end_of_utterance) {
if (decoder_.NumFramesDecoded() == 0) {
KALDI_WARN << "You have decoded no data so cannot estimate fMLLR.";
}
if (GetVerboseLevel() >= 2) {
Matrix<BaseFloat> feats;
feature_pipeline_->GetAsMatrix(&feats);
KALDI_VLOG(2) << "Features are " << feats;
}
GaussPost gpost;
GetGaussianPosteriors(end_of_utterance, &gpost);
FmllrDiagGmmAccs &spk_stats = adaptation_state_.spk_stats;
if (spk_stats.beta_ != orig_adaptation_state_.spk_stats.beta_) {
// This could happen if the user called EstimateFmllr() twice on the
// same utterance... we don't want to count any stats twice so we
// have to reset the stats to what they were before this utterance
// (possibly empty).
spk_stats = orig_adaptation_state_.spk_stats;
}
int32 dim = feature_pipeline_->Dim();
if (spk_stats.Dim() == 0) spk_stats.Init(dim);
Matrix<BaseFloat> empty_transform;
feature_pipeline_->SetTransform(empty_transform);
Vector<BaseFloat> feat(dim);
if (adaptation_state_.transform.NumRows() == 0) {
// If this is the first time we're estimating fMLLR, freeze the CMVN to its
// current value. It doesn't matter too much what value this is, since we
// have already computed the Gaussian-level alignments (it may have a small
// effect if the basis is very small and doesn't include an offset as part
// of the transform).
feature_pipeline_->FreezeCmvn();
}
// GetModel() returns the model to be used for estimating
// transforms.
const AmDiagGmm &am_gmm = models_.GetModel();
for (size_t i = 0; i < gpost.size(); i++) {
feature_pipeline_->GetFrame(i, &feat);
for (size_t j = 0; j < gpost[i].size(); j++) {
int32 pdf_id = gpost[i][j].first; // caution: this gpost has pdf-id
// instead of transition-id, which is
// unusual.
const Vector<BaseFloat> &posterior(gpost[i][j].second);
spk_stats.AccumulateFromPosteriors(am_gmm.GetPdf(pdf_id), feat,
posterior);
}
}
const BasisFmllrEstimate &basis = models_.GetFmllrBasis();
if (basis.Dim() == 0)
KALDI_ERR << "In order to estimate fMLLR, you need to supply the "
<< "--fmllr-basis option.";
Vector<BaseFloat> basis_coeffs;
BaseFloat impr =
basis.ComputeTransform(spk_stats, &adaptation_state_.transform,
&basis_coeffs, config_.basis_opts);
KALDI_VLOG(3) << "Objective function improvement from basis-fMLLR is "
<< (impr / spk_stats.beta_) << " per frame, over "
<< spk_stats.beta_ << " frames, #params estimated is "
<< basis_coeffs.Dim();
feature_pipeline_->SetTransform(adaptation_state_.transform);
}
bool SingleUtteranceGmmDecoder::HaveTransform() const {
return (feature_pipeline_->HaveFmllrTransform());
}
void SingleUtteranceGmmDecoder::GetAdaptationState(
OnlineGmmAdaptationState *adaptation_state) const {
*adaptation_state = adaptation_state_;
feature_pipeline_->GetCmvnState(&adaptation_state->cmvn_state);
}
bool SingleUtteranceGmmDecoder::RescoringIsNeeded() const {
if (orig_adaptation_state_.transform.NumRows() !=
adaptation_state_.transform.NumRows())
return true; // fMLLR was estimated
if (!orig_adaptation_state_.transform.ApproxEqual(
adaptation_state_.transform))
return true; // fMLLR was re-estimated
if (adaptation_state_.transform.NumRows() != 0 &&
&models_.GetModel() != &models_.GetFinalModel())
return true; // we have an fMLLR transform, and a discriminatively estimated
// model which differs from the one used to estimate fMLLR.
return false;
}
SingleUtteranceGmmDecoder::~SingleUtteranceGmmDecoder() {
delete feature_pipeline_;
}
bool SingleUtteranceGmmDecoder::EndpointDetected(
const OnlineEndpointConfig &config) {
const TransitionModel &tmodel = models_.GetTransitionModel();
return kaldi::EndpointDetected(
config, tmodel, feature_pipeline_->FrameShiftInSeconds(), decoder_);
}
void SingleUtteranceGmmDecoder::GetLattice(bool rescore_if_needed,
bool end_of_utterance,
CompactLattice *clat) const {
Lattice lat;
double lat_beam = config_.faster_decoder_opts.lattice_beam;
decoder_.GetRawLattice(&lat, end_of_utterance);
if (rescore_if_needed && RescoringIsNeeded()) {
DecodableDiagGmmScaledOnline decodable(
models_.GetFinalModel(), models_.GetTransitionModel(),
config_.acoustic_scale, feature_pipeline_);
if (!kaldi::RescoreLattice(&decodable, &lat))
KALDI_WARN << "Error rescoring lattice";
}
PruneLattice(lat_beam, &lat);
DeterminizeLatticePhonePrunedWrapper(models_.GetTransitionModel(), &lat,
lat_beam, clat,
config_.faster_decoder_opts.det_opts);
}
void SingleUtteranceGmmDecoder::GetBestPath(bool end_of_utterance,
Lattice *best_path) const {
decoder_.GetBestPath(best_path, end_of_utterance);
}
OnlineGmmDecodingModels::OnlineGmmDecodingModels(
const OnlineGmmDecodingConfig &config) {
KALDI_ASSERT(!config.model_rxfilename.empty() &&
"You must supply the --model option");
{
bool binary;
Input ki(config.model_rxfilename, &binary);
tmodel_.Read(ki.Stream(), binary);
model_.Read(ki.Stream(), binary);
}
if (!config.online_alimdl_rxfilename.empty()) {
bool binary;
Input ki(config.online_alimdl_rxfilename, &binary);
TransitionModel tmodel;
tmodel.Read(ki.Stream(), binary);
if (!tmodel.Compatible(tmodel_))
KALDI_ERR << "Incompatible models given to the --model and "
<< "--online-alignment-model options";
online_alignment_model_.Read(ki.Stream(), binary);
}
if (!config.rescore_model_rxfilename.empty()) {
bool binary;
Input ki(config.rescore_model_rxfilename, &binary);
TransitionModel tmodel;
tmodel.Read(ki.Stream(), binary);
if (!tmodel.Compatible(tmodel_))
KALDI_ERR << "Incompatible models given to the --model and "
<< "--final-model options";
rescore_model_.Read(ki.Stream(), binary);
}
if (!config.fmllr_basis_rxfilename.empty()) {
// We could just as easily use ReadKaldiObject() here.
bool binary;
Input ki(config.fmllr_basis_rxfilename, &binary);
fmllr_basis_.Read(ki.Stream(), binary);
}
}
const TransitionModel &OnlineGmmDecodingModels::GetTransitionModel() const {
return tmodel_;
}
const AmDiagGmm &OnlineGmmDecodingModels::GetOnlineAlignmentModel() const {
if (online_alignment_model_.NumPdfs() != 0)
return online_alignment_model_;
else
return model_;
}
const AmDiagGmm &OnlineGmmDecodingModels::GetModel() const { return model_; }
const AmDiagGmm &OnlineGmmDecodingModels::GetFinalModel() const {
if (rescore_model_.NumPdfs() != 0)
return rescore_model_;
else
return model_;
}
const BasisFmllrEstimate &OnlineGmmDecodingModels::GetFmllrBasis() const {
return fmllr_basis_;
}
void OnlineGmmDecodingAdaptationPolicyConfig::Check() const {
KALDI_ASSERT(adaptation_first_utt_delay > 0.0 &&
adaptation_first_utt_ratio > 1.0);
KALDI_ASSERT(adaptation_delay > 0.0 && adaptation_ratio > 1.0);
}
bool OnlineGmmDecodingAdaptationPolicyConfig::DoAdapt(
BaseFloat chunk_begin_secs, BaseFloat chunk_end_secs,
bool is_first_utterance) const {
Check();
if (is_first_utterance) {
// We aim to return true if a member of the sequence
// ( adaptation_first_utt_delay * adaptation_first_utt_ratio^n )
// for n = 0, 1, 2, ...
// is in the range [ chunk_begin_secs, chunk_end_secs ).
BaseFloat delay = adaptation_first_utt_delay;
while (delay < chunk_begin_secs) delay *= adaptation_first_utt_ratio;
return (delay < chunk_end_secs);
} else {
// as above, but remove "first_utt".
BaseFloat delay = adaptation_delay;
while (delay < chunk_begin_secs) delay *= adaptation_ratio;
return (delay < chunk_end_secs);
}
}
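// Illustrative example of the schedule implemented in DoAdapt() above: with
// adaptation_first_utt_delay = 2.0 and adaptation_first_utt_ratio = 1.5, the
// candidate adaptation times for a speaker's first utterance are
// 2.0, 3.0, 4.5, 6.75, ... seconds, so a chunk covering [2.9, 3.2) returns
// true (3.0 falls inside it) while a chunk covering [3.1, 4.4) returns false
// (the next candidate, 4.5, lies beyond the chunk end).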
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-fmpe-acc-stats.cc<|end_filename|>
// gmmbin/gmm-fmpe-acc-stats.cc
// Copyright 2012 Johns Hopkins University (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "transform/fmpe.h"
int main(int argc, char *argv[]) {
using namespace kaldi;
using kaldi::int32;
try {
const char *usage =
"Accumulate stats for fMPE training, using GMM model. Note: this "
"could\n"
"be done using gmm-get-feat-deriv and fmpe-acc-stats (but you'd be "
"computing\n"
"the features twice). Features input should be pre-fMPE features.\n"
"\n"
"Usage: gmm-fmpe-acc-stats [options] <model-in> <fmpe-in> "
"<feature-rspecifier> "
"<gselect-rspecifier> <posteriors-rspecifier> <fmpe-stats-out>\n"
"e.g.: \n"
" gmm-fmpe-acc-stats --model-derivative 1.accs 1.mdl 1.fmpe \"$feats\" "
"ark:1.gselect ark:1.post 1.fmpe_stats\n";
ParseOptions po(usage);
bool binary = true;
std::string model_derivative_rxfilename;
po.Register("binary", &binary, "If true, write stats in binary mode.");
po.Register("model-derivative", &model_derivative_rxfilename,
"GMM-accs file containing model derivative [note: contains no "
"transition stats]. Used for indirect differential. Warning: "
"this will only work correctly in the case of MMI/BMMI "
"objective function, with non-canceled stats.");
po.Read(argc, argv);
if (po.NumArgs() != 6) {
po.PrintUsage();
exit(1);
}
std::string model_rxfilename = po.GetArg(1), fmpe_rxfilename = po.GetArg(2),
feature_rspecifier = po.GetArg(3),
gselect_rspecifier = po.GetArg(4),
posteriors_rspecifier = po.GetArg(5),
stats_wxfilename = po.GetArg(6);
AmDiagGmm am_gmm;
TransitionModel trans_model;
{
bool binary;
Input ki(model_rxfilename, &binary);
trans_model.Read(ki.Stream(), binary);
am_gmm.Read(ki.Stream(), binary);
}
Fmpe fmpe;
ReadKaldiObject(fmpe_rxfilename, &fmpe);
bool have_indirect = (model_derivative_rxfilename != "");
AccumAmDiagGmm model_derivative;
if (have_indirect)
ReadKaldiObject(model_derivative_rxfilename, &model_derivative);
FmpeStats fmpe_stats(fmpe);
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
RandomAccessInt32VectorVectorReader gselect_reader(gselect_rspecifier);
RandomAccessPosteriorReader posteriors_reader(posteriors_rspecifier);
BaseFloat tot_like = 0.0; // tot like weighted by posterior.
int32 num_frames = 0;
int32 num_done = 0, num_err = 0;
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string key = feature_reader.Key();
if (!posteriors_reader.HasKey(key)) {
num_err++;
KALDI_WARN << "No posteriors for utterance " << key;
continue;
}
const Matrix<BaseFloat> &feat_in = feature_reader.Value();
const Posterior &posterior = posteriors_reader.Value(key);
if (static_cast<int32>(posterior.size()) != feat_in.NumRows()) {
KALDI_WARN << "Posterior vector has wrong size " << (posterior.size())
<< " vs. " << (feat_in.NumRows());
num_err++;
continue;
}
if (!gselect_reader.HasKey(key)) {
KALDI_WARN << "No gselect information for key " << key;
num_err++;
continue;
}
const std::vector<std::vector<int32> > &gselect =
gselect_reader.Value(key);
if (static_cast<int32>(gselect.size()) != feat_in.NumRows()) {
KALDI_WARN << "gselect information has wrong size";
num_err++;
continue;
}
num_done++;
Matrix<BaseFloat> fmpe_feat(feat_in.NumRows(), feat_in.NumCols());
fmpe.ComputeFeatures(feat_in, gselect, &fmpe_feat);
fmpe_feat.AddMat(1.0, feat_in);
Matrix<BaseFloat> direct_deriv, indirect_deriv;
tot_like += ComputeAmGmmFeatureDeriv(
am_gmm, trans_model, posterior, fmpe_feat, &direct_deriv,
(have_indirect ? &model_derivative : NULL),
(have_indirect ? &indirect_deriv : NULL));
num_frames += feat_in.NumRows();
fmpe.AccStats(feat_in, gselect, direct_deriv,
(have_indirect ? &indirect_deriv : NULL), &fmpe_stats);
if (num_done % 100 == 0)
KALDI_LOG << "Processed " << num_done << " utterances.";
}
KALDI_LOG << "Done " << num_done << " files, " << num_err
<< " with errors.";
KALDI_LOG << "Overall weighted acoustic likelihood per frame is "
<< (tot_like / num_frames) << " over " << num_frames
<< " frames.";
Output ko(stats_wxfilename, binary);
fmpe_stats.Write(ko.Stream(), binary);
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-stats.cc<|end_filename|>
// nnet2/nnet-stats.cc
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet2/nnet-stats.h"
namespace kaldi {
namespace nnet2 {
void NnetStats::StatsElement::PrintStats(std::ostream &os) {
BaseFloat c = (count == 0 ? 1 : count), // prevent division by zero.
deriv_mean = deriv_sum / c,
deriv_stddev = std::sqrt(deriv_sumsq / c - deriv_mean * deriv_mean),
abs_value_mean = abs_value_sum / c,
abs_value_stddev = std::sqrt(abs_value_sumsq / c -
abs_value_mean * abs_value_mean);
os << '[' << deriv_begin << ':' << deriv_end << "] count=" << count
<< ", deriv mean,stddev=" << deriv_mean << ',' << deriv_stddev
<< ", abs-avg-value mean,stddev=" << abs_value_mean << ','
<< abs_value_stddev;
}
void NnetStats::StatsElement::AddStats(BaseFloat avg_deriv,
BaseFloat avg_value) {
count++;
deriv_sum += avg_deriv;
deriv_sumsq += avg_deriv * avg_deriv;
abs_value_sum += std::abs(avg_value);
abs_value_sumsq += avg_value * avg_value;
}
int32 NnetStats::BucketFor(BaseFloat avg_deriv) {
KALDI_ASSERT(avg_deriv >= 0.0);
KALDI_ASSERT(bucket_width_ > 0.0);
// Add 0.5 and cast to int32, which rounds the ratio to the nearest integer.
int32 index = static_cast<int32>(avg_deriv / bucket_width_ + 0.5);
while (index >= static_cast<int32>(buckets_.size()))
buckets_.push_back(StatsElement(buckets_.size() * bucket_width_,
(buckets_.size() + 1) * bucket_width_));
return index;
}
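// Illustrative example of BucketFor(): with bucket_width_ = 0.1 and
// avg_deriv = 0.23, the index is static_cast<int32>(0.23 / 0.1 + 0.5)
// = static_cast<int32>(2.8) = 2, and buckets_ is grown on demand so that an
// element with that index exists.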
void NnetStats::AddStats(BaseFloat avg_deriv, BaseFloat avg_value) {
global_.AddStats(avg_deriv, avg_value);
buckets_[BucketFor(avg_deriv)].AddStats(avg_deriv, avg_value);
}
void NnetStats::AddStatsFromNnet(const Nnet &nnet) {
const AffineComponent *ac = dynamic_cast<const AffineComponent *>(
&(nnet.GetComponent(affine_component_index_)));
KALDI_ASSERT(ac != NULL); // would be an error in calling code.
const NonlinearComponent *nc = dynamic_cast<const NonlinearComponent *>(
&(nnet.GetComponent(affine_component_index_ + 1)));
KALDI_ASSERT(nc != NULL); // would be an error in calling code.
double count = nc->Count();
if (count == 0) {
KALDI_WARN << "No stats stored with nonlinear component";
return;
}
const CuVector<double> &value_sum = nc->ValueSum();
const CuVector<double> &deriv_sum = nc->DerivSum();
if (value_sum.Dim() != deriv_sum.Dim())
KALDI_ERR << "Error computing nnet stats: probably you are "
<< "trying to compute stats for a sigmoid layer.";
for (int32 i = 0; i < value_sum.Dim(); i++) {
BaseFloat avg_value = value_sum(i) / count,
avg_deriv = deriv_sum(i) / count;
AddStats(avg_deriv, avg_value);
}
}
void NnetStats::PrintStats(std::ostream &os) {
os << "Stats for buckets:" << std::endl;
for (size_t i = 0; i < buckets_.size(); i++) {
buckets_[i].PrintStats(os);
os << std::endl;
}
os << "Global stats: ";
global_.PrintStats(os);
os << std::endl;
}
void GetNnetStats(const NnetStatsConfig &config, const Nnet &nnet,
std::vector<NnetStats> *stats) {
KALDI_ASSERT(stats->size() == 0);
for (int32 c = 0; c + 1 < nnet.NumComponents(); c++) {
const AffineComponent *ac =
dynamic_cast<const AffineComponent *>(&(nnet.GetComponent(c)));
if (ac == NULL) continue;
const NonlinearComponent *nc =
dynamic_cast<const NonlinearComponent *>(&(nnet.GetComponent(c + 1)));
if (nc == NULL) continue;
// exclude softmax.
const SoftmaxComponent *sc =
dynamic_cast<const SoftmaxComponent *>(&(nnet.GetComponent(c + 1)));
if (sc != NULL) continue;
stats->push_back(NnetStats(c, config.bucket_width));
stats->back().AddStatsFromNnet(nnet);
}
}
} // namespace nnet2
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/online/online-decodable.h<|end_filename|>
// online/online-decodable.h
// Copyright 2012 Cisco Systems (author: <NAME>)
// Modifications to the original contribution by Cisco Systems made by:
// <NAME>,
// Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_ONLINE_ONLINE_DECODABLE_H_
#define KALDI_ONLINE_ONLINE_DECODABLE_H_
#include "online/online-feat-input.h"
#include "gmm/decodable-am-diag-gmm.h"
namespace kaldi {
// A decodable, taking input from an OnlineFeatureInput object on-demand
class OnlineDecodableDiagGmmScaled : public DecodableInterface {
public:
OnlineDecodableDiagGmmScaled(const AmDiagGmm &am,
const TransitionModel &trans_model,
const BaseFloat scale,
OnlineFeatureMatrix *input_feats);
/// Returns the log likelihood, which will be negated in the decoder.
virtual BaseFloat LogLikelihood(int32 frame, int32 index);
virtual bool IsLastFrame(int32 frame) const;
/// Indices are one-based! This is for compatibility with OpenFst.
virtual int32 NumIndices() const { return trans_model_.NumTransitionIds(); }
private:
void CacheFrame(int32 frame);
OnlineFeatureMatrix *features_;
const AmDiagGmm &ac_model_;
BaseFloat ac_scale_;
const TransitionModel &trans_model_;
const int32 feat_dim_; // dimensionality of the input features
Vector<BaseFloat> cur_feats_;
int32 cur_frame_;
std::vector<std::pair<int32, BaseFloat> > cache_;
KALDI_DISALLOW_COPY_AND_ASSIGN(OnlineDecodableDiagGmmScaled);
};
} // namespace kaldi
#endif // KALDI_ONLINE_ONLINE_DECODABLE_H_
<|start_filename|>tonic-suite/asr/src/ivectorbin/select-voiced-frames.cc<|end_filename|>
// ivectorbin/select-voiced-frames.cc
// Copyright 2013 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/kaldi-matrix.h"
#include "feat/feature-functions.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using kaldi::int32;
const char *usage =
"Select a subset of frames of the input files, based on the output of\n"
"compute-vad or a similar program (a vector of length num-frames,\n"
"containing 1.0 for voiced, 0.0 for unvoiced).\n"
"Usage: select-voiced-frames [options] <feats-rspecifier> "
" <vad-rspecifier> <feats-wspecifier>\n"
"E.g.: select-voiced-frames [options] scp:feats.scp scp:vad.scp "
"ark:-\n";
ParseOptions po(usage);
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string feat_rspecifier = po.GetArg(1), vad_rspecifier = po.GetArg(2),
feat_wspecifier = po.GetArg(3);
SequentialBaseFloatMatrixReader feat_reader(feat_rspecifier);
RandomAccessBaseFloatVectorReader vad_reader(vad_rspecifier);
BaseFloatMatrixWriter feat_writer(feat_wspecifier);
int32 num_done = 0, num_err = 0;
for (; !feat_reader.Done(); feat_reader.Next()) {
std::string utt = feat_reader.Key();
const Matrix<BaseFloat> &feat = feat_reader.Value();
if (feat.NumRows() == 0) {
KALDI_WARN << "Empty feature matrix for utterance " << utt;
num_err++;
continue;
}
if (!vad_reader.HasKey(utt)) {
KALDI_WARN << "No VAD input found for utterance " << utt;
num_err++;
continue;
}
const Vector<BaseFloat> &voiced = vad_reader.Value(utt);
if (feat.NumRows() != voiced.Dim()) {
KALDI_WARN << "Mismatch in number for frames " << feat.NumRows()
<< " for features and VAD " << voiced.Dim()
<< ", for utterance " << utt;
num_err++;
continue;
}
if (voiced.Sum() == 0.0) {
KALDI_WARN << "No features were judged as voiced for utterance " << utt;
num_err++;
continue;
}
int32 dim = 0;
for (int32 i = 0; i < voiced.Dim(); i++)
if (voiced(i) != 0.0) dim++;
Matrix<BaseFloat> voiced_feat(dim, feat.NumCols());
int32 index = 0;
for (int32 i = 0; i < feat.NumRows(); i++) {
if (voiced(i) != 0.0) {
KALDI_ASSERT(voiced(i) == 1.0); // should be zero or one.
voiced_feat.Row(index).CopyFromVec(feat.Row(i));
index++;
}
}
KALDI_ASSERT(index == dim);
feat_writer.Write(utt, voiced_feat);
num_done++;
}
KALDI_LOG << "Done selecting voiced frames; processed " << num_done
<< " utterances, " << num_err << " had errors.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/fstext/lattice-weight.h<|end_filename|>
// fstext/lattice-weight.h
// Copyright 2009-2012 Microsoft Corporation
// Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_FSTEXT_LATTICE_WEIGHT_H_
#define KALDI_FSTEXT_LATTICE_WEIGHT_H_
#include "fst/fstlib.h"
#include "base/kaldi-common.h"
namespace fst {
// Declare weight type for lattices... it will be imported into namespace kaldi. It has two
// members, value1_ and value2_, of type BaseFloat (normally equals float). It
// is basically the same as the tropical semiring on value1_+value2_, except it
// keeps track of a and b separately. More precisely, it is equivalent to the
// lexicographic semiring on (value1_+value2_), (value1_-value2_)
template <class FloatType>
class LatticeWeightTpl;
template <class FloatType>
inline ostream &operator<<(ostream &strm, const LatticeWeightTpl<FloatType> &w);
template <class FloatType>
inline istream &operator>>(istream &strm, LatticeWeightTpl<FloatType> &w);
template <class FloatType>
class LatticeWeightTpl {
public:
typedef FloatType T; // normally float.
typedef LatticeWeightTpl ReverseWeight;
inline T Value1() const { return value1_; }
inline T Value2() const { return value2_; }
inline void SetValue1(T f) { value1_ = f; }
inline void SetValue2(T f) { value2_ = f; }
LatticeWeightTpl() {}
LatticeWeightTpl(T a, T b) : value1_(a), value2_(b) {}
LatticeWeightTpl(const LatticeWeightTpl &other)
: value1_(other.value1_), value2_(other.value2_) {}
LatticeWeightTpl &operator=(const LatticeWeightTpl &w) {
value1_ = w.value1_;
value2_ = w.value2_;
return *this;
}
LatticeWeightTpl<FloatType> Reverse() const { return *this; }
static const LatticeWeightTpl Zero() {
return LatticeWeightTpl(numeric_limits<T>::infinity(),
numeric_limits<T>::infinity());
}
static const LatticeWeightTpl One() { return LatticeWeightTpl(0.0, 0.0); }
static const string &Type() {
static const string type = (sizeof(T) == 4 ? "lattice4" : "lattice8");
return type;
}
static const LatticeWeightTpl NoWeight() {
return LatticeWeightTpl(numeric_limits<FloatType>::quiet_NaN(),
numeric_limits<FloatType>::quiet_NaN());
}
bool Member() const {
// value1_ == value1_ tests for NaN.
// We also test that neither value is -inf, and that either both
// or neither value is +inf.
if (value1_ != value1_ || value2_ != value2_) return false; // NaN
if (value1_ == -numeric_limits<T>::infinity() ||
value2_ == -numeric_limits<T>::infinity())
return false; // -infty not allowed
if (value1_ == numeric_limits<T>::infinity() ||
value2_ == numeric_limits<T>::infinity()) {
if (value1_ != numeric_limits<T>::infinity() ||
value2_ != numeric_limits<T>::infinity())
return false; // both must be +infty;
// this is necessary so that the semiring has only one zero.
}
return true;
}
LatticeWeightTpl Quantize(float delta = kDelta) const {
if (value1_ + value2_ == -numeric_limits<T>::infinity()) {
return LatticeWeightTpl(-numeric_limits<T>::infinity(),
-numeric_limits<T>::infinity());
} else if (value1_ + value2_ == numeric_limits<T>::infinity()) {
return LatticeWeightTpl(numeric_limits<T>::infinity(),
numeric_limits<T>::infinity());
} else if (value1_ + value2_ != value1_ + value2_) { // NaN
return LatticeWeightTpl(value1_ + value2_, value1_ + value2_);
} else {
return LatticeWeightTpl(floor(value1_ / delta + 0.5F) * delta,
floor(value2_ / delta + 0.5F) * delta);
}
}
static uint64 Properties() {
return kLeftSemiring | kRightSemiring | kCommutative | kPath | kIdempotent;
}
// This is used in OpenFst for binary I/O. This is OpenFst-style,
// not Kaldi-style, I/O.
istream &Read(istream &strm) {
// Always read/write as float, even if T is double,
// so we can use OpenFst-style read/write and still maintain
// compatibility when compiling with different FloatTypes
ReadType(strm, &value1_);
ReadType(strm, &value2_);
return strm;
}
// This is used in OpenFst for binary I/O. This is OpenFst-style,
// not Kaldi-style, I/O.
ostream &Write(ostream &strm) const {
WriteType(strm, value1_);
WriteType(strm, value2_);
return strm;
}
size_t Hash() const {
size_t ans;
union {
T f;
size_t s;
} u;
u.s = 0;
u.f = value1_;
ans = u.s;
u.f = value2_;
ans += u.s;
return ans;
}
protected:
inline static void WriteFloatType(ostream &strm, const T &f) {
if (f == numeric_limits<T>::infinity())
strm << "Infinity";
else if (f == -numeric_limits<T>::infinity())
strm << "-Infinity";
else if (f != f)
strm << "BadNumber";
else
strm << f;
}
// Internal helper function, used in ReadNoParen.
inline static void ReadFloatType(istream &strm, T &f) {
string s;
strm >> s;
if (s == "Infinity") {
f = numeric_limits<T>::infinity();
} else if (s == "-Infinity") {
f = -numeric_limits<T>::infinity();
} else if (s == "BadNumber") {
f = numeric_limits<T>::infinity();
f -= f; // get NaN
} else {
char *p;
f = strtod(s.c_str(), &p);
if (p < s.c_str() + s.size()) strm.clear(std::ios::badbit);
}
}
// Reads LatticeWeight when there are no parentheses around pair terms...
// currently the only form supported.
inline istream &ReadNoParen(istream &strm, char separator) {
int c;
do {
c = strm.get();
} while (isspace(c));
string s1;
while (c != separator) {
if (c == EOF) {
strm.clear(std::ios::badbit);
return strm;
}
s1 += c;
c = strm.get();
}
istringstream strm1(s1);
ReadFloatType(strm1, value1_); // ReadFloatType is class member function
// read second element
ReadFloatType(strm, value2_);
return strm;
}
friend istream &operator>>
<FloatType>(istream &, LatticeWeightTpl<FloatType> &);
friend ostream &operator<<<FloatType>(ostream &,
const LatticeWeightTpl<FloatType> &);
private:
T value1_;
T value2_;
};
/* ScaleTupleWeight is a function defined for LatticeWeightTpl and
   CompactLatticeWeightTpl that multiplies the pair (value1_, value2_) by a 2x2
matrix. Used, for example, in applying acoustic scaling.
*/
template <class FloatType, class ScaleFloatType>
inline LatticeWeightTpl<FloatType> ScaleTupleWeight(
const LatticeWeightTpl<FloatType> &w,
const vector<vector<ScaleFloatType> > &scale) {
// Without the next special case we'd get NaNs from infinity * 0
if (w.Value1() == numeric_limits<FloatType>::infinity())
return LatticeWeightTpl<FloatType>::Zero();
return LatticeWeightTpl<FloatType>(
scale[0][0] * w.Value1() + scale[0][1] * w.Value2(),
scale[1][0] * w.Value1() + scale[1][1] * w.Value2());
}
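/* Illustrative sketch, added for exposition (not part of the original header;
   the helper name is hypothetical): applying an acoustic scale "acwt" leaves
   the graph cost untouched and scales the acoustic cost, i.e. it uses the
   2x2 matrix [[1, 0], [0, acwt]]. */
inline LatticeWeightTpl<float> ExampleAcousticScale(
    const LatticeWeightTpl<float> &w, float acwt) {
  vector<vector<float> > scale(2, vector<float>(2, 0.0f));
  scale[0][0] = 1.0f;  // graph cost is kept as-is
  scale[1][1] = acwt;  // acoustic cost is multiplied by acwt
  return ScaleTupleWeight(w, scale);
}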
/* For testing purposes and in case it's ever useful, we define a similar
function to apply to LexicographicWeight and the like, templated on
TropicalWeight<float> etc.; we use PairWeight which is the base class of
LexicographicWeight.
*/
template <class FloatType, class ScaleFloatType>
inline PairWeight<TropicalWeightTpl<FloatType>, TropicalWeightTpl<FloatType> >
ScaleTupleWeight(const PairWeight<TropicalWeightTpl<FloatType>,
TropicalWeightTpl<FloatType> > &w,
const vector<vector<ScaleFloatType> > &scale) {
typedef TropicalWeightTpl<FloatType> BaseType;
typedef PairWeight<BaseType, BaseType> PairType;
const BaseType zero = BaseType::Zero();
// Without the next special case we'd get NaNs from infinity * 0
if (w.Value1() == zero || w.Value2() == zero) return PairType(zero, zero);
FloatType f1 = w.Value1().Value(), f2 = w.Value2().Value();
return PairType(BaseType(scale[0][0] * f1 + scale[0][1] * f2),
BaseType(scale[1][0] * f1 + scale[1][1] * f2));
}
template <class FloatType>
inline bool operator==(const LatticeWeightTpl<FloatType> &wa,
const LatticeWeightTpl<FloatType> &wb) {
// Volatile qualifier thwarts over-aggressive compiler optimizations
// that lead to problems esp. with NaturalLess().
volatile FloatType va1 = wa.Value1(), va2 = wa.Value2(), vb1 = wb.Value1(),
vb2 = wb.Value2();
return (va1 == vb1 && va2 == vb2);
}
template <class FloatType>
inline bool operator!=(const LatticeWeightTpl<FloatType> &wa,
const LatticeWeightTpl<FloatType> &wb) {
// Volatile qualifier thwarts over-aggressive compiler optimizations
// that lead to problems esp. with NaturalLess().
volatile FloatType va1 = wa.Value1(), va2 = wa.Value2(), vb1 = wb.Value1(),
vb2 = wb.Value2();
return (va1 != vb1 || va2 != vb2);
}
// We define a Compare function for LatticeWeightTpl even though it's
// not required by the semiring standard-- it's just more efficient
// to do it this way rather than using the NaturalLess template.
/// Compare returns -1 if w1 < w2, +1 if w1 > w2, and 0 if w1 == w2.
template <class FloatType>
inline int Compare(const LatticeWeightTpl<FloatType> &w1,
const LatticeWeightTpl<FloatType> &w2) {
FloatType f1 = w1.Value1() + w1.Value2(), f2 = w2.Value1() + w2.Value2();
if (f1 < f2) {
return 1;
} // having smaller cost means you're larger
// in the semiring [higher probability]
else if (f1 > f2) {
return -1;
}
  // Mathematically we should be comparing (w1.value1_ - w1.value2_) with
  // (w2.value1_ - w2.value2_) in the next line, but since at this point we
  // already know w1.value1_ + w1.value2_ == w2.value1_ + w2.value2_, we can
  // add that equality to both sides and divide by two, which gives the
  // simpler equivalent form w1.value1_ < w2.value1_.
else if (w1.Value1() < w2.Value1()) {
return 1;
} else if (w1.Value1() > w2.Value1()) {
return -1;
} else {
return 0;
}
}
template <class FloatType>
inline LatticeWeightTpl<FloatType> Plus(const LatticeWeightTpl<FloatType> &w1,
const LatticeWeightTpl<FloatType> &w2) {
return (Compare(w1, w2) >= 0 ? w1 : w2);
}
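/* Illustrative sketch, added for exposition (not part of the original header;
   the helper name is hypothetical): the weight with the smaller total cost is
   the "greater" element of this semiring, so Plus keeps it. */
inline void ExampleLatticePlusOrdering() {
  LatticeWeightTpl<float> cheap(1.0, 0.5), costly(3.0, 2.0);
  // Compare(cheap, costly) == 1, because total cost 1.5 beats total cost 5.0.
  LatticeWeightTpl<float> best = Plus(cheap, costly);  // equals "cheap"
  (void)best;  // suppress unused-variable warnings in this sketch
}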
// For efficiency, override the NaturalLess template class.
template <class FloatType>
class NaturalLess<LatticeWeightTpl<FloatType> > {
public:
typedef LatticeWeightTpl<FloatType> Weight;
bool operator()(const Weight &w1, const Weight &w2) const {
// NaturalLess is a negative order (opposite to normal ordering).
// This operator () corresponds to "<" in the negative order, which
// corresponds to the ">" in the normal order.
return (Compare(w1, w2) == 1);
}
};
template <class FloatType>
inline LatticeWeightTpl<FloatType> Times(
const LatticeWeightTpl<FloatType> &w1,
const LatticeWeightTpl<FloatType> &w2) {
return LatticeWeightTpl<FloatType>(w1.Value1() + w2.Value1(),
w1.Value2() + w2.Value2());
}
// divide w1 by w2 (on left/right/any doesn't matter as
// commutative).
template <class FloatType>
inline LatticeWeightTpl<FloatType> Divide(const LatticeWeightTpl<FloatType> &w1,
const LatticeWeightTpl<FloatType> &w2,
DivideType typ = DIVIDE_ANY) {
typedef FloatType T;
T a = w1.Value1() - w2.Value1(), b = w1.Value2() - w2.Value2();
if (a != a || b != b || a == -numeric_limits<T>::infinity() ||
b == -numeric_limits<T>::infinity()) {
std::cerr << "LatticeWeightTpl::Divide, NaN or invalid number produced. "
<< "[dividing by zero?] Returning zero.";
return LatticeWeightTpl<T>::Zero();
}
if (a == numeric_limits<T>::infinity() || b == numeric_limits<T>::infinity())
return LatticeWeightTpl<T>::Zero(); // not a valid number if only one is
// infinite.
return LatticeWeightTpl<T>(a, b);
}
template <class FloatType>
inline bool ApproxEqual(const LatticeWeightTpl<FloatType> &w1,
const LatticeWeightTpl<FloatType> &w2,
float delta = kDelta) {
if (w1.Value1() == w2.Value1() && w1.Value2() == w2.Value2())
return true; // handles Zero().
return (fabs((w1.Value1() + w1.Value2()) - (w2.Value1() + w2.Value2())) <=
delta);
}
template <class FloatType>
inline ostream &operator<<(ostream &strm,
const LatticeWeightTpl<FloatType> &w) {
typedef FloatType T;
LatticeWeightTpl<FloatType>::WriteFloatType(strm, w.Value1());
CHECK(FLAGS_fst_weight_separator.size() == 1);
strm << FLAGS_fst_weight_separator[0]; // comma by default;
// may or may not be settable from Kaldi programs.
LatticeWeightTpl<FloatType>::WriteFloatType(strm, w.Value2());
return strm;
}
template <class FloatType>
inline istream &operator>>(istream &strm, LatticeWeightTpl<FloatType> &w1) {
CHECK(FLAGS_fst_weight_separator.size() == 1);
// separator defaults to ','
return w1.ReadNoParen(strm, FLAGS_fst_weight_separator[0]);
}
// CompactLattice will be an acceptor (accepting the words/output-symbols),
// with the weights and input-symbol-seqs on the arcs.
// There must be a total order on W. We assume for the sake of efficiency
// that there is a function
// Compare(W w1, W w2) that returns -1 if w1 < w2, +1 if w1 > w2, and
// zero if w1 == w2, and Plus for type W returns (Compare(w1,w2) >= 0 ? w1 :
// w2).
template <class WeightType, class IntType>
class CompactLatticeWeightTpl {
public:
typedef WeightType W;
typedef CompactLatticeWeightTpl<WeightType, IntType> ReverseWeight;
  // Plus is like LexicographicWeight on the pair (weight_, string_), but where
  // we use standard lexicographic order on string_ [this is not the same as
  // NaturalLess on the StringWeight equivalent, which does not define a
  // total order].
  // Times and Divide are the obvious operations (we support both left and
  // right division).
  // CommonDivisor would need to be coded separately.
CompactLatticeWeightTpl() {}
CompactLatticeWeightTpl(const WeightType &w, const vector<IntType> &s)
: weight_(w), string_(s) {}
CompactLatticeWeightTpl &operator=(
const CompactLatticeWeightTpl<WeightType, IntType> &w) {
weight_ = w.weight_;
string_ = w.string_;
return *this;
}
const W &Weight() const { return weight_; }
const vector<IntType> &String() const { return string_; }
void SetWeight(const W &w) { weight_ = w; }
void SetString(const vector<IntType> &s) { string_ = s; }
static const CompactLatticeWeightTpl<WeightType, IntType> Zero() {
return CompactLatticeWeightTpl<WeightType, IntType>(WeightType::Zero(),
vector<IntType>());
}
static const CompactLatticeWeightTpl<WeightType, IntType> One() {
return CompactLatticeWeightTpl<WeightType, IntType>(WeightType::One(),
vector<IntType>());
}
inline static string GetIntSizeString() {
char buf[2];
buf[0] = '0' + sizeof(IntType);
buf[1] = '\0';
return buf;
}
static const string &Type() {
static const string type =
"compact" + WeightType::Type() + GetIntSizeString();
return type;
}
static const CompactLatticeWeightTpl<WeightType, IntType> NoWeight() {
return CompactLatticeWeightTpl<WeightType, IntType>(WeightType::NoWeight(),
std::vector<IntType>());
}
CompactLatticeWeightTpl<WeightType, IntType> Reverse() const {
size_t s = string_.size();
vector<IntType> v(s);
for (size_t i = 0; i < s; i++) v[i] = string_[s - i - 1];
return CompactLatticeWeightTpl<WeightType, IntType>(weight_, v);
}
bool Member() const {
// a semiring has only one zero, this is the important property
// we're trying to maintain here. So force string_ to be empty if
// w_ == zero.
if (!weight_.Member()) return false;
if (weight_ == WeightType::Zero())
return string_.empty();
else
return true;
}
CompactLatticeWeightTpl Quantize(float delta = kDelta) const {
return CompactLatticeWeightTpl(weight_.Quantize(delta), string_);
}
static uint64 Properties() {
return kLeftSemiring | kRightSemiring | kPath | kIdempotent;
}
// This is used in OpenFst for binary I/O. This is OpenFst-style,
// not Kaldi-style, I/O.
istream &Read(istream &strm) {
weight_.Read(strm);
if (strm.fail()) {
return strm;
}
int32 sz;
ReadType(strm, &sz);
if (strm.fail()) {
return strm;
}
if (sz < 0) {
std::cerr << "Negative string size! Read failure.";
strm.clear(std::ios::badbit);
return strm;
}
string_.resize(sz);
for (int32 i = 0; i < sz; i++) {
ReadType(strm, &(string_[i]));
}
return strm;
}
// This is used in OpenFst for binary I/O. This is OpenFst-style,
// not Kaldi-style, I/O.
ostream &Write(ostream &strm) const {
weight_.Write(strm);
if (strm.fail()) {
return strm;
}
int32 sz = static_cast<int32>(string_.size());
WriteType(strm, sz);
for (int32 i = 0; i < sz; i++) WriteType(strm, string_[i]);
return strm;
}
size_t Hash() const {
size_t ans = weight_.Hash();
// any weird numbers here are largish primes
size_t sz = string_.size(), mult = 6967;
for (size_t i = 0; i < sz; i++) {
ans += string_[i] * mult;
mult *= 7499;
}
return ans;
}
private:
W weight_;
vector<IntType> string_;
};
template <class WeightType, class IntType>
inline bool operator==(const CompactLatticeWeightTpl<WeightType, IntType> &w1,
const CompactLatticeWeightTpl<WeightType, IntType> &w2) {
return (w1.Weight() == w2.Weight() && w1.String() == w2.String());
}
template <class WeightType, class IntType>
inline bool operator!=(const CompactLatticeWeightTpl<WeightType, IntType> &w1,
const CompactLatticeWeightTpl<WeightType, IntType> &w2) {
return (w1.Weight() != w2.Weight() || w1.String() != w2.String());
}
template <class WeightType, class IntType>
inline bool ApproxEqual(const CompactLatticeWeightTpl<WeightType, IntType> &w1,
const CompactLatticeWeightTpl<WeightType, IntType> &w2,
float delta = kDelta) {
return (ApproxEqual(w1.Weight(), w2.Weight(), delta) &&
w1.String() == w2.String());
}
// Compare is not part of the standard for weight types, but used internally for
// efficiency. The comparison here first compares the weight; if this is the
// same, it compares the string. The comparison on strings is: first compare
// the length, if this is the same, use lexicographical order. We can't just
// use the lexicographical order because this would destroy the distributive
// property of multiplication over addition, taking into account that addition
// uses Compare. The string element of "Compare" isn't super-important in
// practical terms; it's only needed to ensure that Plus always gives consistent
// answers and is symmetric. It's essentially for tie-breaking, but we need to
// make sure all the semiring axioms are satisfied otherwise OpenFst might
// break.
template <class WeightType, class IntType>
inline int Compare(const CompactLatticeWeightTpl<WeightType, IntType> &w1,
const CompactLatticeWeightTpl<WeightType, IntType> &w2) {
int c1 = Compare(w1.Weight(), w2.Weight());
if (c1 != 0) return c1;
int l1 = w1.String().size(), l2 = w2.String().size();
  // Use opposite order on the string lengths, so that if the costs are
  // the same, the shorter string wins.
if (l1 > l2)
return -1;
else if (l1 < l2)
return 1;
for (int i = 0; i < l1; i++) {
if (w1.String()[i] < w2.String()[i])
return -1;
else if (w1.String()[i] > w2.String()[i])
return 1;
}
return 0;
}
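/* Illustrative sketch, added for exposition (not part of the original header;
   the helper name is hypothetical): with equal costs, the shorter string
   compares as "greater", which is the tie-breaking rule described above. */
inline void ExampleCompactLatticeCompare() {
  typedef CompactLatticeWeightTpl<LatticeWeightTpl<float>, int32> CW;
  LatticeWeightTpl<float> cost(1.0, 1.0);
  CW a(cost, vector<int32>(2, 7));  // total cost 2.0, string of length 2
  CW b(cost, vector<int32>(3, 7));  // same cost, longer string
  int c = Compare(a, b);  // c == 1, so Plus(a, b) would return a
  (void)c;
}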
// For efficiency, override the NaturalLess template class.
template <class FloatType, class IntType>
class NaturalLess<
CompactLatticeWeightTpl<LatticeWeightTpl<FloatType>, IntType> > {
public:
typedef CompactLatticeWeightTpl<LatticeWeightTpl<FloatType>, IntType> Weight;
bool operator()(const Weight &w1, const Weight &w2) const {
// NaturalLess is a negative order (opposite to normal ordering).
// This operator () corresponds to "<" in the negative order, which
// corresponds to the ">" in the normal order.
return (Compare(w1, w2) == 1);
}
};
// Make sure Compare is defined for TropicalWeight, so everything works
// if we substitute LatticeWeight for TropicalWeight.
inline int Compare(const TropicalWeight &w1, const TropicalWeight &w2) {
float f1 = w1.Value(), f2 = w2.Value();
if (f1 == f2)
return 0;
else if (f1 > f2)
return -1;
else
return 1;
}
template <class WeightType, class IntType>
inline CompactLatticeWeightTpl<WeightType, IntType> Plus(
const CompactLatticeWeightTpl<WeightType, IntType> &w1,
const CompactLatticeWeightTpl<WeightType, IntType> &w2) {
return (Compare(w1, w2) >= 0 ? w1 : w2);
}
template <class WeightType, class IntType>
inline CompactLatticeWeightTpl<WeightType, IntType> Times(
const CompactLatticeWeightTpl<WeightType, IntType> &w1,
const CompactLatticeWeightTpl<WeightType, IntType> &w2) {
typedef WeightType T;
WeightType w = Times(w1.Weight(), w2.Weight());
if (w == WeightType::Zero()) {
return CompactLatticeWeightTpl<WeightType, IntType>::Zero();
// special case to ensure zero is unique
} else {
vector<IntType> v;
v.resize(w1.String().size() + w2.String().size());
typename vector<IntType>::iterator iter = v.begin();
iter = std::copy(w1.String().begin(), w1.String().end(),
iter); // returns end of first range.
std::copy(w2.String().begin(), w2.String().end(), iter);
return CompactLatticeWeightTpl<WeightType, IntType>(w, v);
}
}
template <class WeightType, class IntType>
inline CompactLatticeWeightTpl<WeightType, IntType> Divide(
const CompactLatticeWeightTpl<WeightType, IntType> &w1,
const CompactLatticeWeightTpl<WeightType, IntType> &w2,
DivideType div = DIVIDE_ANY) {
if (w1.Weight() == WeightType::Zero()) {
if (w2.Weight() != WeightType::Zero()) {
return CompactLatticeWeightTpl<WeightType, IntType>::Zero();
} else {
std::cerr << "Division by zero [0/0] in CompactLatticeWeightTpl\n";
exit(1);
}
} else if (w2.Weight() == WeightType::Zero()) {
std::cerr << "Error: division by zero in CompactLatticeWeightTpl::Divide()";
exit(1);
}
WeightType w = Divide(w1.Weight(), w2.Weight());
const vector<IntType> v1 = w1.String(), v2 = w2.String();
if (v2.size() > v1.size()) {
std::cerr << "Error in Divide (CompactLatticeWeightTpl): cannot divide, "
"length mismatch.\n";
exit(1);
}
typename vector<IntType>::const_iterator v1b = v1.begin(), v1e = v1.end(),
v2b = v2.begin(), v2e = v2.end();
if (div == DIVIDE_LEFT) {
if (!std::equal(v2b, v2e,
v1b)) { // v2 must be identical to first part of v1.
std::cerr << "Error in Divide (CompactLatticeWeighTpl): cannot divide, "
"data mismatch.\n";
exit(1);
}
return CompactLatticeWeightTpl<WeightType, IntType>(
w, vector<IntType>(v1b + (v2e - v2b), v1e)); // return last part of v1.
} else if (div == DIVIDE_RIGHT) {
if (!std::equal(
v2b, v2e,
v1e - (v2e - v2b))) { // v2 must be identical to last part of v1.
std::cerr << "Error in Divide (CompactLatticeWeighTpl): cannot divide, "
"data mismatch.\n";
exit(1);
}
return CompactLatticeWeightTpl<WeightType, IntType>(
w,
vector<IntType>(v1b, v1e - (v2e - v2b))); // return first part of v1.
} else {
std::cerr << "Cannot divide CompactLatticeWeightTpl with DIVIDE_ANY.\n";
exit(1);
}
return CompactLatticeWeightTpl<WeightType,
IntType>::Zero(); // keep compiler happy.
}
template <class WeightType, class IntType>
inline ostream &operator<<(
ostream &strm, const CompactLatticeWeightTpl<WeightType, IntType> &w) {
strm << w.Weight();
CHECK(FLAGS_fst_weight_separator.size() == 1);
strm << FLAGS_fst_weight_separator[0]; // comma by default.
for (size_t i = 0; i < w.String().size(); i++) {
strm << w.String()[i];
if (i + 1 < w.String().size())
strm << kStringSeparator; // '_'; defined in string-weight.h in OpenFst
// code.
}
return strm;
}
template <class WeightType, class IntType>
inline istream &operator>>(istream &strm,
CompactLatticeWeightTpl<WeightType, IntType> &w) {
std::string s;
strm >> s;
if (strm.fail()) {
return strm;
}
CHECK(FLAGS_fst_weight_separator.size() == 1);
size_t pos = s.find_last_of(FLAGS_fst_weight_separator); // normally ","
if (pos == std::string::npos) {
strm.clear(std::ios::badbit);
return strm;
}
// get parts of str before and after the separator (default: ',');
std::string s1(s, 0, pos), s2(s, pos + 1);
std::istringstream strm1(s1);
WeightType weight;
strm1 >> weight;
w.SetWeight(weight);
if (strm1.fail() || !strm1.eof()) {
strm.clear(std::ios::badbit);
return strm;
}
// read string part.
vector<IntType> string;
const char *c = s2.c_str();
while (*c != '\0') {
if (*c == kStringSeparator) // '_'
c++;
char *c2;
long int i = strtol(c, &c2, 10);
if (c2 == c || static_cast<long int>(static_cast<IntType>(i)) != i) {
strm.clear(std::ios::badbit);
return strm;
}
c = c2;
string.push_back(static_cast<IntType>(i));
}
w.SetString(string);
return strm;
}
template <class BaseWeightType, class IntType>
class CompactLatticeWeightCommonDivisorTpl {
public:
typedef CompactLatticeWeightTpl<BaseWeightType, IntType> Weight;
Weight operator()(const Weight &w1, const Weight &w2) const {
// First find longest common prefix of the strings.
typename vector<IntType>::const_iterator s1b = w1.String().begin(),
s1e = w1.String().end(),
s2b = w2.String().begin(),
s2e = w2.String().end();
while (s1b < s1e && s2b < s2e && *s1b == *s2b) {
s1b++;
s2b++;
}
return Weight(Plus(w1.Weight(), w2.Weight()),
vector<IntType>(w1.String().begin(), s1b));
}
};
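/* Illustrative sketch, added for exposition (not part of the original header;
   the helper name is hypothetical): the common divisor keeps the better of
   the two costs and the longest common prefix of the two label strings. */
inline void ExampleCommonDivisor() {
  typedef LatticeWeightTpl<float> LW;
  typedef CompactLatticeWeightTpl<LW, int32> CW;
  CompactLatticeWeightCommonDivisorTpl<LW, int32> divisor;
  vector<int32> s1, s2;
  s1.push_back(3); s1.push_back(5); s1.push_back(9);
  s2.push_back(3); s2.push_back(5); s2.push_back(2);
  CW d = divisor(CW(LW(1.0, 0.0), s1), CW(LW(2.0, 0.0), s2));
  // d.Weight() == LW(1.0, 0.0) (the lower cost); d.String() == {3, 5}.
  (void)d;
}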
/** Scales the pair (a, b) of floating-point weights inside a
CompactLatticeWeight by premultiplying it (viewed as a vector)
by a 2x2 matrix "scale".
Assumes there is a ScaleTupleWeight function that applies to "Weight";
this currently only works if Weight equals LatticeWeightTpl<FloatType>
for some FloatType.
*/
template <class Weight, class IntType, class ScaleFloatType>
inline CompactLatticeWeightTpl<Weight, IntType> ScaleTupleWeight(
const CompactLatticeWeightTpl<Weight, IntType> &w,
const vector<vector<ScaleFloatType> > &scale) {
return CompactLatticeWeightTpl<Weight, IntType>(
ScaleTupleWeight(w.Weight(), scale), w.String());
}
/** Define some ConvertLatticeWeight functions that are used in various lattice
conversions... make them all templates, some with no arguments, since some
must be templates.*/
template <class Float1, class Float2>
inline void ConvertLatticeWeight(const LatticeWeightTpl<Float1> &w_in,
LatticeWeightTpl<Float2> *w_out) {
w_out->SetValue1(w_in.Value1());
w_out->SetValue2(w_in.Value2());
}
template <class Float1, class Float2, class Int>
inline void ConvertLatticeWeight(
const CompactLatticeWeightTpl<LatticeWeightTpl<Float1>, Int> &w_in,
CompactLatticeWeightTpl<LatticeWeightTpl<Float2>, Int> *w_out) {
LatticeWeightTpl<Float2> weight2(w_in.Weight().Value1(),
w_in.Weight().Value2());
w_out->SetWeight(weight2);
w_out->SetString(w_in.String());
}
// to convert from Lattice to standard FST
template <class Float1, class Float2>
inline void ConvertLatticeWeight(const LatticeWeightTpl<Float1> &w_in,
TropicalWeightTpl<Float2> *w_out) {
TropicalWeightTpl<Float2> w1(w_in.Value1());
TropicalWeightTpl<Float2> w2(w_in.Value2());
*w_out = Times(w1, w2);
}
template <class Float>
inline double ConvertToCost(const LatticeWeightTpl<Float> &w) {
return static_cast<double>(w.Value1()) + static_cast<double>(w.Value2());
}
template <class Float, class Int>
inline double ConvertToCost(
const CompactLatticeWeightTpl<LatticeWeightTpl<Float>, Int> &w) {
return static_cast<double>(w.Weight().Value1()) +
static_cast<double>(w.Weight().Value2());
}
template <class Float>
inline double ConvertToCost(const TropicalWeightTpl<Float> &w) {
return w.Value();
}
} // end namespace fst
#endif // KALDI_FSTEXT_LATTICE_WEIGHT_H_
<|start_filename|>tonic-suite/asr/src/fgmmbin/fgmm-global-get-frame-likes.cc<|end_filename|>
// fgmmbin/fgmm-global-get-frame-likes.cc
// Copyright 2009-2011 Microsoft Corporation; Saarland University
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/model-common.h"
#include "gmm/full-gmm.h"
#include "gmm/diag-gmm.h"
#include "gmm/mle-full-gmm.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Print out per-frame log-likelihoods for each utterance, as an "
"archive\n"
"of vectors of floats. If --average=true, prints out the average "
"per-frame\n"
"log-likelihood for each utterance, as a single float.\n"
"Usage: fgmm-global-get-frame-likes [options] <model-in> "
"<feature-rspecifier> "
"<likes-out-wspecifier>\n"
"e.g.: fgmm-global-get-frame-likes 1.mdl scp:train.scp ark:1.likes\n";
ParseOptions po(usage);
bool average = false;
std::string gselect_rspecifier;
po.Register("gselect", &gselect_rspecifier,
"rspecifier for gselect objects "
"to limit the #Gaussians accessed on each frame.");
po.Register("average", &average,
"If true, print out the average per-frame "
"log-likelihood as a single float per utterance.");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string model_filename = po.GetArg(1),
feature_rspecifier = po.GetArg(2),
likes_wspecifier = po.GetArg(3);
FullGmm fgmm;
{
bool binary_read;
Input ki(model_filename, &binary_read);
fgmm.Read(ki.Stream(), binary_read);
}
double tot_like = 0.0, tot_frames = 0.0;
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
RandomAccessInt32VectorVectorReader gselect_reader(gselect_rspecifier);
BaseFloatVectorWriter likes_writer(average ? "" : likes_wspecifier);
BaseFloatWriter average_likes_writer(average ? likes_wspecifier : "");
int32 num_done = 0, num_err = 0;
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string key = feature_reader.Key();
const Matrix<BaseFloat> &mat = feature_reader.Value();
int32 file_frames = mat.NumRows();
Vector<BaseFloat> likes(file_frames);
if (gselect_rspecifier != "") {
if (!gselect_reader.HasKey(key)) {
KALDI_WARN << "No gselect information for utterance " << key;
num_err++;
continue;
}
const std::vector<std::vector<int32> > &gselect =
gselect_reader.Value(key);
if (gselect.size() != static_cast<size_t>(file_frames)) {
KALDI_WARN << "gselect information for utterance " << key
<< " has wrong size " << gselect.size() << " vs. "
<< file_frames;
num_err++;
continue;
}
for (int32 i = 0; i < file_frames; i++) {
SubVector<BaseFloat> data(mat, i);
const std::vector<int32> &this_gselect = gselect[i];
int32 gselect_size = this_gselect.size();
KALDI_ASSERT(gselect_size > 0);
Vector<BaseFloat> loglikes;
fgmm.LogLikelihoodsPreselect(data, this_gselect, &loglikes);
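// "loglikes" now holds, for each preselected Gaussian, its log-likelihood on
// this frame (including the mixture weight); LogSumExp() combines these into
// the frame's total log-likelihood under the pruned full-covariance GMM.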
likes(i) = loglikes.LogSumExp();
}
} else { // no gselect..
for (int32 i = 0; i < file_frames; i++)
likes(i) = fgmm.LogLikelihood(mat.Row(i));
}
tot_like += likes.Sum();
tot_frames += file_frames;
if (average)
average_likes_writer.Write(key, likes.Sum() / file_frames);
else
likes_writer.Write(key, likes);
num_done++;
}
KALDI_LOG << "Done " << num_done << " files; " << num_err
<< " with errors.";
KALDI_LOG << "Overall likelihood per "
<< "frame = " << (tot_like / tot_frames) << " over " << tot_frames
<< " frames.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/lat/confidence.cc<|end_filename|>
// lat/confidence.cc
// Copyright 2013 Johns Hopkins University (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "lat/confidence.h"
#include "lat/lattice-functions.h"
#include "lat/determinize-lattice-pruned.h"
namespace kaldi {
BaseFloat SentenceLevelConfidence(const CompactLattice &clat, int32 *num_paths,
std::vector<int32> *best_sentence,
std::vector<int32> *second_best_sentence) {
/* It may seem strange that the first thing we do is to convert the
CompactLattice to a Lattice, given that we may have just created the
CompactLattice by determinizing a Lattice. However, this is not just
a circular conversion; "lat" will have the property that distinct
paths have distinct word sequences.
Below, we could run NbestAsFsts on a CompactLattice, but the time
taken would be quadratic in the length in words of the CompactLattice,
because of the alignment information getting appended as vectors.
That's why we convert back to Lattice.
*/
Lattice lat;
ConvertLattice(clat, &lat);
std::vector<Lattice> lats;
NbestAsFsts(lat, 2, &lats);
int32 n = lats.size();
KALDI_ASSERT(n >= 0 && n <= 2);
if (num_paths != NULL) *num_paths = n;
if (best_sentence != NULL) best_sentence->clear();
if (second_best_sentence != NULL) second_best_sentence->clear();
LatticeWeight weight1, weight2;
if (n >= 1)
fst::GetLinearSymbolSequence<LatticeArc, int32>(lats[0], NULL,
best_sentence, &weight1);
if (n >= 2)
fst::GetLinearSymbolSequence<LatticeArc, int32>(
lats[1], NULL, second_best_sentence, &weight2);
if (n == 0) {
return 0; // this seems most appropriate because it will be interpreted as
// zero confidence, and something definitely went wrong for this
// to happen.
} else if (n == 1) {
// If there is only one sentence in the lattice, we interpret this as there
// being perfect confidence
return std::numeric_limits<BaseFloat>::infinity();
} else {
BaseFloat best_cost = ConvertToCost(weight1),
second_best_cost = ConvertToCost(weight2);
BaseFloat ans = second_best_cost - best_cost;
if (!(ans >= -0.001 * (fabs(best_cost) + fabs(second_best_cost)))) {
// Answer should be positive. Make sure it's at least not
// substantially negative. This would be very strange.
KALDI_WARN << "Very negative difference: " << ans;
}
if (ans < 0) ans = 0;
return ans;
}
}
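// For example, if the best path has cost 102.3 and the second-best path has
// cost 104.8, the confidence returned above is 104.8 - 102.3 = 2.5; a larger
// gap means less ambiguity about the best sentence.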
BaseFloat SentenceLevelConfidence(const Lattice &lat, int32 *num_paths,
std::vector<int32> *best_sentence,
std::vector<int32> *second_best_sentence) {
int32 max_sentence_length = LongestSentenceLength(lat);
fst::DeterminizeLatticePrunedOptions determinize_opts;
// The basic idea of expanding only up to "max_sentence_length * 2" arcs
// is that this should be sufficient to get the best and second-best paths
// through the lattice, which is all we need for this particular application.
// "safety_term" is just in case there is some reason why we might need a few
// extra arcs, e.g. in case of a tie on the weights of the second-best path.
int32 safety_term = 4 + max_sentence_length;
determinize_opts.max_arcs = max_sentence_length * 2 + safety_term;
// set prune_beam to a large value... we don't really rely on the beam; we
// rely on the max_arcs variable to limit the size of the lattice.
double prune_beam = std::numeric_limits<double>::infinity();
CompactLattice clat;
// We ignore the return status of DeterminizeLatticePruned. It will likely
// return false, but this is expected because the expansion is limited
// by "max_arcs" not "prune_beam".
Lattice inverse_lat(lat);
fst::Invert(&inverse_lat); // Swap input and output symbols.
DeterminizeLatticePruned(inverse_lat, prune_beam, &clat, determinize_opts);
// Call the version of this function that takes a CompactLattice.
return SentenceLevelConfidence(clat, num_paths, best_sentence,
second_best_sentence);
}
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/nnet/nnet-component-test.cc<|end_filename|>
// nnet/nnet-component-test.cc
// Copyright 2014 Brno University of Technology (author: <NAME>),
// The Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet/nnet-component.h"
#include "nnet/nnet-nnet.h"
#include "nnet/nnet-convolutional-component.h"
#include "nnet/nnet-convolutional-2d-component.h"
#include "nnet/nnet-max-pooling-component.h"
#include "nnet/nnet-max-pooling-2d-component.h"
#include "nnet/nnet-average-pooling-2d-component.h"
#include "util/common-utils.h"
#include <sstream>
#include <fstream>
#include <algorithm>
namespace kaldi {
namespace nnet1 {
void UnitTestConvolutionalComponent() {
ConvolutionalComponent* c = new ConvolutionalComponent(5, 5);
std::string comp_data_str =
"<PatchDim> 1 <PatchStep> 1 <PatchStride> 5 <Filters> [ 1 \n] <Bias> [ 0 "
"]\n";
std::istringstream is_comp_data(comp_data_str);
c->ReadData(is_comp_data, false);
std::string matrix_str = "[ 1 2 3 4 5 ] ";
std::istringstream is_matrix(matrix_str);
CuMatrix<BaseFloat> mat_in;
mat_in.Read(is_matrix, false);
// propagate
CuMatrix<BaseFloat> mat_out;
c->Propagate(mat_in, &mat_out);
KALDI_LOG << "mat_in" << mat_in << "mat_out" << mat_out;
AssertEqual(mat_in, mat_out);
// backpropagate
CuMatrix<BaseFloat> mat_out_diff(mat_in), mat_in_diff;
c->Backpropagate(mat_in, mat_out, mat_out_diff, &mat_in_diff);
KALDI_LOG << "mat_out_diff " << mat_out_diff << " mat_in_diff "
<< mat_in_diff;
AssertEqual(mat_out_diff, mat_in_diff);
// once again
c->Backpropagate(mat_in, mat_out, mat_out_diff, &mat_in_diff);
KALDI_LOG << "mat_out_diff " << mat_out_diff << " mat_in_diff "
<< mat_in_diff;
AssertEqual(mat_out_diff, mat_in_diff);
delete c;
}
void UnitTestMaxPooling2DComponent() {
std::string dim_str;
std::ifstream infile("/home/harish/kaldi_cnn_testfiles/avgpool1.txt");
std::getline(infile, dim_str);
std::stringstream stream(dim_str);
std::vector<int> dims;
int n;
while (stream >> n) {
dims.push_back(n);
}
std::string comp_data_str, matrix_str;
std::getline(infile, comp_data_str);
std::getline(infile, matrix_str);
MaxPooling2DComponent* c = new MaxPooling2DComponent(dims[0], dims[1]);
std::istringstream is_comp_data(comp_data_str);
c->ReadData(is_comp_data, false);
std::istringstream is_matrix(matrix_str);
CuMatrix<BaseFloat> mat_in;
mat_in.Read(is_matrix, false);
CuMatrix<BaseFloat> mat_out;
c->Propagate(mat_in, &mat_out);
KALDI_LOG << "mat_out " << mat_out;
std::string mat_out_diff_str;
std::getline(infile, mat_out_diff_str);
std::istringstream is_mat_out_diff(mat_out_diff_str);
CuMatrix<BaseFloat> out_diff, in_diff;
out_diff.Read(is_mat_out_diff, false);
c->Backpropagate(mat_in, mat_out, out_diff, &in_diff);
KALDI_LOG << "out_diff" << out_diff;
KALDI_LOG << "in_diff " << in_diff;
delete c;
}
void UnitTestAveragePooling2DComponent() {
std::string dim_str;
std::ifstream infile("/home/harish/kaldi_cnn_testfiles/avgpool1.txt");
std::getline(infile, dim_str);
std::stringstream stream(dim_str);
std::vector<int> dims;
int n;
while (stream >> n) {
dims.push_back(n);
}
std::string comp_data_str, matrix_str;
std::getline(infile, comp_data_str);
std::getline(infile, matrix_str);
AveragePooling2DComponent* c =
new AveragePooling2DComponent(dims[0], dims[1]);
std::istringstream is_comp_data(comp_data_str);
c->ReadData(is_comp_data, false);
std::istringstream is_matrix(matrix_str);
CuMatrix<BaseFloat> mat_in;
mat_in.Read(is_matrix, false);
CuMatrix<BaseFloat> mat_out;
c->Propagate(mat_in, &mat_out);
KALDI_LOG << "mat_out " << mat_out;
std::string mat_out_diff_str;
std::getline(infile, mat_out_diff_str);
std::istringstream is_mat_out_diff(mat_out_diff_str);
CuMatrix<BaseFloat> out_diff, in_diff;
out_diff.Read(is_mat_out_diff, false);
c->Backpropagate(mat_in, mat_out, out_diff, &in_diff);
KALDI_LOG << "out_diff" << out_diff;
KALDI_LOG << "in_diff " << in_diff;
delete c;
}
void UnitTestMaxPoolingComponent() {
MaxPoolingComponent* m = new MaxPoolingComponent(9, 7);
std::string comp_data_str = "<PoolSize> 3 <PoolStep> 1 <PoolStride> 1 \n";
std::istringstream is_comp_data(comp_data_str);
m->ReadData(is_comp_data, false);
std::string matrix_str =
"[ 1 2 1 1 2 1 1 2 1 ; 2 3 2 2 3 2 2 3 2 ; 2 2 2 1 2 1 1 2 1 ; 1 2 3 1 4 "
"1 1 2 1 ] ";
std::istringstream is_matrix(matrix_str);
// expected output
std::string exp_out_str = "[ 2 2 2 ; 3 3 3 ] ";
std::istringstream is_exp_out_str(exp_out_str);
CuMatrix<BaseFloat> mat_exp;
mat_exp.Read(is_exp_out_str, false);
CuMatrix<BaseFloat> mat_in;
CuMatrix<BaseFloat> mat_out;
CuMatrix<BaseFloat> inp_diff;
mat_in.Read(is_matrix, false);
KALDI_LOG << mat_in.ColRange(0, 2);
m->Propagate(mat_in, &mat_out);
KALDI_LOG << "mat_in" << mat_in << "mat_out" << mat_out << "mat_exp"
<< mat_exp;
m->Backpropagate(mat_in, mat_out, mat_out, &inp_diff);
KALDI_LOG << inp_diff;
// KALDI_LOG << "mat_in" << mat_in << "mat_out" << mat_out << "mat_exp" <<
// mat_exp;
// AssertEqual(mat_out, mat_exp);
delete m;
}
void UnitTestConvolutional2DComponent() {
std::string dim_str;
std::ifstream infile("/home/harish/kaldi_cnn_testfiles/filt6.txt");
std::getline(infile, dim_str);
std::stringstream stream(dim_str);
std::vector<int> dims;
int n;
while (stream >> n) {
dims.push_back(n);
}
std::string comp_data_str, matrix_str;
std::getline(infile, comp_data_str);
std::getline(infile, matrix_str);
Convolutional2DComponent* c = new Convolutional2DComponent(dims[0], dims[1]);
std::istringstream is_comp_data(comp_data_str);
c->ReadData(is_comp_data, false);
std::istringstream is_matrix(matrix_str);
CuMatrix<BaseFloat> mat_in;
mat_in.Read(is_matrix, false);
CuMatrix<BaseFloat> mat_out;
c->Propagate(mat_in, &mat_out);
KALDI_LOG << "mat_out " << mat_out;
std::string mat_out_diff_str;
std::getline(infile, mat_out_diff_str);
std::istringstream is_mat_out_diff(mat_out_diff_str);
CuMatrix<BaseFloat> out_diff, in_diff;
out_diff.Read(is_mat_out_diff, false);
// CuMatrix<BaseFloat> out_diff(mat_out), in_diff;
c->Backpropagate(mat_in, mat_out, out_diff, &in_diff);
KALDI_LOG << "out_diff" << out_diff;
KALDI_LOG << "in_diff " << in_diff;
c->Update(mat_in, out_diff);
delete c;
}
void UnitTestMatOperations() {
// CuMatrix<BaseFloat> A;
Vector<BaseFloat> v(10), w(9);
CuVector<BaseFloat> b(9);
CuArray<int32> id;
for (int i = 0; i < 9; i++) {
v(i) = i;
w(i) = i + 1;
}
Matrix<BaseFloat> M(10, 9);
Matrix<BaseFloat> W(10, 9);
CuMatrix<BaseFloat> A, B(10, 9);
M.AddVecVec(1.0, v, w);
A = M;
B.Set(-1e20);
B.Max(A);
A.FindRowMaxId(&id);
CuMatrix<BaseFloat> C(A);
C.Set(2);
KALDI_LOG << "C=" << C;
KALDI_LOG << "A=" << A;
// KALDI_LOG << "id=" << id;
// KALDI_LOG << "A " << B.Max(A);
// b.AddRowSumMat(1.0, A, 0.0);
// KALDI_LOG << b;
// b.AddRowSumMat(1.0, A, 0.0);
// KALDI_LOG << b;
// CuSubMatrix<BaseFloat> As(A.ColRange(0,1));
// KALDI_LOG << "As " << As;
// std::vector<MatrixIndexT> id(2,4);
// CuMatrix<BaseFloat> B;
// B.Resize(A.NumRows(), 2, kSetZero);
// B.CopyCols(A, id);
// KALDI_LOG << "B " << B ;
// KALDI_LOG << "Sum="<< B.Sum();
// Matrix<BaseFloat> C(2,2), D(2,2), E(2,2);
// Vector<BaseFloat> c(2);
// c(0)=1;c(1)=2;
// C.AddVecVec(1.0,c,c);
// KALDI_LOG << "C " << C;
// D(1,1)=1;
// // KALDI_LOG << "D " <<D;
// // C.MulElements(D);
// // KALDI_LOG << "C " << C;
// CuMatrix<BaseFloat> CuC, CuD;
// CuC = C;
// CuD = D;
// KALDI_LOG << "CuC " << CuC;
// CuC.MulElements(CuD);
// KALDI_LOG << "CuC " << CuC;
// KALDI_LOG << "Sum=" << CuC.Sum();
}
} // namespace nnet1
} // namespace kaldi
int main() {
using namespace kaldi;
using namespace kaldi::nnet1;
for (int32 loop = 0; loop < 2; loop++) {
#if HAVE_CUDA == 1
if (loop == 0)
CuDevice::Instantiate().SelectGpuId("no"); // use no GPU
else
CuDevice::Instantiate().SelectGpuId(
"optional"); // use GPU when available
#endif
// unit-tests :
UnitTestConvolutionalComponent();
UnitTestMaxPoolingComponent();
// UnitTestConvolutional2DComponent();
// UnitTestMatOperations();
// UnitTestMaxPooling2DComponent();
// UnitTestAveragePooling2DComponent();
// end of unit-tests,
if (loop == 0)
KALDI_LOG << "Tests without GPU use succeeded.";
else
KALDI_LOG << "Tests with GPU use (if available) succeeded.";
}
#if HAVE_CUDA == 1
CuDevice::Instantiate().PrintProfile();
#endif
return 0;
}
<|start_filename|>tonic-suite/asr/src/ivectorbin/ivector-compute-lda.cc<|end_filename|>
// ivectorbin/ivector-compute-lda.cc
// Copyright 2013 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "ivector/ivector-extractor.h"
#include "thread/kaldi-task-sequence.h"
namespace kaldi {
class CovarianceStats {
public:
CovarianceStats(int32 dim)
: tot_covar_(dim), between_covar_(dim), num_spk_(0), num_utt_(0) {}
/// get total covariance, normalized per number of frames.
void GetTotalCovar(SpMatrix<double> *tot_covar) const {
KALDI_ASSERT(num_utt_ > 0);
*tot_covar = tot_covar_;
tot_covar->Scale(1.0 / num_utt_);
}
void GetWithinCovar(SpMatrix<double> *within_covar) {
KALDI_ASSERT(num_utt_ - num_spk_ > 0);
*within_covar = tot_covar_;
within_covar->AddSp(-1.0, between_covar_);
within_covar->Scale(1.0 / num_utt_);
}
void AccStats(const Matrix<double> &utts_of_this_spk) {
int32 num_utts = utts_of_this_spk.NumRows();
tot_covar_.AddMat2(1.0, utts_of_this_spk, kTrans, 1.0);
Vector<double> spk_average(Dim());
spk_average.AddRowSumMat(1.0 / num_utts, utts_of_this_spk);
between_covar_.AddVec2(num_utts, spk_average);
num_utt_ += num_utts;
num_spk_ += 1;
}
/// Will return Empty() if the within-class covariance matrix would be zero.
bool SingularTotCovar() { return (num_utt_ < Dim()); }
bool Empty() { return (num_utt_ - num_spk_ == 0); }
std::string Info() {
std::ostringstream ostr;
ostr << num_spk_ << " speakers, " << num_utt_ << " utterances. ";
return ostr.str();
}
int32 Dim() { return tot_covar_.NumRows(); }
// Use default constructor and assignment operator.
void AddStats(const CovarianceStats &other) {
tot_covar_.AddSp(1.0, other.tot_covar_);
between_covar_.AddSp(1.0, other.between_covar_);
num_spk_ += other.num_spk_;
num_utt_ += other.num_utt_;
}
private:
KALDI_DISALLOW_COPY_AND_ASSIGN(CovarianceStats);
SpMatrix<double> tot_covar_;
SpMatrix<double> between_covar_;
int32 num_spk_;
int32 num_utt_;
};
template <class Real>
void ComputeNormalizingTransform(const SpMatrix<Real> &covar,
MatrixBase<Real> *proj) {
int32 dim = covar.NumRows();
TpMatrix<Real> C(dim); // Cholesky of covar, covar = C C^T
C.Cholesky(covar);
C.Invert(); // The matrix that makes covar unit is C^{-1}, because
// C^{-1} covar C^{-T} = C^{-1} C C^T C^{-T} = I.
proj->CopyFromTp(C, kNoTrans); // set "proj" to C^{-1}.
}
void ComputeLdaTransform(
const std::map<std::string, Vector<BaseFloat> *> &utt2ivector,
const std::map<std::string, std::vector<std::string> > &spk2utt,
BaseFloat total_covariance_factor, MatrixBase<BaseFloat> *lda_out) {
KALDI_ASSERT(!utt2ivector.empty());
int32 lda_dim = lda_out->NumRows(), dim = lda_out->NumCols();
KALDI_ASSERT(dim == utt2ivector.begin()->second->Dim());
KALDI_ASSERT(lda_dim > 0 && lda_dim <= dim);
CovarianceStats stats(dim);
std::map<std::string, std::vector<std::string> >::const_iterator iter;
for (iter = spk2utt.begin(); iter != spk2utt.end(); ++iter) {
const std::vector<std::string> &uttlist = iter->second;
KALDI_ASSERT(!uttlist.empty());
int32 N = uttlist.size(); // number of utterances.
Matrix<double> utts_of_this_spk(N, dim);
for (int32 n = 0; n < N; n++) {
std::string utt = uttlist[n];
KALDI_ASSERT(utt2ivector.count(utt) != 0);
utts_of_this_spk.Row(n).CopyFromVec(*(utt2ivector.find(utt)->second));
}
stats.AccStats(utts_of_this_spk);
}
KALDI_LOG << "Stats have " << stats.Info();
KALDI_ASSERT(!stats.Empty());
KALDI_ASSERT(!stats.SingularTotCovar() &&
"Too little data for iVector dimension.");
SpMatrix<double> total_covar;
stats.GetTotalCovar(&total_covar);
SpMatrix<double> within_covar;
stats.GetWithinCovar(&within_covar);
SpMatrix<double> mat_to_normalize(dim);
mat_to_normalize.AddSp(total_covariance_factor, total_covar);
mat_to_normalize.AddSp(1.0 - total_covariance_factor, within_covar);
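// mat_to_normalize = total_covariance_factor * total_covar
//                    + (1.0 - total_covariance_factor) * within_covar;
// a factor of 0.0 whitens the within-class covariance (standard LDA), while
// a factor of 1.0 whitens the total covariance.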
Matrix<double> T(dim, dim);
ComputeNormalizingTransform(mat_to_normalize, &T);
SpMatrix<double> between_covar(total_covar);
between_covar.AddSp(-1.0, within_covar);
SpMatrix<double> between_covar_proj(dim);
between_covar_proj.AddMat2Sp(1.0, T, kNoTrans, between_covar, 0.0);
Matrix<double> U(dim, dim);
Vector<double> s(dim);
between_covar_proj.Eig(&s, &U);
bool sort_on_absolute_value = false; // any negative ones will go last (they
// shouldn't exist anyway so doesn't
// really matter)
SortSvd(&s, &U, static_cast<Matrix<double> *>(NULL), sort_on_absolute_value);
KALDI_LOG << "Singular values of between-class covariance after projecting "
<< "with interpolated [total/within] covariance with a weight of "
<< total_covariance_factor
<< " on the total covariance, are: " << s;
// U^T is the transform that will diagonalize the between-class covariance.
// U_part is just the part of U that corresponds to the kept dimensions.
SubMatrix<double> U_part(U, 0, dim, 0, lda_dim);
// We first transform by T and then by U_part^T. This means T
// goes on the right.
Matrix<double> temp(lda_dim, dim);
temp.AddMatMat(1.0, U_part, kTrans, T, kNoTrans, 0.0);
lda_out->CopyFromMat(temp);
}
void ComputeAndSubtractMean(
std::map<std::string, Vector<BaseFloat> *> utt2ivector,
Vector<BaseFloat> *mean_out) {
int32 dim = utt2ivector.begin()->second->Dim();
size_t num_ivectors = utt2ivector.size();
Vector<double> mean(dim);
std::map<std::string, Vector<BaseFloat> *>::iterator iter;
for (iter = utt2ivector.begin(); iter != utt2ivector.end(); ++iter)
mean.AddVec(1.0 / num_ivectors, *(iter->second));
mean_out->Resize(dim);
mean_out->CopyFromVec(mean);
for (iter = utt2ivector.begin(); iter != utt2ivector.end(); ++iter)
iter->second->AddVec(-1.0, *mean_out);
}
}
int main(int argc, char *argv[]) {
using namespace kaldi;
typedef kaldi::int32 int32;
try {
const char *usage =
"Compute an LDA matrix for iVector system. Reads in iVectors per "
"utterance,\n"
"and an utt2spk file which it uses to help work out the within-speaker "
"and\n"
"between-speaker covariance matrices. Outputs an LDA projection to a\n"
"specified dimension. By default it will normalize so that the "
"projected\n"
"within-class covariance is unit, but if you set "
"--normalize-total-covariance\n"
"to true, it will normalize the total covariance.\n"
"Note: the transform we produce is actually an affine transform which "
"will\n"
"also set the global mean to zero.\n"
"\n"
"Usage: ivector-compute-lda [options] <ivector-rspecifier> "
"<utt2spk-rspecifier> "
"<lda-matrix-out>\n"
"e.g.: \n"
" ivector-compute-lda ark:ivectors.ark ark:utt2spk lda.mat\n";
ParseOptions po(usage);
int32 lda_dim = 100; // Dimension we reduce to
BaseFloat total_covariance_factor = 0.0;
bool binary = true;
po.Register("dim", &lda_dim, "Dimension we keep with the LDA transform");
po.Register(
"total-covariance-factor", &total_covariance_factor,
"If this is 0.0 we normalize to make the within-class covariance "
"unit; if 1.0, the total covariance; if between, we normalize "
"an interpolated matrix.");
po.Register("binary", &binary, "Write output in binary mode");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string ivector_rspecifier = po.GetArg(1),
utt2spk_rspecifier = po.GetArg(2),
lda_wxfilename = po.GetArg(3);
int32 num_done = 0, num_err = 0, dim = 0;
SequentialBaseFloatVectorReader ivector_reader(ivector_rspecifier);
RandomAccessTokenReader utt2spk_reader(utt2spk_rspecifier);
std::map<std::string, Vector<BaseFloat> *> utt2ivector;
std::map<std::string, std::vector<std::string> > spk2utt;
for (; !ivector_reader.Done(); ivector_reader.Next()) {
std::string utt = ivector_reader.Key();
const Vector<BaseFloat> &ivector = ivector_reader.Value();
if (utt2ivector.count(utt) != 0) {
KALDI_WARN << "Duplicate iVector found for utterance " << utt
<< ", ignoring it.";
num_err++;
continue;
}
if (!utt2spk_reader.HasKey(utt)) {
KALDI_WARN << "utt2spk has no entry for utterance " << utt
<< ", skipping it.";
num_err++;
continue;
}
std::string spk = utt2spk_reader.Value(utt);
utt2ivector[utt] = new Vector<BaseFloat>(ivector);
if (dim == 0) {
dim = ivector.Dim();
} else {
KALDI_ASSERT(dim == ivector.Dim() && "iVector dimension mismatch");
}
spk2utt[spk].push_back(utt);
num_done++;
}
KALDI_LOG << "Read " << num_done << " utterances, " << num_err
<< " with errors.";
if (num_done == 0) {
KALDI_ERR << "Did not read any utterances.";
} else {
KALDI_LOG << "Computing within-class covariance.";
}
Vector<BaseFloat> mean;
ComputeAndSubtractMean(utt2ivector, &mean);
KALDI_LOG << "2-norm of iVector mean is " << mean.Norm(2.0);
Matrix<BaseFloat> lda_mat(lda_dim,
dim + 1);  // LDA matrix plus a column for the offset term.
SubMatrix<BaseFloat> linear_part(lda_mat, 0, lda_dim, 0, dim);
ComputeLdaTransform(utt2ivector, spk2utt, total_covariance_factor,
&linear_part);
Vector<BaseFloat> offset(lda_dim);
offset.AddMatVec(-1.0, linear_part, kNoTrans, mean, 0.0);
lda_mat.CopyColFromVec(offset, dim); // add mean-offset to transform
KALDI_VLOG(2) << "2-norm of transformed iVector mean is "
<< offset.Norm(2.0);
WriteKaldiObject(lda_mat, lda_wxfilename, binary);
KALDI_LOG << "Wrote LDA transform to "
<< PrintableWxfilename(lda_wxfilename);
std::map<std::string, Vector<BaseFloat> *>::iterator iter;
for (iter = utt2ivector.begin(); iter != utt2ivector.end(); ++iter)
delete iter->second;
utt2ivector.clear();
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-example.cc<|end_filename|>
// nnet/nnet-example.cc
// Copyright 2012-2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet2/nnet-example.h"
#include "lat/lattice-functions.h"
#include "hmm/posterior.h"
namespace kaldi {
namespace nnet2 {
void NnetExample::Write(std::ostream &os, bool binary) const {
// Note: weight, label, input_frames and spk_info are members. This is a
// struct.
WriteToken(os, binary, "<NnetExample>");
WriteToken(os, binary, "<Labels>");
int32 size = labels.size();
WriteBasicType(os, binary, size);
for (int32 i = 0; i < size; i++) {
WriteBasicType(os, binary, labels[i].first);
WriteBasicType(os, binary, labels[i].second);
}
WriteToken(os, binary, "<InputFrames>");
input_frames.Write(os, binary); // can be read as regular Matrix.
WriteToken(os, binary, "<LeftContext>");
WriteBasicType(os, binary, left_context);
WriteToken(os, binary, "<SpkInfo>");
spk_info.Write(os, binary);
WriteToken(os, binary, "</NnetExample>");
}
void NnetExample::Read(std::istream &is, bool binary) {
// Note: weight, label, input_frames, left_context and spk_info are members.
// This is a struct.
ExpectToken(is, binary, "<NnetExample>");
ExpectToken(is, binary, "<Labels>");
int32 size;
ReadBasicType(is, binary, &size);
labels.resize(size);
for (int32 i = 0; i < size; i++) {
ReadBasicType(is, binary, &(labels[i].first));
ReadBasicType(is, binary, &(labels[i].second));
}
ExpectToken(is, binary, "<InputFrames>");
input_frames.Read(is, binary);
ExpectToken(is, binary, "<LeftContext>"); // Note: this member is
// recently added, but I don't think we'll get too much back-compatibility
// problems from not handling the old format.
ReadBasicType(is, binary, &left_context);
ExpectToken(is, binary, "<SpkInfo>");
spk_info.Read(is, binary);
ExpectToken(is, binary, "</NnetExample>");
}
void ExamplesRepository::AcceptExamples(std::vector<NnetExample> *examples) {
KALDI_ASSERT(!examples->empty());
empty_semaphore_.Wait();
KALDI_ASSERT(examples_.empty());
examples_.swap(*examples);
full_semaphore_.Signal();
}
void ExamplesRepository::ExamplesDone() {
empty_semaphore_.Wait();
KALDI_ASSERT(examples_.empty());
done_ = true;
full_semaphore_.Signal();
}
bool ExamplesRepository::ProvideExamples(std::vector<NnetExample> *examples) {
full_semaphore_.Wait();
if (done_) {
KALDI_ASSERT(examples_.empty());
full_semaphore_.Signal(); // Increment the semaphore so
// the call by the next thread will not block.
return false; // no examples to return-- all finished.
} else {
KALDI_ASSERT(!examples_.empty() && examples->empty());
examples->swap(examples_);
empty_semaphore_.Signal();
return true;
}
}
void DiscriminativeNnetExample::Write(std::ostream &os, bool binary) const {
// Note: weight, num_ali, den_lat, input_frames, left_context and spk_info are
// members. This is a struct.
WriteToken(os, binary, "<DiscriminativeNnetExample>");
WriteToken(os, binary, "<Weight>");
WriteBasicType(os, binary, weight);
WriteToken(os, binary, "<NumAli>");
WriteIntegerVector(os, binary, num_ali);
if (!WriteCompactLattice(os, binary, den_lat)) {
// We can't return error status from this function so we
// throw an exception.
KALDI_ERR << "Error writing CompactLattice to stream";
}
WriteToken(os, binary, "<InputFrames>");
{
CompressedMatrix cm(input_frames); // Note: this can be read as a regular
// matrix.
cm.Write(os, binary);
}
WriteToken(os, binary, "<LeftContext>");
WriteBasicType(os, binary, left_context);
WriteToken(os, binary, "<SpkInfo>");
spk_info.Write(os, binary);
WriteToken(os, binary, "</DiscriminativeNnetExample>");
}
void DiscriminativeNnetExample::Read(std::istream &is, bool binary) {
// Note: weight, num_ali, den_lat, input_frames, left_context and spk_info are
// members. This is a struct.
ExpectToken(is, binary, "<DiscriminativeNnetExample>");
ExpectToken(is, binary, "<Weight>");
ReadBasicType(is, binary, &weight);
ExpectToken(is, binary, "<NumAli>");
ReadIntegerVector(is, binary, &num_ali);
CompactLattice *den_lat_tmp = NULL;
if (!ReadCompactLattice(is, binary, &den_lat_tmp) || den_lat_tmp == NULL) {
// We can't return error status from this function so we
// throw an exception.
KALDI_ERR << "Error reading CompactLattice from stream";
}
den_lat = *den_lat_tmp;
delete den_lat_tmp;
ExpectToken(is, binary, "<InputFrames>");
input_frames.Read(is, binary);
ExpectToken(is, binary, "<LeftContext>");
ReadBasicType(is, binary, &left_context);
ExpectToken(is, binary, "<SpkInfo>");
spk_info.Read(is, binary);
ExpectToken(is, binary, "</DiscriminativeNnetExample>");
}
void DiscriminativeNnetExample::Check() const {
KALDI_ASSERT(weight > 0.0);
KALDI_ASSERT(!num_ali.empty());
int32 num_frames = static_cast<int32>(num_ali.size());
std::vector<int32> times;
int32 num_frames_den = CompactLatticeStateTimes(den_lat, &times);
KALDI_ASSERT(num_frames == num_frames_den);
KALDI_ASSERT(input_frames.NumRows() >= left_context + num_frames);
}
} // namespace nnet2
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/fgmmbin/fgmm-global-acc-stats.cc<|end_filename|>
// fgmmbin/fgmm-global-acc-stats.cc
// Copyright 2009-2011 Microsoft Corporation; Saarland University
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/model-common.h"
#include "gmm/full-gmm.h"
#include "gmm/diag-gmm.h"
#include "gmm/mle-full-gmm.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Accumulate stats for training a full-covariance GMM.\n"
"Usage: fgmm-global-acc-stats [options] <model-in> "
"<feature-rspecifier> "
"<stats-out>\n"
"e.g.: fgmm-global-acc-stats 1.mdl scp:train.scp 1.acc\n";
ParseOptions po(usage);
bool binary = true;
std::string update_flags_str = "mvw";
std::string gselect_rspecifier, weights_rspecifier;
po.Register("binary", &binary, "Write output in binary mode");
po.Register("update-flags", &update_flags_str,
"Which GMM parameters will be "
"updated: subset of mvw.");
po.Register("gselect", &gselect_rspecifier,
"rspecifier for gselect objects "
"to limit the #Gaussians accessed on each frame.");
po.Register("weights", &weights_rspecifier,
"rspecifier for a vector of floats "
"for each utterance, that's a per-frame weight.");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string model_filename = po.GetArg(1),
feature_rspecifier = po.GetArg(2),
accs_wxfilename = po.GetArg(3);
FullGmm fgmm;
{
bool binary_read;
Input ki(model_filename, &binary_read);
fgmm.Read(ki.Stream(), binary_read);
}
AccumFullGmm fgmm_accs;
fgmm_accs.Resize(fgmm, StringToGmmFlags(update_flags_str));
double tot_like = 0.0, tot_weight = 0.0;
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
RandomAccessInt32VectorVectorReader gselect_reader(gselect_rspecifier);
RandomAccessBaseFloatVectorReader weights_reader(weights_rspecifier);
int32 num_done = 0, num_err = 0;
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string key = feature_reader.Key();
const Matrix<BaseFloat> &mat = feature_reader.Value();
int32 file_frames = mat.NumRows();
BaseFloat file_like = 0.0,
file_weight =
                    0.0;  // total of weights of frames (will each be 1 unless
                          // the --weights option is supplied).
Vector<BaseFloat> weights;
if (weights_rspecifier != "") { // We have per-frame weighting.
if (!weights_reader.HasKey(key)) {
KALDI_WARN << "No per-frame weights available for utterance " << key;
num_err++;
continue;
}
weights = weights_reader.Value(key);
if (weights.Dim() != file_frames) {
KALDI_WARN << "Weights for utterance " << key << " have wrong dim "
<< weights.Dim() << " vs. " << file_frames;
num_err++;
continue;
}
}
if (gselect_rspecifier != "") {
if (!gselect_reader.HasKey(key)) {
KALDI_WARN << "No gselect information for utterance " << key;
num_err++;
continue;
}
const std::vector<std::vector<int32> > &gselect =
gselect_reader.Value(key);
if (gselect.size() != static_cast<size_t>(file_frames)) {
KALDI_WARN << "gselect information for utterance " << key
<< " has wrong size " << gselect.size() << " vs. "
<< file_frames;
num_err++;
continue;
}
for (int32 i = 0; i < file_frames; i++) {
BaseFloat weight = (weights.Dim() != 0) ? weights(i) : 1.0;
if (weight == 0.0) continue;
file_weight += weight;
SubVector<BaseFloat> data(mat, i);
const std::vector<int32> &this_gselect = gselect[i];
int32 gselect_size = this_gselect.size();
KALDI_ASSERT(gselect_size > 0);
Vector<BaseFloat> loglikes;
fgmm.LogLikelihoodsPreselect(data, this_gselect, &loglikes);
file_like += weight * loglikes.ApplySoftMax();
loglikes.Scale(weight);
for (int32 j = 0; j < loglikes.Dim(); j++)
fgmm_accs.AccumulateForComponent(data, this_gselect[j],
loglikes(j));
}
} else { // no gselect...
for (int32 i = 0; i < file_frames; i++) {
BaseFloat weight = (weights.Dim() != 0) ? weights(i) : 1.0;
if (weight == 0.0) continue;
file_weight += weight;
file_like +=
weight * fgmm_accs.AccumulateFromFull(fgmm, mat.Row(i), weight);
}
}
KALDI_VLOG(2) << "File '" << key
<< "': Average likelihood = " << (file_like / file_weight)
<< " over " << file_weight << " frames.";
tot_like += file_like;
tot_weight += file_weight;
num_done++;
}
KALDI_LOG << "Done " << num_done << " files; " << num_err
<< " with errors.";
KALDI_LOG << "Overall likelihood per "
<< "frame = " << (tot_like / tot_weight) << " over " << tot_weight
<< " (weighted) frames.";
WriteKaldiObject(fgmm_accs, accs_wxfilename, binary);
KALDI_LOG << "Written accs to " << accs_wxfilename;
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/gmm/mle-full-gmm.h<|end_filename|>
// gmm/mle-full-gmm.h
// Copyright 2009-2011 <NAME>; Saarland University;
// Microsoft Corporation;
// Univ. <NAME>, <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_GMM_MLE_FULL_GMM_H_
#define KALDI_GMM_MLE_FULL_GMM_H_
#include <vector>
#include "gmm/model-common.h"
#include "gmm/full-gmm.h"
#include "gmm/full-gmm-normal.h"
#include "gmm/mle-diag-gmm.h" // for AugmentGmmFlags()
namespace kaldi {
/** \struct MleFullGmmOptions
* Configuration variables like variance floor, minimum occupancy, etc.
* needed in the estimation process.
*/
struct MleFullGmmOptions {
/// Minimum weight below which a Gaussian is removed
BaseFloat min_gaussian_weight;
/// Minimum occupancy count below which a Gaussian is removed
BaseFloat min_gaussian_occupancy;
/// Floor on eigenvalues of covariance matrices
BaseFloat variance_floor;
/// Maximum condition number of covariance matrices (apply
/// floor to eigenvalues if they pass this).
BaseFloat max_condition;
bool remove_low_count_gaussians;
MleFullGmmOptions() {
min_gaussian_weight = 1.0e-05;
min_gaussian_occupancy = 100.0;
variance_floor = 0.001;
max_condition = 1.0e+04;
remove_low_count_gaussians = true;
}
void Register(OptionsItf *po) {
std::string module = "MleFullGmmOptions: ";
po->Register("min-gaussian-weight", &min_gaussian_weight,
module + "Min Gaussian weight before we remove it.");
po->Register("min-gaussian-occupancy", &min_gaussian_occupancy,
module + "Minimum count before we remove a Gaussian.");
po->Register("variance-floor", &variance_floor,
module + "Minimum eigenvalue of covariance matrix.");
po->Register(
"max-condition", &max_condition,
module +
"Maximum condition number of covariance matrix (use it to floor).");
po->Register(
"remove-low-count-gaussians", &remove_low_count_gaussians,
module + "If true, remove Gaussians that fall below the floors.");
}
};
/** Class for computing the maximum-likelihood estimates of the parameters of
 * a full-covariance Gaussian mixture model.  (A brief usage sketch appears
 * near the end of this header.)
 */
class AccumFullGmm {
public:
AccumFullGmm() : dim_(0), num_comp_(0), flags_(0) {}
AccumFullGmm(int32 num_comp, int32 dim, GmmFlagsType flags)
: dim_(0), num_comp_(0), flags_(0) {
Resize(num_comp, dim, flags);
}
explicit AccumFullGmm(const FullGmm &gmm, GmmFlagsType flags) {
Resize(gmm, flags);
}
// provide copy constructor.
explicit AccumFullGmm(const AccumFullGmm &other);
void Read(std::istream &in_stream, bool binary, bool add);
void Write(std::ostream &out_stream, bool binary) const;
/// Allocates memory for accumulators
void Resize(int32 num_components, int32 dim, GmmFlagsType flags);
/// Calls Resize with arguments based on gmm_ptr_
void Resize(const FullGmm &gmm, GmmFlagsType flags);
void ResizeVarAccumulator(int32 num_comp, int32 dim);
/// Returns the number of mixture components
int32 NumGauss() const { return num_comp_; }
/// Returns the dimensionality of the feature vectors
int32 Dim() const { return dim_; }
void SetZero(GmmFlagsType flags);
void Scale(BaseFloat f, GmmFlagsType flags); // scale stats.
/// Accumulate for a single component, given the posterior
void AccumulateForComponent(const VectorBase<BaseFloat> &data,
int32 comp_index, BaseFloat weight);
/// Accumulate for all components, given the posteriors.
void AccumulateFromPosteriors(const VectorBase<BaseFloat> &data,
const VectorBase<BaseFloat> &gauss_posteriors);
/// Accumulate for all components given a full-covariance GMM.
/// Computes posteriors and returns log-likelihood
BaseFloat AccumulateFromFull(const FullGmm &gmm,
const VectorBase<BaseFloat> &data,
BaseFloat frame_posterior);
/// Accumulate for all components given a diagonal-covariance GMM.
/// Computes posteriors and returns log-likelihood
BaseFloat AccumulateFromDiag(const DiagGmm &gmm,
const VectorBase<BaseFloat> &data,
BaseFloat frame_posterior);
/// Accessors
const GmmFlagsType Flags() const { return flags_; }
const Vector<double> &occupancy() const { return occupancy_; }
const Matrix<double> &mean_accumulator() const { return mean_accumulator_; }
const std::vector<SpMatrix<double> > &covariance_accumulator() const {
return covariance_accumulator_;
}
private:
int32 dim_;
int32 num_comp_;
GmmFlagsType flags_;
Vector<double> occupancy_;
Matrix<double> mean_accumulator_;
std::vector<SpMatrix<double> > covariance_accumulator_;
};
inline void AccumFullGmm::Resize(const FullGmm &gmm, GmmFlagsType flags) {
Resize(gmm.NumGauss(), gmm.Dim(), flags);
}
/// for computing the maximum-likelihood estimates of the parameters of a
/// Gaussian mixture model. Update using the FullGmm exponential form
void MleFullGmmUpdate(const MleFullGmmOptions &config,
const AccumFullGmm &fullgmm_acc, GmmFlagsType flags,
FullGmm *gmm, BaseFloat *obj_change_out,
BaseFloat *count_out);
/// Calc using the DiagGMM exponential form
BaseFloat MlObjective(const FullGmm &gmm, const AccumFullGmm &fullgmm_acc);
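// A minimal usage sketch (illustrative, not part of the library): the names
// fgmm and feats are hypothetical; only declarations from this header and
// from full-gmm.h / model-common.h are assumed.
//
//   AccumFullGmm accs(fgmm.NumGauss(), fgmm.Dim(), kGmmAll);
//   for (int32 t = 0; t < feats.NumRows(); t++)
//     accs.AccumulateFromFull(fgmm, feats.Row(t), 1.0);  // unit frame weight
//   BaseFloat obj_change, count;
//   MleFullGmmUpdate(MleFullGmmOptions(), accs, kGmmAll, &fgmm,
//                    &obj_change, &count);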
} // End namespace kaldi
#endif // KALDI_GMM_MLE_FULL_GMM_H_
<|start_filename|>tonic-suite/asr/src/nnet/nnet-randomizer-test.cc<|end_filename|>
// nnet/nnet-randomizer-test.cc
// Copyright 2013 Brno University of Technology (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet/nnet-randomizer.h"
#include <numeric>
#include <vector>
#include <algorithm>
using namespace kaldi;
using namespace kaldi::nnet1;
//////////////////////////////////////////////////
template <class Real>
static void InitRand(VectorBase<Real> *v) {
for (MatrixIndexT i = 0; i < v->Dim(); i++) (*v)(i) = RandGauss();
}
template <class Real>
static void InitRand(MatrixBase<Real> *M) {
do {
for (MatrixIndexT i = 0; i < M->NumRows(); i++)
for (MatrixIndexT j = 0; j < M->NumCols(); j++) (*M)(i, j) = RandGauss();
} while (M->NumRows() != 0 && M->Cond() > 100);
}
template <class Real>
static void AssertEqual(const VectorBase<Real> &A, const VectorBase<Real> &B,
float tol = 0.001) {
KALDI_ASSERT(A.Dim() == B.Dim());
for (MatrixIndexT i = 0; i < A.Dim(); i++)
KALDI_ASSERT(std::abs(A(i) - B(i)) < tol);
}
template <class RandomAccessIterator>
static void AssertEqual(RandomAccessIterator begin1, RandomAccessIterator end1,
RandomAccessIterator begin2,
RandomAccessIterator end2) {
KALDI_ASSERT((end1 - begin1) == (end2 - begin2));
KALDI_ASSERT(end1 > begin1);
for (; begin1 < end1; ++begin1, ++begin2) {
KALDI_ASSERT(*begin1 == *begin2);
}
}
//////////////////////////////////////////////////
void UnitTestRandomizerMask() {
NnetDataRandomizerOptions c;
RandomizerMask r;
r.Init(c);
const std::vector<int32> &m = r.Generate(5);
KALDI_ASSERT(m.size() == 5);
int32 sum_of_elems = std::accumulate(m.begin(), m.end(), 0);
KALDI_ASSERT(sum_of_elems == 4 + 3 + 2 + 1 + 0);
}
void UnitTestMatrixRandomizer() {
Matrix<BaseFloat> m(1111, 10);
InitRand(&m);
CuMatrix<BaseFloat> m2(m);
// config
NnetDataRandomizerOptions c;
c.randomizer_size = 1000;
c.minibatch_size = 100;
// randomizer
MatrixRandomizer r;
r.Init(c);
r.AddData(m2);
KALDI_ASSERT(r.IsFull());
// create vector with consecutive indices
std::vector<int32> mask(1111);
for (int32 i = 0; i < 1111; i++) {
mask[i] = i;
}
r.Randomize(mask); // no shuffling
// make sure we get same data we put to randomizer
int32 i = 0;
for (; !r.Done(); r.Next(), i++) {
KALDI_LOG << i;
const CuMatrixBase<BaseFloat> &m3 = r.Value();
Matrix<BaseFloat> m4(m3.NumRows(), m3.NumCols());
m3.CopyToMat(&m4);
AssertEqual(m4, m.RowRange(i * c.minibatch_size, c.minibatch_size));
}
KALDI_ASSERT(i == 11); // 11 minibatches
KALDI_LOG << "Filling for 2nd time";
// try to fill buffer one more time, and empty it
KALDI_ASSERT(!r.IsFull());
r.AddData(m2);
KALDI_ASSERT(r.IsFull());
KALDI_ASSERT(r.NumFrames() == 11 + 1111);
{ // check last 11 rows were copied to the front in the buffer
const CuMatrixBase<BaseFloat> &m3 = r.Value();
Matrix<BaseFloat> m4(m3.NumRows(), m3.NumCols());
m3.CopyToMat(&m4);
AssertEqual(m4.RowRange(0, 11), m.RowRange(1100, 11));
}
KALDI_ASSERT(!r.Done());
for (; !r.Done(); r.Next(), i++) {
KALDI_LOG << i;
const CuMatrixBase<BaseFloat> &m3 = r.Value();
}
KALDI_ASSERT(i == 22); // 22 minibatches
}
void UnitTestVectorRandomizer() {
Vector<BaseFloat> v(1111);
InitRand(&v);
// config
NnetDataRandomizerOptions c;
c.randomizer_size = 1000;
c.minibatch_size = 100;
// randomizer
VectorRandomizer r;
r.Init(c);
r.AddData(v);
KALDI_ASSERT(r.IsFull());
// create vector with consecutive indices
std::vector<int32> mask(1111);
for (int32 i = 0; i < 1111; i++) {
mask[i] = i;
}
r.Randomize(mask); // no shuffling
// make sure we get same data we put to randomizer
int32 i = 0;
for (; !r.Done(); r.Next(), i++) {
KALDI_LOG << i;
const VectorBase<BaseFloat> &v2 = r.Value();
AssertEqual(v2, v.Range(i * c.minibatch_size, c.minibatch_size));
}
KALDI_ASSERT(i == 11); // 11 minibatches
KALDI_LOG << "Filling for 2nd time";
// try to fill buffer one more time, and empty it
KALDI_ASSERT(!r.IsFull());
r.AddData(v);
KALDI_ASSERT(r.IsFull());
KALDI_ASSERT(r.NumFrames() == 11 + 1111);
{ // check last 11 rows were copied to the front in the buffer
const VectorBase<BaseFloat> &v2 = r.Value();
AssertEqual(v2.Range(0, 11), v.Range(1100, 11));
}
KALDI_ASSERT(!r.Done());
for (; !r.Done(); r.Next(), i++) {
KALDI_LOG << i;
const VectorBase<BaseFloat> &v2 = r.Value();
}
KALDI_ASSERT(i == 22); // 22 minibatches
}
void UnitTestStdVectorRandomizer() {
// prepare vector with some data
std::vector<int32> v(1111);
for (int32 i = 0; i < v.size(); i++) {
v.at(i) = i;
}
std::random_shuffle(v.begin(), v.end());
// config
NnetDataRandomizerOptions c;
c.randomizer_size = 1000;
c.minibatch_size = 100;
// randomizer
Int32VectorRandomizer r;
r.Init(c);
r.AddData(v);
KALDI_ASSERT(r.IsFull());
// create vector with consecutive indices
std::vector<int32> mask(1111);
for (int32 i = 0; i < 1111; i++) {
mask[i] = i;
}
r.Randomize(mask); // no shuffling
// make sure we get same data we put to randomizer
int32 i = 0;
for (; !r.Done(); r.Next(), i++) {
KALDI_LOG << i;
std::vector<int32> v2 = r.Value();
AssertEqual(v2.begin(), v2.end(), v.begin() + (i * c.minibatch_size),
v.begin() + ((i + 1) * c.minibatch_size));
}
KALDI_ASSERT(i == 11); // 11 minibatches
KALDI_LOG << "Filling for 2nd time";
// try to fill buffer one more time, and empty it
KALDI_ASSERT(!r.IsFull());
r.AddData(v);
KALDI_ASSERT(r.IsFull());
KALDI_ASSERT(r.NumFrames() == 11 + 1111);
{ // check last 11 rows were copied to the front in the buffer
std::vector<int32> v2 = r.Value();
AssertEqual(v2.begin(), v2.begin() + 11, v.begin() + 1100,
v.begin() + 1100 + 11);
}
KALDI_ASSERT(!r.Done());
for (; !r.Done(); r.Next(), i++) {
KALDI_LOG << i;
std::vector<int32> v2 = r.Value();
}
KALDI_ASSERT(i == 22); // 22 minibatches
}
int main() {
UnitTestRandomizerMask();
UnitTestMatrixRandomizer();
UnitTestVectorRandomizer();
UnitTestStdVectorRandomizer();
std::cout << "Tests succeeded.\n";
}
<|start_filename|>tonic-suite/asr/src/online2/online-timing.cc<|end_filename|>
// online2/online-timing.cc
// Copyright 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "online2/online-timing.h"
namespace kaldi {
OnlineTimingStats::OnlineTimingStats()
: num_utts_(0),
total_audio_(0.0),
total_time_taken_(0.0),
total_time_waited_(0.0),
max_delay_(0.0) {}
void OnlineTimingStats::Print() {
double real_time_factor = total_time_taken_ / total_audio_,
average_wait = (total_time_taken_ - total_audio_) / num_utts_,
idle_proportion = total_time_waited_ / total_audio_,
idle_percent = 100.0 * idle_proportion;
KALDI_LOG << "Timing stats: real-time factor was " << real_time_factor
<< " (note: this cannot be less than one.)";
KALDI_LOG << "Average delay was " << average_wait << " seconds.";
KALDI_LOG << "Percentage of time spent idling was " << idle_percent;
KALDI_LOG << "Longest delay was " << max_delay_ << " seconds for utterance "
<< '\'' << max_delay_utt_ << '\'';
}
OnlineTimer::OnlineTimer(const std::string &utterance_id)
: utterance_id_(utterance_id), waited_(0.0), utterance_length_(0.0) {}
void OnlineTimer::WaitUntil(double cur_utterance_length) {
double elapsed = timer_.Elapsed();
  // In a real-time decoding scenario, cur_utterance_length seconds of audio
  // would have arrived since this utterance started.  We have actually spent
  // "elapsed" seconds processing it, plus we have already accounted for
  // waited_ seconds of (simulated) waiting, e.g. on a semaphore, for audio to
  // arrive.  If the total is still less than cur_utterance_length, we would
  // have to wait for the remaining audio, so we add the difference to
  // "waited_" (see the worked example after this function).
double to_wait = cur_utterance_length - (elapsed + waited_);
if (to_wait > 0.0) waited_ += to_wait;
utterance_length_ = cur_utterance_length;
}
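// Worked example (illustrative numbers only): suppose WaitUntil(2.0) is
// called when timer_.Elapsed() == 0.5 and waited_ == 0.0.  In real time,
// 2.0 seconds of audio would have arrived but only 0.5 seconds were spent
// processing, so to_wait = 2.0 - (0.5 + 0.0) = 1.5 and waited_ becomes 1.5.
// If WaitUntil(3.0) is later called when timer_.Elapsed() == 2.0, then
// to_wait = 3.0 - (2.0 + 1.5) = -0.5: processing has fallen behind real
// time, and waited_ is left unchanged.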
double OnlineTimer::Elapsed() { return timer_.Elapsed() + waited_; }
void OnlineTimer::OutputStats(OnlineTimingStats *stats) {
double processing_time = timer_.Elapsed() + waited_,
wait_time = processing_time - utterance_length_;
if (wait_time < 0.0) {
    // My first thought was to make this a KALDI_ERR, but perhaps
// clocks can go backwards under some weird circumstance, so
// let's just make it a warning.
KALDI_WARN << "Negative wait time " << wait_time << " does not make sense.";
}
stats->num_utts_++;
stats->total_audio_ += utterance_length_;
stats->total_time_taken_ += processing_time;
stats->total_time_waited_ += waited_;
if (wait_time > stats->max_delay_) {
stats->max_delay_ = wait_time;
stats->max_delay_utt_ = utterance_id_;
}
}
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/online2/online-speex-wrapper.cc<|end_filename|>
// online2/online-speex-wrapper.cc
// Copyright 2014 IMSL, PKU-HKUST (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <cstring>
#include "online-speex-wrapper.h"
namespace kaldi {
OnlineSpeexEncoder::OnlineSpeexEncoder(const SpeexOptions &config)
: input_finished_(false) {
speex_frame_size_ = config.speex_bits_frame_size;
speex_encoded_frame_size_ = config.speex_wave_frame_size;
sample_rate_ = config.sample_rate;
if (sizeof(speex_bits_) == 1) {
KALDI_ERR << "OnlineSpeexEncoder called but Speex not installed."
<< "You should run tools/extras/install_speex.sh first, then "
<< "re-run configure in src/ and then make Kaldi again.\n";
}
#ifdef HAVE_SPEEX
speex_state_ =
speex_encoder_init(&speex_wb_mode); // init speex with wideband mode
int32 tmp = config.speex_quality;
speex_encoder_ctl(speex_state_, SPEEX_SET_QUALITY, &tmp);
tmp = (int)(sample_rate_);
speex_encoder_ctl(speex_state_, SPEEX_SET_SAMPLING_RATE, &tmp);
speex_bits_init(&speex_bits_);
#endif
}
OnlineSpeexEncoder::~OnlineSpeexEncoder() {
#ifdef HAVE_SPEEX
speex_bits_destroy(&speex_bits_);
speex_encoder_destroy(speex_state_);
#endif
}
void OnlineSpeexEncoder::AcceptWaveform(int32 sample_rate,
const VectorBase<BaseFloat> &waveform) {
if (waveform.Dim() == 0) {
return; // Nothing to do.
}
if (input_finished_) {
KALDI_ERR << "AcceptWaveform called after InputFinished() was called.";
}
if (sample_rate != sample_rate_) {
KALDI_ERR << "Sampling frequency mismatch, expected " << sample_rate_
<< ", got " << sample_rate;
}
Vector<BaseFloat> appended_wave;
const VectorBase<BaseFloat> &wave_to_use =
(waveform_remainder_.Dim() != 0 ? appended_wave : waveform);
if (waveform_remainder_.Dim() != 0) {
appended_wave.Resize(waveform_remainder_.Dim() + waveform.Dim());
appended_wave.Range(0, waveform_remainder_.Dim())
.CopyFromVec(waveform_remainder_);
appended_wave.Range(waveform_remainder_.Dim(), waveform.Dim())
.CopyFromVec(waveform);
}
waveform_remainder_.Resize(0);
std::vector<char> spx_bits;
Encode(wave_to_use, &spx_bits);
if (spx_bits.size() > 0) {
speex_encoded_char_bits_.insert(speex_encoded_char_bits_.end(),
spx_bits.begin(), spx_bits.end());
}
}
// Deal with the last frame, pad zeros
void OnlineSpeexEncoder::InputFinished() {
input_finished_ = true;
int32 dim = waveform_remainder_.Dim();
if (dim != 0) {
KALDI_ASSERT(dim <= speex_encoded_frame_size_);
Vector<BaseFloat> wave_last(speex_encoded_frame_size_);
std::vector<char> spx_bits;
wave_last.Range(0, dim).CopyFromVec(waveform_remainder_);
Encode(wave_last, &spx_bits);
speex_encoded_char_bits_.insert(speex_encoded_char_bits_.end(),
spx_bits.begin(), spx_bits.end());
}
}
void OnlineSpeexEncoder::Encode(const VectorBase<BaseFloat> &wave,
std::vector<char> *speex_encoder_bits) {
if (wave.Dim() == 0) {
return;
}
int32 to_encode = wave.Dim();
int32 has_encode = 0;
char cbits[200];
std::vector<char> encoded_bits;
while (to_encode > speex_encoded_frame_size_) {
SubVector<BaseFloat> wave_frame(wave, has_encode,
speex_encoded_frame_size_);
int32 nbytes = 0;
#ifdef HAVE_SPEEX
speex_bits_reset(&speex_bits_);
speex_encode(speex_state_, wave_frame.Data(), &speex_bits_);
nbytes = speex_bits_nbytes(&speex_bits_);
if (nbytes != speex_frame_size_) {
KALDI_ERR << "The number of bytes of Speex encoded frame mismatch,"
<< "expected " << speex_frame_size_ << ", got " << nbytes;
}
nbytes = speex_bits_write(&speex_bits_, cbits, 200);
#endif
int32 encoded_bits_len = encoded_bits.size();
encoded_bits.resize(encoded_bits_len + nbytes);
for (int32 i = 0; i < nbytes; i++) {
encoded_bits[i + encoded_bits_len] = cbits[i];
}
has_encode += speex_encoded_frame_size_;
to_encode -= speex_encoded_frame_size_;
}
if (to_encode > 0) {
SubVector<BaseFloat> wave_left(wave, has_encode, to_encode);
int32 dim = waveform_remainder_.Dim();
if (dim != 0) {
waveform_remainder_.Resize(dim + to_encode, kCopyData);
waveform_remainder_.Range(dim, to_encode).CopyFromVec(wave_left);
} else {
waveform_remainder_ = wave_left;
}
}
*speex_encoder_bits = encoded_bits;
}
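// Illustration of the framing logic above (hypothetical numbers): with
// speex_encoded_frame_size_ == 320, a 1000-sample input yields three encoded
// frames (960 samples) and leaves 40 samples in waveform_remainder_.  Note
// the strict '>' in the loop condition: a tail of exactly one frame is also
// kept in waveform_remainder_ and is only flushed (zero-padded if shorter
// than a frame) by InputFinished().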
OnlineSpeexDecoder::OnlineSpeexDecoder(const SpeexOptions &config) {
speex_frame_size_ = config.speex_bits_frame_size;
speex_decoded_frame_size_ = config.speex_wave_frame_size;
if (sizeof(speex_bits_) == 1) {
KALDI_ERR << "OnlineSpeexEncoder called but Speex not installed."
<< "You should run tools/extras/install_speex.sh first, then "
<< "re-run configure in src/ and then make Kaldi again.\n";
}
#ifdef HAVE_SPEEX
speex_state_ =
speex_decoder_init(&speex_wb_mode); // init speex with wideband mode
int32 tmp = config.speex_quality;
speex_decoder_ctl(speex_state_, SPEEX_SET_QUALITY, &tmp);
tmp = (int)config.sample_rate;
speex_decoder_ctl(speex_state_, SPEEX_SET_SAMPLING_RATE, &tmp);
speex_bits_init(&speex_bits_);
#endif
}
OnlineSpeexDecoder::~OnlineSpeexDecoder() {
#ifdef HAVE_SPEEX
speex_decoder_destroy(speex_state_);
speex_bits_destroy(&speex_bits_);
#endif
}
void OnlineSpeexDecoder::AcceptSpeexBits(
const std::vector<char> &spx_enc_bits) {
if (spx_enc_bits.size() == 0) {
return; // Nothing to do
}
std::vector<char> appended_bits;
const std::vector<char> &bits_to_use =
(speex_bits_remainder_.size() != 0 ? appended_bits : spx_enc_bits);
if (speex_bits_remainder_.size() != 0) {
appended_bits.insert(appended_bits.end(), speex_bits_remainder_.begin(),
speex_bits_remainder_.end());
appended_bits.insert(appended_bits.end(), spx_enc_bits.begin(),
spx_enc_bits.end());
}
speex_bits_remainder_.clear();
Vector<BaseFloat> waveform;
Decode(bits_to_use, &waveform);
if (waveform.Dim() == 0) {
// Got nothing, maybe the decode has failed
return;
}
int32 last_wav_size = waveform_.Dim();
waveform_.Resize(last_wav_size + waveform.Dim(), kCopyData);
waveform_.Range(last_wav_size, waveform.Dim()).CopyFromVec(waveform);
}
void OnlineSpeexDecoder::Decode(const std::vector<char> &speex_char_bits,
Vector<BaseFloat> *decoded_wav) {
if (speex_char_bits.size() < speex_frame_size_) {
return; // Nothing to do, should never reach this
}
decoded_wav->Resize(0);
char *cbits = new char[speex_frame_size_ + 10]();
BaseFloat *wav = new BaseFloat[speex_decoded_frame_size_]();
int32 to_decode = speex_char_bits.size();
int32 has_decode = 0;
while (to_decode > speex_frame_size_) {
memcpy(cbits, &speex_char_bits[has_decode], speex_frame_size_);
#ifdef HAVE_SPEEX
speex_bits_read_from(&speex_bits_, cbits, speex_frame_size_);
speex_decode(speex_state_, &speex_bits_, wav);
#endif
int32 dim = decoded_wav->Dim(); // expanding decoded_wav each frame
decoded_wav->Resize(dim + speex_decoded_frame_size_, kCopyData);
// Cannot use CopyFromPtr at this moment
// decoded_wav->Range(dim, speex_decoded_frame_size_).
// CopyFromPtr(wav, speex_decoded_frame_size_);
for (int32 i = 0; i < speex_decoded_frame_size_; i++) {
(*decoded_wav)(i + dim) = wav[i];
}
has_decode += speex_frame_size_;
to_decode -= speex_frame_size_;
}
if (to_decode > 0) {
speex_bits_remainder_.insert(speex_bits_remainder_.end(),
speex_char_bits.begin() + has_decode,
speex_char_bits.end());
}
delete[] cbits;
delete[] wav;
}
}  // namespace kaldi
<|start_filename|>tonic-suite/nlp/src/SENNA_nn.h<|end_filename|>
#ifndef SENNA_NN_H
#define SENNA_NN_H
void SENNA_nn_lookup(float *dest, int dest_stride, const float *wordweights,
int wordsize, int maxwordidx, const int *wordindices,
int nword, int padidx, int npad);
void SENNA_nn_hardtanh(float *output, float *input, int size);
void SENNA_nn_linear(float *output, int output_size, float *weights,
float *biases, float *input, int input_size);
void SENNA_nn_max(float *value_, int *idx_, float *input, int input_size);
void SENNA_nn_temporal_convolution(float *output, int output_frame_size,
float *weights, float *biases, float *input,
int input_frame_size, int n_frames, int k_w);
void SENNA_nn_temporal_max_convolution(float *output, float *bias, float *input,
int input_frame_size, int n_frames,
int k_w);
void SENNA_nn_temporal_max(float *output, float *input, int N, int T);
void SENNA_nn_distance(int *dest, int idx, int max_idx, int sentence_size,
int padding_size);
void SENNA_nn_viterbi(int *path, float *init, float *transition,
float *emission, int N, int T);
#endif
<|start_filename|>tonic-suite/asr/src/ivectorbin/ivector-compute-plda.cc<|end_filename|>
// ivectorbin/ivector-compute-plda.cc
// Copyright 2013 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "ivector/plda.h"
int main(int argc, char *argv[]) {
using namespace kaldi;
typedef kaldi::int32 int32;
try {
const char *usage =
"Computes a Plda object (for Probabilistic Linear Discriminant "
"Analysis)\n"
"from a set of iVectors. Uses speaker information from a spk2utt "
"file\n"
"to compute within and between class variances.\n"
"\n"
"Usage: ivector-compute-plda [options] <spk2utt-rspecifier> "
"<ivector-rspecifier> "
"<plda-out>\n"
"e.g.: \n"
" ivector-compute-plda ark:spk2utt ark,s,cs:ivectors.ark plda\n";
ParseOptions po(usage);
bool binary = true;
PldaEstimationConfig plda_config;
plda_config.Register(&po);
po.Register("binary", &binary, "Write output in binary mode");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string spk2utt_rspecifier = po.GetArg(1),
ivector_rspecifier = po.GetArg(2),
plda_wxfilename = po.GetArg(3);
int64 num_spk_done = 0, num_spk_err = 0, num_utt_done = 0, num_utt_err = 0;
SequentialTokenVectorReader spk2utt_reader(spk2utt_rspecifier);
RandomAccessBaseFloatVectorReader ivector_reader(ivector_rspecifier);
PldaStats plda_stats;
for (; !spk2utt_reader.Done(); spk2utt_reader.Next()) {
std::string spk = spk2utt_reader.Key();
const std::vector<std::string> &uttlist = spk2utt_reader.Value();
if (uttlist.empty()) {
KALDI_ERR << "Speaker with no utterances.";
}
std::vector<Vector<BaseFloat> > ivectors;
ivectors.reserve(uttlist.size());
for (size_t i = 0; i < uttlist.size(); i++) {
std::string utt = uttlist[i];
if (!ivector_reader.HasKey(utt)) {
KALDI_WARN << "No iVector present in input for utterance " << utt;
num_utt_err++;
} else {
ivectors.resize(ivectors.size() + 1);
ivectors.back() = ivector_reader.Value(utt);
num_utt_done++;
}
}
if (ivectors.size() == 0) {
KALDI_WARN << "Not producing output for speaker " << spk
<< " since no utterances had iVectors";
num_spk_err++;
} else {
Matrix<double> ivector_mat(ivectors.size(), ivectors[0].Dim());
for (size_t i = 0; i < ivectors.size(); i++)
ivector_mat.Row(i).CopyFromVec(ivectors[i]);
double weight = 1.0; // The code supports weighting but
// we don't support this at the command-line
// level yet.
plda_stats.AddSamples(weight, ivector_mat);
num_spk_done++;
}
}
KALDI_LOG << "Accumulated stats from " << num_spk_done << " speakers ("
<< num_spk_err << " with no utterances), consisting of "
<< num_utt_done << " utterances (" << num_utt_err
<< " absent from input).";
if (num_spk_done == 0)
KALDI_ERR << "No stats accumulated, unable to estimate PLDA.";
if (num_spk_done == num_utt_done)
KALDI_ERR << "No speakers with multiple utterances, "
<< "unable to estimate PLDA.";
plda_stats.Sort();
PldaEstimator plda_estimator(plda_stats);
Plda plda;
plda_estimator.Estimate(plda_config, &plda);
WriteKaldiObject(plda, plda_wxfilename, binary);
return (num_spk_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/online2/online-feature-pipeline.cc<|end_filename|>
// online2/online-feature-pipeline.cc
// Copyright 2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "online2/online-feature-pipeline.h"
#include "transform/cmvn.h"
namespace kaldi {
OnlineFeaturePipelineConfig::OnlineFeaturePipelineConfig(
const OnlineFeaturePipelineCommandLineConfig &config) {
if (config.feature_type == "mfcc" || config.feature_type == "plp" ||
config.feature_type == "fbank") {
feature_type = config.feature_type;
} else {
KALDI_ERR << "Invalid feature type: " << config.feature_type << ". "
<< "Supported feature types: mfcc, plp.";
}
if (config.mfcc_config != "") {
ReadConfigFromFile(config.mfcc_config, &mfcc_opts);
if (feature_type != "mfcc")
KALDI_WARN << "--mfcc-config option has no effect "
<< "since feature type is set to " << feature_type << ".";
} // else use the defaults.
if (config.plp_config != "") {
ReadConfigFromFile(config.plp_config, &plp_opts);
if (feature_type != "plp")
KALDI_WARN << "--plp-config option has no effect "
<< "since feature type is set to " << feature_type << ".";
} // else use the defaults.
if (config.fbank_config != "") {
    ReadConfigFromFile(config.fbank_config, &fbank_opts);
if (feature_type != "fbank")
KALDI_WARN << "--fbank-config option has no effect "
<< "since feature type is set to " << feature_type << ".";
} // else use the defaults.
add_pitch = config.add_pitch;
if (config.pitch_config != "") {
ReadConfigFromFile(config.pitch_config, &pitch_opts);
if (!add_pitch)
KALDI_WARN << "--pitch-config option has no effect "
<< "since you did not supply --add-pitch option.";
} // else use the defaults.
if (config.pitch_process_config != "") {
ReadConfigFromFile(config.pitch_process_config, &pitch_process_opts);
if (!add_pitch)
KALDI_WARN << "--pitch-process-config option has no effect "
<< "since you did not supply --add-pitch option.";
} // else use the defaults.
if (config.cmvn_config != "") {
ReadConfigFromFile(config.cmvn_config, &cmvn_opts);
} // else use the defaults.
global_cmvn_stats_rxfilename = config.global_cmvn_stats_rxfilename;
if (global_cmvn_stats_rxfilename == "")
KALDI_ERR << "--global-cmvn-stats option is required.";
add_deltas = config.add_deltas;
if (config.delta_config != "") {
ReadConfigFromFile(config.delta_config, &delta_opts);
if (!add_deltas)
KALDI_WARN << "--delta-config option has no effect "
<< "since you did not supply --add-deltas option.";
} // else use the defaults.
splice_feats = config.splice_feats;
if (config.splice_config != "") {
ReadConfigFromFile(config.splice_config, &splice_opts);
if (!splice_feats)
KALDI_WARN << "--splice-config option has no effect "
<< "since you did not supply --splice-feats option.";
} // else use the defaults.
if (config.add_deltas && config.splice_feats)
KALDI_ERR << "You cannot supply both --add-deltas "
<< "and --splice-feats options";
lda_rxfilename = config.lda_rxfilename;
}
OnlineFeaturePipeline::OnlineFeaturePipeline(
const OnlineFeaturePipelineConfig &config, const Matrix<BaseFloat> &lda_mat,
const Matrix<BaseFloat> &global_cmvn_stats)
: config_(config),
lda_mat_(lda_mat),
global_cmvn_stats_(global_cmvn_stats) {
Init();
}
OnlineFeaturePipeline::OnlineFeaturePipeline(
const OnlineFeaturePipelineConfig &config)
: config_(config) {
if (config.lda_rxfilename != "")
ReadKaldiObject(config.lda_rxfilename, &lda_mat_);
if (config.global_cmvn_stats_rxfilename != "")
ReadKaldiObject(config.global_cmvn_stats_rxfilename, &global_cmvn_stats_);
Init();
}
OnlineFeaturePipeline *OnlineFeaturePipeline::New() const {
return new OnlineFeaturePipeline(config_, lda_mat_, global_cmvn_stats_);
}
OnlineFeatureInterface *OnlineFeaturePipeline::UnadaptedFeature() const {
if (lda_)
return lda_;
else if (splice_or_delta_)
return splice_or_delta_;
else {
KALDI_ASSERT(feature_ != NULL);
return feature_;
}
}
OnlineFeatureInterface *OnlineFeaturePipeline::AdaptedFeature() const {
if (fmllr_)
return fmllr_;
else
return UnadaptedFeature();
}
void OnlineFeaturePipeline::SetCmvnState(const OnlineCmvnState &cmvn_state) {
cmvn_->SetState(cmvn_state);
}
void OnlineFeaturePipeline::GetCmvnState(OnlineCmvnState *cmvn_state) {
int32 frame = cmvn_->NumFramesReady() - 1;
// the following call will crash if no frames are ready.
cmvn_->GetState(frame, cmvn_state);
}
// Init() is to be called from the constructor; it assumes the pointer
// members are all uninitialized but config_ and lda_mat_ are
// initialized.
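// The chain assembled below is, roughly:
//   base_feature_ (MFCC/PLP/fbank) -> cmvn_ -> [append pitch features]
//     -> [splice frames or add deltas] -> [LDA-like transform],
// where the bracketed stages are created only if the corresponding options
// are set; fmllr_ is added on top later, via SetTransform().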
void OnlineFeaturePipeline::Init() {
if (config_.feature_type == "mfcc") {
base_feature_ = new OnlineMfcc(config_.mfcc_opts);
} else if (config_.feature_type == "plp") {
base_feature_ = new OnlinePlp(config_.plp_opts);
} else if (config_.feature_type == "fbank") {
base_feature_ = new OnlineFbank(config_.fbank_opts);
} else {
KALDI_ERR << "Code error: invalid feature type " << config_.feature_type;
}
{
KALDI_ASSERT(global_cmvn_stats_.NumRows() != 0);
if (config_.add_pitch) {
int32 global_dim = global_cmvn_stats_.NumCols() - 1;
int32 dim = base_feature_->Dim();
KALDI_ASSERT(global_dim >= dim);
if (global_dim > dim) {
Matrix<BaseFloat> last_col(global_cmvn_stats_.ColRange(global_dim, 1));
global_cmvn_stats_.Resize(global_cmvn_stats_.NumRows(), dim + 1,
kCopyData);
global_cmvn_stats_.ColRange(dim, 1).CopyFromMat(last_col);
}
}
Matrix<double> global_cmvn_stats_dbl(global_cmvn_stats_);
OnlineCmvnState initial_state(global_cmvn_stats_dbl);
cmvn_ = new OnlineCmvn(config_.cmvn_opts, initial_state, base_feature_);
}
if (config_.add_pitch) {
pitch_ = new OnlinePitchFeature(config_.pitch_opts);
pitch_feature_ = new OnlineProcessPitch(config_.pitch_process_opts, pitch_);
feature_ = new OnlineAppendFeature(cmvn_, pitch_feature_);
} else {
pitch_ = NULL;
pitch_feature_ = NULL;
feature_ = cmvn_;
}
if (config_.splice_feats && config_.add_deltas) {
KALDI_ERR << "You cannot supply both --add-deltas and "
<< "--splice-feats options.";
} else if (config_.splice_feats) {
splice_or_delta_ = new OnlineSpliceFrames(config_.splice_opts, feature_);
} else if (config_.add_deltas) {
splice_or_delta_ = new OnlineDeltaFeature(config_.delta_opts, feature_);
} else {
splice_or_delta_ = NULL;
}
if (lda_mat_.NumRows() != 0) {
lda_ = new OnlineTransform(
lda_mat_, (splice_or_delta_ != NULL ? splice_or_delta_ : feature_));
} else {
lda_ = NULL;
}
fmllr_ = NULL; // This will be set up if the user calls SetTransform().
}
void OnlineFeaturePipeline::SetTransform(
const MatrixBase<BaseFloat> &transform) {
if (fmllr_ != NULL) { // we already had a transform; delete this
// object.
delete fmllr_;
fmllr_ = NULL;
}
if (transform.NumRows() != 0) {
OnlineFeatureInterface *feat = UnadaptedFeature();
fmllr_ = new OnlineTransform(transform, feat);
}
}
void OnlineFeaturePipeline::FreezeCmvn() {
cmvn_->Freeze(cmvn_->NumFramesReady() - 1);
}
int32 OnlineFeaturePipeline::Dim() const { return AdaptedFeature()->Dim(); }
bool OnlineFeaturePipeline::IsLastFrame(int32 frame) const {
return AdaptedFeature()->IsLastFrame(frame);
}
int32 OnlineFeaturePipeline::NumFramesReady() const {
return AdaptedFeature()->NumFramesReady();
}
void OnlineFeaturePipeline::GetFrame(int32 frame, VectorBase<BaseFloat> *feat) {
AdaptedFeature()->GetFrame(frame, feat);
}
OnlineFeaturePipeline::~OnlineFeaturePipeline() {
// Note: the delete command only deletes pointers that are non-NULL. Not all
// of the pointers below will be non-NULL.
delete fmllr_;
delete lda_;
delete splice_or_delta_;
// Guard against double deleting the cmvn_ ptr
if (pitch_feature_) {
delete feature_; // equal to cmvn_ if pitch feats are not appended
delete pitch_feature_;
delete pitch_;
}
delete cmvn_;
delete base_feature_;
}
void OnlineFeaturePipeline::AcceptWaveform(
BaseFloat sampling_rate, const VectorBase<BaseFloat> &waveform) {
base_feature_->AcceptWaveform(sampling_rate, waveform);
if (pitch_) pitch_->AcceptWaveform(sampling_rate, waveform);
}
void OnlineFeaturePipeline::InputFinished() {
base_feature_->InputFinished();
if (pitch_) pitch_->InputFinished();
}
BaseFloat OnlineFeaturePipelineConfig::FrameShiftInSeconds() const {
if (feature_type == "mfcc") {
return mfcc_opts.frame_opts.frame_shift_ms * 1.0e-03;
} else if (feature_type == "plp") {
    return plp_opts.frame_opts.frame_shift_ms * 1.0e-03;
  } else if (feature_type == "fbank") {
    return fbank_opts.frame_opts.frame_shift_ms * 1.0e-03;
  } else {
KALDI_ERR << "Unknown feature type " << feature_type;
return 0.0;
}
}
void OnlineFeaturePipeline::GetAsMatrix(Matrix<BaseFloat> *feats) {
if (pitch_) {
feats->Resize(NumFramesReady(), pitch_feature_->Dim());
for (int32 i = 0; i < NumFramesReady(); i++) {
SubVector<BaseFloat> row(*feats, i);
pitch_feature_->GetFrame(i, &row);
}
}
}
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/sgmmbin/sgmm-init.cc<|end_filename|>
// sgmmbin/sgmm-init.cc
// Copyright 2012 <NAME>
// Copyright 2009-2011 Saarland University (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "sgmm/am-sgmm.h"
#include "hmm/transition-model.h"
#include "tree/context-dep.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
const char *usage =
"Initialize an SGMM from a trained full-covariance UBM and a specified"
" model topology.\n"
"Usage: sgmm-init [options] <topology> <tree> <init-model> <sgmm-out>\n"
"The <init-model> argument can be a UBM (the default case) or another\n"
"SGMM (if the --init-from-sgmm flag is used).\n";
bool binary = true, init_from_sgmm = false;
int32 phn_space_dim = 0, spk_space_dim = 0;
kaldi::ParseOptions po(usage);
po.Register("binary", &binary, "Write output in binary mode");
po.Register("phn-space-dim", &phn_space_dim, "Phonetic space dimension.");
po.Register("spk-space-dim", &spk_space_dim, "Speaker space dimension.");
po.Register("init-from-sgmm", &init_from_sgmm,
"Initialize from another SGMM (instead of a UBM).");
po.Read(argc, argv);
if (po.NumArgs() != 4) {
po.PrintUsage();
exit(1);
}
std::string topo_in_filename = po.GetArg(1),
tree_in_filename = po.GetArg(2),
init_model_filename = po.GetArg(3),
sgmm_out_filename = po.GetArg(4);
ContextDependency ctx_dep;
{
bool binary_in;
Input ki(tree_in_filename.c_str(), &binary_in);
ctx_dep.Read(ki.Stream(), binary_in);
}
HmmTopology topo;
ReadKaldiObject(topo_in_filename, &topo);
TransitionModel trans_model(ctx_dep, topo);
kaldi::AmSgmm sgmm;
if (init_from_sgmm) {
kaldi::AmSgmm init_sgmm;
{
bool binary_read;
TransitionModel tmp_trans;
kaldi::Input ki(init_model_filename, &binary_read);
tmp_trans.Read(ki.Stream(), binary_read);
init_sgmm.Read(ki.Stream(), binary_read);
}
sgmm.CopyGlobalsInitVecs(init_sgmm, phn_space_dim, spk_space_dim,
trans_model.NumPdfs());
} else {
kaldi::FullGmm ubm;
{
bool binary_read;
kaldi::Input ki(init_model_filename, &binary_read);
ubm.Read(ki.Stream(), binary_read);
}
sgmm.InitializeFromFullGmm(ubm, trans_model.NumPdfs(), phn_space_dim,
spk_space_dim);
}
sgmm.ComputeNormalizers();
{
kaldi::Output ko(sgmm_out_filename, binary);
trans_model.Write(ko.Stream(), binary);
sgmm.Write(ko.Stream(), binary, kaldi::kSgmmWriteAll);
}
KALDI_LOG << "Written model to " << sgmm_out_filename;
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/sgmm2/estimate-am-sgmm2.cc<|end_filename|>
// sgmm2/estimate-am-sgmm2.cc
// Copyright 2009-2011 Microsoft Corporation; <NAME>;
// Saarland University (Author: <NAME>);
// <NAME>; <NAME>;
// Copyright 2012-2013 Johns Hopkins University (Author: <NAME>)
// <NAME>; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "sgmm2/am-sgmm2.h"
#include "sgmm2/estimate-am-sgmm2.h"
#include "thread/kaldi-thread.h"
namespace kaldi {
using std::string;
using std::vector;
void MleAmSgmm2Accs::Write(std::ostream &out_stream, bool binary) const {
WriteToken(out_stream, binary, "<SGMMACCS>");
WriteToken(out_stream, binary, "<NUMPDFS>");
WriteBasicType(out_stream, binary, num_pdfs_);
WriteToken(out_stream, binary, "<NUMGROUPS>");
WriteBasicType(out_stream, binary, num_groups_);
WriteToken(out_stream, binary, "<NUMGaussians>");
WriteBasicType(out_stream, binary, num_gaussians_);
WriteToken(out_stream, binary, "<FEATUREDIM>");
WriteBasicType(out_stream, binary, feature_dim_);
WriteToken(out_stream, binary, "<PHONESPACEDIM>");
WriteBasicType(out_stream, binary, phn_space_dim_);
WriteToken(out_stream, binary, "<SPKSPACEDIM>");
WriteBasicType(out_stream, binary, spk_space_dim_);
if (!binary) out_stream << "\n";
if (Y_.size() != 0) {
KALDI_ASSERT(gamma_.size() != 0);
WriteToken(out_stream, binary, "<Y>");
for (int32 i = 0; i < num_gaussians_; i++) {
Y_[i].Write(out_stream, binary);
}
}
if (Z_.size() != 0) {
KALDI_ASSERT(R_.size() != 0);
WriteToken(out_stream, binary, "<Z>");
for (int32 i = 0; i < num_gaussians_; i++) {
Z_[i].Write(out_stream, binary);
}
WriteToken(out_stream, binary, "<R>");
for (int32 i = 0; i < num_gaussians_; i++) {
R_[i].Write(out_stream, binary);
}
}
if (S_.size() != 0) {
KALDI_ASSERT(gamma_.size() != 0);
WriteToken(out_stream, binary, "<S>");
for (int32 i = 0; i < num_gaussians_; i++) {
S_[i].Write(out_stream, binary);
}
}
if (y_.size() != 0) {
KALDI_ASSERT(gamma_.size() != 0);
WriteToken(out_stream, binary, "<y>");
for (int32 j1 = 0; j1 < num_groups_; j1++) {
y_[j1].Write(out_stream, binary);
}
}
if (gamma_.size() != 0) { // These stats are large
// -> write as single precision.
WriteToken(out_stream, binary, "<gamma>");
for (int32 j1 = 0; j1 < num_groups_; j1++) {
Matrix<BaseFloat> gamma_j1(gamma_[j1]);
gamma_j1.Write(out_stream, binary);
}
}
if (t_.NumRows() != 0) {
WriteToken(out_stream, binary, "<t>");
t_.Write(out_stream, binary);
}
if (U_.size() != 0) {
WriteToken(out_stream, binary, "<U>");
for (int32 i = 0; i < num_gaussians_; i++) {
U_[i].Write(out_stream, binary);
}
}
if (gamma_c_.size() != 0) {
WriteToken(out_stream, binary, "<gamma_c>");
for (int32 j2 = 0; j2 < num_pdfs_; j2++) {
gamma_c_[j2].Write(out_stream, binary);
}
}
if (a_.size() != 0) {
WriteToken(out_stream, binary, "<a>");
for (int32 j1 = 0; j1 < num_groups_; j1++) {
a_[j1].Write(out_stream, binary);
}
}
WriteToken(out_stream, binary, "<total_like>");
WriteBasicType(out_stream, binary, total_like_);
WriteToken(out_stream, binary, "<total_frames>");
WriteBasicType(out_stream, binary, total_frames_);
WriteToken(out_stream, binary, "</SGMMACCS>");
}
void MleAmSgmm2Accs::Read(std::istream &in_stream, bool binary, bool add) {
ExpectToken(in_stream, binary, "<SGMMACCS>");
ExpectToken(in_stream, binary, "<NUMPDFS>");
ReadBasicType(in_stream, binary, &num_pdfs_);
ExpectToken(in_stream, binary, "<NUMGROUPS>");
ReadBasicType(in_stream, binary, &num_groups_);
ExpectToken(in_stream, binary, "<NUMGaussians>");
ReadBasicType(in_stream, binary, &num_gaussians_);
ExpectToken(in_stream, binary, "<FEATUREDIM>");
ReadBasicType(in_stream, binary, &feature_dim_);
ExpectToken(in_stream, binary, "<PHONESPACEDIM>");
ReadBasicType(in_stream, binary, &phn_space_dim_);
ExpectToken(in_stream, binary, "<SPKSPACEDIM>");
ReadBasicType(in_stream, binary, &spk_space_dim_);
string token;
ReadToken(in_stream, binary, &token);
while (token != "</SGMMACCS>") {
if (token == "<Y>") {
Y_.resize(num_gaussians_);
for (size_t i = 0; i < Y_.size(); i++) {
Y_[i].Read(in_stream, binary, add);
}
} else if (token == "<Z>") {
Z_.resize(num_gaussians_);
for (size_t i = 0; i < Z_.size(); i++) {
Z_[i].Read(in_stream, binary, add);
}
} else if (token == "<R>") {
R_.resize(num_gaussians_);
if (gamma_s_.Dim() == 0) gamma_s_.Resize(num_gaussians_);
for (size_t i = 0; i < R_.size(); i++) {
R_[i].Read(in_stream, binary, add);
}
} else if (token == "<S>") {
S_.resize(num_gaussians_);
for (size_t i = 0; i < S_.size(); i++) {
S_[i].Read(in_stream, binary, add);
}
} else if (token == "<y>") {
y_.resize(num_groups_);
for (int32 j1 = 0; j1 < num_groups_; j1++) {
y_[j1].Read(in_stream, binary, add);
}
} else if (token == "<gamma>") {
gamma_.resize(num_groups_);
for (int32 j1 = 0; j1 < num_groups_; j1++) {
gamma_[j1].Read(in_stream, binary, add);
}
// Don't read gamma_s, it's just a temporary variable and
// not part of the permanent (non-speaker-specific) accs.
} else if (token == "<a>") {
a_.resize(num_groups_);
for (int32 j1 = 0; j1 < num_groups_; j1++) {
a_[j1].Read(in_stream, binary, add);
}
} else if (token == "<gamma_c>") {
gamma_c_.resize(num_pdfs_);
for (int32 j2 = 0; j2 < num_pdfs_; j2++) {
gamma_c_[j2].Read(in_stream, binary, add);
}
} else if (token == "<t>") {
t_.Read(in_stream, binary, add);
} else if (token == "<U>") {
U_.resize(num_gaussians_);
for (int32 i = 0; i < num_gaussians_; i++) {
U_[i].Read(in_stream, binary, add);
}
} else if (token == "<total_like>") {
double total_like;
ReadBasicType(in_stream, binary, &total_like);
if (add)
total_like_ += total_like;
else
total_like_ = total_like;
} else if (token == "<total_frames>") {
double total_frames;
ReadBasicType(in_stream, binary, &total_frames);
if (add)
total_frames_ += total_frames;
else
total_frames_ = total_frames;
} else {
KALDI_ERR << "Unexpected token '" << token << "' in model file ";
}
ReadToken(in_stream, binary, &token);
}
}
void MleAmSgmm2Accs::Check(const AmSgmm2 &model, bool show_properties) const {
if (show_properties)
KALDI_LOG << "Sgmm2PdfModel: J1 = " << num_groups_ << ", J2 = " << num_pdfs_
<< ", D = " << feature_dim_ << ", S = " << phn_space_dim_
<< ", T = " << spk_space_dim_ << ", I = " << num_gaussians_;
KALDI_ASSERT(num_pdfs_ == model.NumPdfs() && num_pdfs_ > 0);
KALDI_ASSERT(num_groups_ == model.NumGroups() && num_groups_ > 0);
KALDI_ASSERT(num_gaussians_ == model.NumGauss() && num_gaussians_ > 0);
KALDI_ASSERT(feature_dim_ == model.FeatureDim() && feature_dim_ > 0);
KALDI_ASSERT(phn_space_dim_ == model.PhoneSpaceDim() && phn_space_dim_ > 0);
KALDI_ASSERT(spk_space_dim_ == model.SpkSpaceDim());
std::ostringstream debug_str;
if (Y_.size() == 0) {
debug_str << "Y: no. ";
} else {
KALDI_ASSERT(gamma_.size() != 0);
KALDI_ASSERT(Y_.size() == static_cast<size_t>(num_gaussians_));
bool nz = false;
for (int32 i = 0; i < num_gaussians_; i++) {
KALDI_ASSERT(Y_[i].NumRows() == feature_dim_ &&
Y_[i].NumCols() == phn_space_dim_);
if (!nz && Y_[i](0, 0) != 0) {
nz = true;
}
}
debug_str << "Y: yes, " << string(nz ? "nonzero. " : "zero. ");
}
if (Z_.size() == 0) {
KALDI_ASSERT(R_.size() == 0);
debug_str << "Z, R: no. ";
} else {
KALDI_ASSERT(gamma_s_.Dim() == num_gaussians_);
KALDI_ASSERT(Z_.size() == static_cast<size_t>(num_gaussians_));
KALDI_ASSERT(R_.size() == static_cast<size_t>(num_gaussians_));
bool Z_nz = false, R_nz = false;
for (int32 i = 0; i < num_gaussians_; i++) {
KALDI_ASSERT(Z_[i].NumRows() == feature_dim_ &&
Z_[i].NumCols() == spk_space_dim_);
KALDI_ASSERT(R_[i].NumRows() == spk_space_dim_);
if (!Z_nz && Z_[i](0, 0) != 0) {
Z_nz = true;
}
if (!R_nz && R_[i](0, 0) != 0) {
R_nz = true;
}
}
bool gamma_s_nz = !gamma_s_.IsZero();
debug_str << "Z: yes, " << string(Z_nz ? "nonzero. " : "zero. ");
debug_str << "R: yes, " << string(R_nz ? "nonzero. " : "zero. ");
debug_str << "gamma_s: yes, "
<< string(gamma_s_nz ? "nonzero. " : "zero. ");
}
if (S_.size() == 0) {
debug_str << "S: no. ";
} else {
KALDI_ASSERT(gamma_.size() != 0);
bool S_nz = false;
KALDI_ASSERT(S_.size() == static_cast<size_t>(num_gaussians_));
for (int32 i = 0; i < num_gaussians_; i++) {
KALDI_ASSERT(S_[i].NumRows() == feature_dim_);
if (!S_nz && S_[i](0, 0) != 0) {
S_nz = true;
}
}
debug_str << "S: yes, " << string(S_nz ? "nonzero. " : "zero. ");
}
if (y_.size() == 0) {
debug_str << "y: no. ";
} else {
KALDI_ASSERT(gamma_.size() != 0);
bool nz = false;
KALDI_ASSERT(y_.size() == static_cast<size_t>(num_groups_));
for (int32 j1 = 0; j1 < num_groups_; j1++) {
KALDI_ASSERT(y_[j1].NumRows() == model.NumSubstatesForGroup(j1));
KALDI_ASSERT(y_[j1].NumCols() == phn_space_dim_);
if (!nz && y_[j1](0, 0) != 0) {
nz = true;
}
}
debug_str << "y: yes, " << string(nz ? "nonzero. " : "zero. ");
}
if (a_.size() == 0) {
debug_str << "a: no. ";
  } else {
bool nz = false;
KALDI_ASSERT(a_.size() == static_cast<size_t>(num_groups_));
for (int32 j1 = 0; j1 < num_groups_; j1++) {
KALDI_ASSERT(a_[j1].NumRows() == model.NumSubstatesForGroup(j1) &&
a_[j1].NumCols() == num_gaussians_);
if (!nz && a_[j1].Sum() != 0) nz = true;
}
debug_str << "a: yes, "
<< string(nz ? "nonzero. "
: "zero. "); // TODO: take out "string"
}
double tot_gamma = 0.0;
if (gamma_.size() == 0) {
debug_str << "gamma: no. ";
} else {
debug_str << "gamma: yes. ";
KALDI_ASSERT(gamma_.size() == static_cast<size_t>(num_groups_));
for (int32 j1 = 0; j1 < num_groups_; j1++) {
KALDI_ASSERT(gamma_[j1].NumRows() == model.NumSubstatesForGroup(j1) &&
gamma_[j1].NumCols() == num_gaussians_);
tot_gamma += gamma_[j1].Sum();
}
bool nz = (tot_gamma != 0.0);
KALDI_ASSERT(gamma_c_.size() == num_pdfs_ &&
"gamma_ set up but not gamma_c_.");
debug_str << "gamma: yes, " << string(nz ? "nonzero. " : "zero. ");
}
if (gamma_c_.size() == 0) {
KALDI_ERR << "gamma_c_ not set up."; // required for all accs.
} else {
KALDI_ASSERT(gamma_c_.size() == num_pdfs_);
double tot_gamma_c = 0.0;
for (int32 j2 = 0; j2 < num_pdfs_; j2++) {
KALDI_ASSERT(gamma_c_[j2].Dim() == model.NumSubstatesForPdf(j2));
tot_gamma_c += gamma_c_[j2].Sum();
}
bool nz = (tot_gamma_c != 0.0);
debug_str << "gamma_c: yes, " << string(nz ? "nonzero. " : "zero. ");
if (!gamma_.empty() && !ApproxEqual(tot_gamma_c, tot_gamma))
KALDI_WARN << "Counts from gamma and gamma_c differ " << tot_gamma
<< " vs. " << tot_gamma_c;
}
if (t_.NumRows() == 0) {
debug_str << "t: no. ";
} else {
KALDI_ASSERT(t_.NumRows() == num_gaussians_ &&
t_.NumCols() == spk_space_dim_);
KALDI_ASSERT(!U_.empty()); // t and U are used together.
bool nz = (t_.FrobeniusNorm() != 0);
debug_str << "t: yes, " << string(nz ? "nonzero. " : "zero. ");
}
if (U_.size() == 0) {
debug_str << "U: no. ";
} else {
bool nz = false;
KALDI_ASSERT(U_.size() == num_gaussians_);
for (int32 i = 0; i < num_gaussians_; i++) {
if (!nz && U_[i].FrobeniusNorm() != 0) nz = true;
KALDI_ASSERT(U_[i].NumRows() == spk_space_dim_);
}
KALDI_ASSERT(t_.NumRows() != 0); // t and U are used together.
debug_str << "t: yes, " << string(nz ? "nonzero. " : "zero. ");
}
if (show_properties)
KALDI_LOG << "Subspace GMM model properties: " << debug_str.str();
}
void MleAmSgmm2Accs::ResizeAccumulators(const AmSgmm2 &model,
SgmmUpdateFlagsType flags,
bool have_spk_vecs) {
num_pdfs_ = model.NumPdfs();
num_groups_ = model.NumGroups();
num_gaussians_ = model.NumGauss();
feature_dim_ = model.FeatureDim();
phn_space_dim_ = model.PhoneSpaceDim();
spk_space_dim_ = model.SpkSpaceDim();
total_frames_ = total_like_ = 0;
if (flags & (kSgmmPhoneProjections | kSgmmCovarianceMatrix)) {
Y_.resize(num_gaussians_);
for (int32 i = 0; i < num_gaussians_; i++) {
Y_[i].Resize(feature_dim_, phn_space_dim_);
}
} else {
Y_.clear();
}
if (flags & (kSgmmSpeakerProjections | kSgmmSpeakerWeightProjections)) {
gamma_s_.Resize(num_gaussians_);
} else {
gamma_s_.Resize(0);
}
if (flags & kSgmmSpeakerProjections) {
if (spk_space_dim_ == 0) {
KALDI_ERR << "Cannot set up accumulators for speaker projections "
<< "because speaker subspace has not been set up";
}
Z_.resize(num_gaussians_);
R_.resize(num_gaussians_);
for (int32 i = 0; i < num_gaussians_; i++) {
Z_[i].Resize(feature_dim_, spk_space_dim_);
R_[i].Resize(spk_space_dim_);
}
} else {
Z_.clear();
R_.clear();
}
if (flags & kSgmmCovarianceMatrix) {
S_.resize(num_gaussians_);
for (int32 i = 0; i < num_gaussians_; i++) {
S_[i].Resize(feature_dim_);
}
} else {
S_.clear();
}
if (flags & (kSgmmPhoneVectors | kSgmmPhoneWeightProjections |
kSgmmCovarianceMatrix | kSgmmPhoneProjections)) {
gamma_.resize(num_groups_);
for (int32 j1 = 0; j1 < num_groups_; j1++) {
gamma_[j1].Resize(model.NumSubstatesForGroup(j1), num_gaussians_);
}
} else {
gamma_.clear();
}
if (flags & (kSgmmPhoneVectors | kSgmmPhoneWeightProjections) &&
model.HasSpeakerDependentWeights() && have_spk_vecs) { // SSGMM code.
a_.resize(num_groups_);
for (int32 j1 = 0; j1 < num_groups_; j1++) {
a_[j1].Resize(model.NumSubstatesForGroup(j1), num_gaussians_);
}
} else {
a_.clear();
}
if (flags & kSgmmSpeakerWeightProjections) {
KALDI_ASSERT(model.HasSpeakerDependentWeights() &&
"remove the flag \"u\" if you don't have u set up.");
a_s_.Resize(num_gaussians_);
t_.Resize(num_gaussians_, spk_space_dim_);
U_.resize(num_gaussians_);
for (int32 i = 0; i < num_gaussians_; i++) U_[i].Resize(spk_space_dim_);
} else {
a_s_.Resize(0);
t_.Resize(0, 0);
U_.resize(0);
}
if (true) { // always set up gamma_c_; it's nominally for
// estimation of substate weights, but it's also required when
// GetStateOccupancies() is called.
gamma_c_.resize(num_pdfs_);
for (int32 j2 = 0; j2 < num_pdfs_; j2++) {
gamma_c_[j2].Resize(model.NumSubstatesForPdf(j2));
}
}
if (flags & kSgmmPhoneVectors) {
y_.resize(num_groups_);
for (int32 j1 = 0; j1 < num_groups_; j1++) {
y_[j1].Resize(model.NumSubstatesForGroup(j1), phn_space_dim_);
}
} else {
y_.clear();
}
}
BaseFloat MleAmSgmm2Accs::Accumulate(const AmSgmm2 &model,
const Sgmm2PerFrameDerivedVars &frame_vars,
int32 j2, BaseFloat weight,
Sgmm2PerSpkDerivedVars *spk_vars) {
// Calculate Gaussian posteriors and collect statistics
Matrix<BaseFloat> posteriors;
BaseFloat log_like =
model.ComponentPosteriors(frame_vars, j2, spk_vars, &posteriors);
posteriors.Scale(weight);
BaseFloat count =
AccumulateFromPosteriors(model, frame_vars, posteriors, j2, spk_vars);
// Note: total_frames_ is incremented in AccumulateFromPosteriors().
total_like_ += count * log_like;
return log_like;
}
BaseFloat MleAmSgmm2Accs::AccumulateFromPosteriors(
const AmSgmm2 &model, const Sgmm2PerFrameDerivedVars &frame_vars,
const Matrix<BaseFloat> &posteriors, int32 j2,
Sgmm2PerSpkDerivedVars *spk_vars) {
double tot_count = 0.0;
const vector<int32> &gselect = frame_vars.gselect;
// Intermediate variables
Vector<BaseFloat> gammat(gselect.size()), // sum of gammas over mix-weight.
a_is_part(gselect.size()); //
Vector<BaseFloat> xt_jmi(feature_dim_), mu_jmi(feature_dim_),
zt_jmi(spk_space_dim_);
int32 j1 = model.Pdf2Group(j2);
int32 num_substates = model.NumSubstatesForGroup(j1);
for (int32 m = 0; m < num_substates; m++) {
BaseFloat d_jms = model.GetDjms(j1, m, spk_vars);
BaseFloat gammat_jm = 0.0;
for (int32 ki = 0; ki < static_cast<int32>(gselect.size()); ki++) {
int32 i = gselect[ki];
// Eq. (39): gamma_{jmi}(t) = p (j, m, i|t)
BaseFloat gammat_jmi = RandPrune(posteriors(ki, m), rand_prune_);
if (gammat_jmi == 0.0) continue;
gammat(ki) += gammat_jmi;
if (gamma_s_.Dim() != 0) gamma_s_(i) += gammat_jmi;
gammat_jm += gammat_jmi;
// Accumulate statistics for non-zero gaussian posteriors
tot_count += gammat_jmi;
if (!gamma_.empty()) {
// Eq. (40): gamma_{jmi} = \sum_t gamma_{jmi}(t)
gamma_[j1](m, i) += gammat_jmi;
}
if (!y_.empty()) {
// Eq. (41): y_{jm} = \sum_{t, i} \gamma_{jmi}(t) z_{i}(t)
// Suggestion: move this out of the loop over m
y_[j1].Row(m).AddVec(gammat_jmi, frame_vars.zti.Row(ki));
}
if (!Y_.empty()) {
// Eq. (42): Y_{i} = \sum_{t, j, m} \gamma_{jmi}(t) x_{i}(t) v_{jm}^T
Y_[i].AddVecVec(gammat_jmi, frame_vars.xti.Row(ki),
model.v_[j1].Row(m));
}
// Accumulate for speaker projections
if (!Z_.empty()) {
KALDI_ASSERT(spk_space_dim_ > 0);
// Eq. (43): x_{jmi}(t) = x_k(t) - M_{i} v_{jm}
model.GetSubstateMean(j1, m, i, &mu_jmi);
xt_jmi.CopyFromVec(frame_vars.xt);
xt_jmi.AddVec(-1.0, mu_jmi);
// Eq. (44): Z_{i} = \sum_{t, j, m} \gamma_{jmi}(t) x_{jmi}(t) v^{s}'
if (spk_vars->v_s.Dim() != 0) // interpret empty v_s as zero.
Z_[i].AddVecVec(gammat_jmi, xt_jmi, spk_vars->v_s);
// Eq. (49): \gamma_{i}^{(s)} = \sum_{t\in\Tau(s), j, m} gamma_{jmi}
// Will be used when you call CommitStatsForSpk(), to update R_.
}
} // loop over selected Gaussians
if (gammat_jm != 0.0) {
if (!a_.empty()) { // SSGMM code.
KALDI_ASSERT(d_jms > 0);
// below is eq. 40 in the MSR techreport. Caution: there
// was an error in the original techreport. The index i
// in the summation and the quantity \gamma_{jmi}^{(t)}
// should be differently named, e.g. i'.
a_[j1].Row(m).AddVec(gammat_jm / d_jms, spk_vars->b_is);
}
if (a_s_.Dim() != 0) { // [SSGMM]
KALDI_ASSERT(d_jms > 0);
KALDI_ASSERT(!model.w_jmi_.empty());
a_s_.AddVec(gammat_jm / d_jms, model.w_jmi_[j1].Row(m));
}
if (!gamma_c_.empty()) gamma_c_[j2](m) += gammat_jm;
}
} // loop over substates
if (!S_.empty()) {
for (int32 ki = 0; ki < static_cast<int32>(gselect.size()); ki++) {
// Eq. (47): S_{i} = \sum_{t, j, m} \gamma_{jmi}(t) x_{i}(t) x_{i}(t)^T
if (gammat(ki) != 0.0) {
int32 i = gselect[ki];
S_[i].AddVec2(gammat(ki), frame_vars.xti.Row(ki));
}
}
}
total_frames_ += tot_count;
return tot_count;
}
void MleAmSgmm2Accs::CommitStatsForSpk(const AmSgmm2 &model,
const Sgmm2PerSpkDerivedVars &spk_vars) {
const VectorBase<BaseFloat> &v_s = spk_vars.v_s;
if (v_s.Dim() != 0 && !v_s.IsZero() && !R_.empty()) {
for (int32 i = 0; i < num_gaussians_; i++)
// Accumulate Statistics R_{ki}
if (gamma_s_(i) != 0.0) R_[i].AddVec2(gamma_s_(i), Vector<double>(v_s));
}
if (a_s_.Dim() != 0) {
Vector<BaseFloat> tmp(gamma_s_);
// tmp(i) = gamma_s^{(i)} - a_i^{(s)} b_i^{(s)}.
tmp.AddVecVec(-1.0, Vector<BaseFloat>(a_s_), spk_vars.b_is, 1.0);
t_.AddVecVec(1.0, tmp, v_s); // eq. 53 of techreport.
for (int32 i = 0; i < num_gaussians_; i++) {
U_[i].AddVec2(a_s_(i) * spk_vars.b_is(i),
Vector<double>(v_s)); // eq. 54 of techreport.
}
}
gamma_s_.SetZero();
a_s_.SetZero();
}
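// Illustrative calling pattern for these accumulators (a sketch only, not
// part of the original source; names such as "pdf_ids" and "weights" are
// hypothetical placeholders):
//
//   MleAmSgmm2Accs accs;  // construction details omitted
//   accs.ResizeAccumulators(model, flags, true /* have_spk_vecs */);
//   for (/* each speaker s */) {
//     Sgmm2PerSpkDerivedVars spk_vars;  // set up from the speaker vector
//     for (/* each frame t of speaker s */) {
//       Sgmm2PerFrameDerivedVars frame_vars;  // gselect, xt, xti, zti, ...
//       accs.Accumulate(model, frame_vars, pdf_ids[t], weights[t], &spk_vars);
//     }
//     accs.CommitStatsForSpk(model, spk_vars);  // flushes gamma_s_ / a_s_
//   }                                           // into t_, U_, R_, etc.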
void MleAmSgmm2Accs::GetStateOccupancies(Vector<BaseFloat> *occs) const {
int32 J2 = gamma_c_.size();
occs->Resize(J2);
for (int32 j2 = 0; j2 < J2; j2++) {
(*occs)(j2) = gamma_c_[j2].Sum();
}
}
void MleAmSgmm2Updater::Update(const MleAmSgmm2Accs &accs, AmSgmm2 *model,
SgmmUpdateFlagsType flags) {
// Q_{i}, quadratic term for phonetic subspace estimation. Dim is [I][S][S]
std::vector<SpMatrix<double> > Q;
// Eq (74): S_{i}^{(means)}, scatter of substate mean vectors for estimating
// the shared covariance matrices. [Actually this variable contains also the
// term -(Y_i M_i^T + M_i Y_i^T).] Dimension is [I][D][D].
std::vector<SpMatrix<double> > S_means;
std::vector<Matrix<double> > log_a;
Vector<double> gamma_i(accs.num_gaussians_);
for (int32 j1 = 0; j1 < accs.num_groups_; j1++)
gamma_i.AddRowSumMat(1.0, accs.gamma_[j1]); // add sum of rows of
// accs.gamma_[j1], to gamma_i.
if (flags & kSgmmPhoneProjections) ComputeQ(accs, *model, &Q);
if (flags & kSgmmCovarianceMatrix) ComputeSMeans(accs, *model, &S_means);
if (!accs.a_.empty()) ComputeLogA(accs, &log_a);
// quantities used in both vector and weights updates...
vector<SpMatrix<double> > H;
// "smoothing" matrices, weighted sums of above.
SpMatrix<double>
H_sm; // weighted sum of H. Used e.g. in renormalizing phonetic space.
if ((flags & (kSgmmPhoneVectors | kSgmmPhoneWeightProjections)) ||
options_.renormalize_V)
model->ComputeH(&H);
BaseFloat tot_impr = 0.0;
if (flags & kSgmmPhoneVectors)
tot_impr += UpdatePhoneVectors(accs, H, log_a, model);
if (flags & kSgmmPhoneProjections) {
if (options_.tau_map_M > 0.0)
tot_impr += MapUpdateM(accs, Q, gamma_i, model); // MAP adaptation of M
else
tot_impr += UpdateM(accs, Q, gamma_i, model);
}
if (flags & kSgmmPhoneWeightProjections)
tot_impr += UpdateW(accs, log_a, gamma_i, model);
if (flags & kSgmmCovarianceMatrix)
tot_impr += UpdateVars(accs, S_means, gamma_i, model);
if (flags & kSgmmSubstateWeights)
tot_impr += UpdateSubstateWeights(accs, model);
if (flags & kSgmmSpeakerProjections)
tot_impr += UpdateN(accs, gamma_i, model);
if (flags & kSgmmSpeakerWeightProjections)
tot_impr += UpdateU(accs, gamma_i, model);
if ((flags & kSgmmSpeakerProjections) && (options_.renormalize_N))
RenormalizeN(accs, gamma_i, model); // if you renormalize N you have to
// alter any speaker vectors you're keeping around, as well.
// So be careful with this option.
if (options_.renormalize_V) RenormalizeV(accs, model, gamma_i, H);
KALDI_LOG << "*Overall auxf improvement, combining all parameters, is "
<< tot_impr;
KALDI_LOG << "***Overall data likelihood is "
<< (accs.total_like_ / accs.total_frames_) << " over "
<< accs.total_frames_ << " frames.";
model->n_.clear(); // has become invalid.
model->w_jmi_.clear(); // has become invalid.
// we updated the v or w quantities.
}
// Compute the Q_{i} (Eq. 64)
void MleAmSgmm2Updater::ComputeQ(const MleAmSgmm2Accs &accs,
const AmSgmm2 &model,
std::vector<SpMatrix<double> > *Q) {
Q->resize(accs.num_gaussians_);
for (int32 i = 0; i < accs.num_gaussians_; i++) {
(*Q)[i].Resize(accs.phn_space_dim_);
for (int32 j1 = 0; j1 < accs.num_groups_; j1++) {
for (int32 m = 0; m < model.NumSubstatesForGroup(j1); m++) {
if (accs.gamma_[j1](m, i) > 0.0) {
(*Q)[i].AddVec2(static_cast<BaseFloat>(accs.gamma_[j1](m, i)),
model.v_[j1].Row(m));
}
}
}
}
}
// Compute the S_i^{(means)} quantities (Eq. 74).
// Note: we seem to have also included in this variable
// the term - (Y_i M_i^T + M_i Y_i^T).
void MleAmSgmm2Updater::ComputeSMeans(const MleAmSgmm2Accs &accs,
const AmSgmm2 &model,
std::vector<SpMatrix<double> > *S_means) {
S_means->resize(accs.num_gaussians_);
Matrix<double> YM_MY(accs.feature_dim_, accs.feature_dim_);
Vector<BaseFloat> mu_jmi(accs.feature_dim_);
for (int32 i = 0; i < accs.num_gaussians_; i++) {
// YM_MY = - (Y_{i} M_{i}^T)
YM_MY.AddMatMat(-1.0, accs.Y_[i], kNoTrans, Matrix<double>(model.M_[i]),
kTrans, 0.0);
// Add its own transpose: YM_MY = - (Y_{i} M_{i}^T + M_{i} Y_{i}^T)
{
Matrix<double> M(YM_MY, kTrans);
YM_MY.AddMat(1.0, M);
}
(*S_means)[i].Resize(accs.feature_dim_, kUndefined);
(*S_means)[i].CopyFromMat(YM_MY); // Sigma_{i} = -(YM' + MY')
for (int32 j1 = 0; j1 < accs.num_groups_; j1++) {
for (int32 m = 0; m < model.NumSubstatesForGroup(j1); m++) {
if (accs.gamma_[j1](m, i) != 0.0) {
// Sigma_{i} += gamma_{jmi} * mu_{jmi}*mu_{jmi}^T
mu_jmi.AddMatVec(1.0, model.M_[i], kNoTrans, model.v_[j1].Row(m),
0.0);
(*S_means)[i].AddVec2(static_cast<BaseFloat>(accs.gamma_[j1](m, i)),
mu_jmi);
}
}
}
KALDI_ASSERT(1.0 / (*S_means)[i](0, 0) != 0.0);
}
}
class UpdatePhoneVectorsClass : public MultiThreadable { // For multi-threaded.
public:
UpdatePhoneVectorsClass(const MleAmSgmm2Updater &updater,
const MleAmSgmm2Accs &accs,
const std::vector<SpMatrix<double> > &H,
const std::vector<Matrix<double> > &log_a,
AmSgmm2 *model, double *auxf_impr)
: updater_(updater),
accs_(accs),
model_(model),
H_(H),
log_a_(log_a),
auxf_impr_ptr_(auxf_impr),
auxf_impr_(0.0) {}
~UpdatePhoneVectorsClass() { *auxf_impr_ptr_ += auxf_impr_; }
inline void operator()() {
// Note: give them local copy of the sums we're computing,
// which will be propagated to the total sums in the destructor.
updater_.UpdatePhoneVectorsInternal(accs_, H_, log_a_, model_, &auxf_impr_,
num_threads_, thread_id_);
}
private:
const MleAmSgmm2Updater &updater_;
const MleAmSgmm2Accs &accs_;
AmSgmm2 *model_;
const std::vector<SpMatrix<double> > &H_;
const std::vector<Matrix<double> > &log_a_;
double *auxf_impr_ptr_;
double auxf_impr_;
};
/**
In this update, smoothing terms are not supported. However, it does compute
the auxiliary function after doing the update, and backtracks if it did not
increase (due to the weight terms, increase is not mathematically
guaranteed). */
double MleAmSgmm2Updater::UpdatePhoneVectors(
const MleAmSgmm2Accs &accs, const vector<SpMatrix<double> > &H,
const vector<Matrix<double> > &log_a, AmSgmm2 *model) const {
KALDI_LOG << "Updating phone vectors";
double count = 0.0, auxf_impr = 0.0; // sum over all states
for (int32 j1 = 0; j1 < accs.num_groups_; j1++)
count += accs.gamma_[j1].Sum();
UpdatePhoneVectorsClass c(*this, accs, H, log_a, model, &auxf_impr);
RunMultiThreaded(c);
double auxf_per_frame = auxf_impr / (count + 1.0e-20);
KALDI_LOG << "**Overall auxf impr for v is " << auxf_per_frame << " over "
<< count << " frames";
return auxf_per_frame;
}
// static
void MleAmSgmm2Updater::ComputeLogA(const MleAmSgmm2Accs &accs,
std::vector<Matrix<double> > *log_a) {
// This computes the logarithm of the statistics a_{jmi} defined
// in Eq. 40 of the SSGMM techreport. Although the log of a_{jmi} never
// explicitly appears in the techreport, it happens to be more convenient
// in the code to use the log of it.
// Note: because of the way a is computed, for each (j,m) the
// entries over i should always be all zero or all nonzero.
int32 num_zeros = 0;
KALDI_ASSERT(accs.a_.size() == accs.num_groups_);
log_a->resize(accs.num_groups_);
for (int32 j1 = 0; j1 < accs.num_groups_; j1++) {
int32 num_substates = accs.a_[j1].NumRows();
KALDI_ASSERT(num_substates > 0);
(*log_a)[j1].Resize(num_substates, accs.num_gaussians_);
for (int32 m = 0; m < num_substates; m++) {
if (accs.a_[j1](m, 0) == 0.0) { // Zero accs.
num_zeros++;
if (accs.gamma_[j1].Row(m).Sum() != 0.0)
KALDI_WARN << "Inconsistency between a and gamma stats. [BAD!]";
// leave the row zero. This means the sub-state saw no stats.
} else {
(*log_a)[j1].Row(m).CopyFromVec(accs.a_[j1].Row(m));
(*log_a)[j1].Row(m).ApplyLog();
}
}
}
if (num_zeros != 0)
KALDI_WARN << num_zeros
<< " sub-states with zero \"a\" (and presumably gamma) stats.";
}
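// (When log_a is nonempty, UpdatePhoneVectorsInternal and UpdateWGetStats add
// it to w_jm before the LogSumExp normalization, so the effective weights
// become w_jm(i) \propto a_{jmi} exp(w_i^T v_{jm}); see the lines commented
// "SSGMM techreport eq. 42" below.)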
void MleAmSgmm2Updater::UpdatePhoneVectorsInternal(
const MleAmSgmm2Accs &accs, const vector<SpMatrix<double> > &H,
const vector<Matrix<double> > &log_a, AmSgmm2 *model, double *auxf_impr_ptr,
int32 num_threads, int32 thread_id) const {
int32 J1 = accs.num_groups_,
block_size = (J1 + (num_threads - 1)) / num_threads,
j1_start = block_size * thread_id,
j1_end = std::min(accs.num_groups_, j1_start + block_size);
double tot_auxf_impr = 0.0;
for (int32 j1 = j1_start; j1 < j1_end; j1++) {
for (int32 m = 0; m < model->NumSubstatesForGroup(j1); m++) {
double gamma_jm = accs.gamma_[j1].Row(m).Sum();
SpMatrix<double> X_jm(accs.phn_space_dim_); // = \sum_i \gamma_{jmi} H_i
for (int32 i = 0; i < accs.num_gaussians_; i++) {
double gamma_jmi = accs.gamma_[j1](m, i);
if (gamma_jmi != 0.0) X_jm.AddSp(gamma_jmi, H[i]);
}
Vector<double> v_jm_orig(model->v_[j1].Row(m)), v_jm(v_jm_orig);
double exact_auxf_start = 0.0, exact_auxf = 0.0, approx_auxf_impr = 0.0;
int32 backtrack_iter, max_backtrack = 10;
for (backtrack_iter = 0; backtrack_iter < max_backtrack;
backtrack_iter++) {
// Note: the 1st time we go through this loop we have not yet updated
// v_jm and it has the old value; the 2nd time, it has the updated value
// and we will typically break at this point, after verifying that
// the auxf has improved.
// w_jm = softmax([w_{k1}^T ... w_{kD}^T] * v_{jkm}) eq.(7)
Vector<double> w_jm(accs.num_gaussians_);
w_jm.AddMatVec(1.0, Matrix<double>(model->w_), kNoTrans, v_jm, 0.0);
if (!log_a.empty())
w_jm.AddVec(1.0, log_a[j1].Row(m)); // SSGMM techreport eq. 42
w_jm.Add(-w_jm.LogSumExp()); // it is now log w_jm
exact_auxf = VecVec(w_jm, accs.gamma_[j1].Row(m)) +
VecVec(v_jm, accs.y_[j1].Row(m)) -
0.5 * VecSpVec(v_jm, X_jm, v_jm);
if (backtrack_iter == 0) {
exact_auxf_start = exact_auxf;
} else {
if (exact_auxf >= exact_auxf_start) {
break; // terminate backtracking.
} else {
KALDI_LOG << "Backtracking computation of v_jm for j = " << j1
<< " and m = " << m << " because auxf changed by "
<< (exact_auxf - exact_auxf_start) << " [vs. predicted:] "
<< approx_auxf_impr;
v_jm.AddVec(1.0, v_jm_orig);
v_jm.Scale(0.5);
}
}
if (backtrack_iter == 0) { // computing updated value.
w_jm.ApplyExp(); // it is now w_jm
SpMatrix<double> H_jm(X_jm);
Vector<double> g_jm(accs.y_[j1].Row(m));
for (int32 i = 0; i < accs.num_gaussians_; i++) {
double gamma_jmi = accs.gamma_[j1](m, i);
double quadratic_term = std::max(gamma_jmi, gamma_jm * w_jm(i));
double scalar =
gamma_jmi - gamma_jm * w_jm(i) +
quadratic_term * VecVec(model->w_.Row(i), model->v_[j1].Row(m));
g_jm.AddVec(scalar, model->w_.Row(i));
if (quadratic_term > 1.0e-10) {
H_jm.AddVec2(static_cast<BaseFloat>(quadratic_term),
model->w_.Row(i));
}
}
SolverOptions opts;
opts.name = "v";
opts.K = options_.max_cond;
opts.eps = options_.epsilon;
approx_auxf_impr = SolveQuadraticProblem(H_jm, g_jm, opts, &v_jm);
}
}
double exact_auxf_impr = exact_auxf - exact_auxf_start;
tot_auxf_impr += exact_auxf_impr;
if (backtrack_iter == max_backtrack) {
KALDI_WARN << "Backtracked " << max_backtrack
<< " times [not updating]";
} else {
model->v_[j1].Row(m).CopyFromVec(v_jm);
}
if (j1 < 3 && m < 3) {
KALDI_LOG << "Auxf impr for j = " << j1 << " m = " << m << " is "
<< (exact_auxf_impr / gamma_jm + 1.0e-20)
<< " per frame over " << gamma_jm << " frames.";
}
}
}
*auxf_impr_ptr = tot_auxf_impr;
}
void MleAmSgmm2Updater::RenormalizeV(const MleAmSgmm2Accs &accs, AmSgmm2 *model,
const Vector<double> &gamma_i,
const vector<SpMatrix<double> > &H) {
// Compute H^{(sm)}, the "smoothing" matrix-- average of H's.
SpMatrix<double> H_sm(accs.phn_space_dim_);
for (int32 i = 0; i < accs.num_gaussians_; i++) H_sm.AddSp(gamma_i(i), H[i]);
KALDI_ASSERT(gamma_i.Sum() > 0.0);
H_sm.Scale(1.0 / gamma_i.Sum());
SpMatrix<double> Sigma(accs.phn_space_dim_);
int32 count = 0;
for (int32 j1 = 0; j1 < accs.num_groups_; j1++) {
for (int32 m = 0; m < model->NumSubstatesForGroup(j1); m++) {
count++;
Sigma.AddVec2(static_cast<BaseFloat>(1.0), model->v_[j1].Row(m));
}
}
if (!Sigma.IsPosDef()) {
KALDI_LOG << "Not renormalizing v because scatter is not positive definite"
<< " -- maybe first iter?";
return;
}
Sigma.Scale(1.0 / count);
KALDI_LOG << "Scatter of vectors v is : ";
Sigma.PrintEigs("Sigma");
// Want to make variance of v unit and H_sm (like precision matrix) diagonal.
TpMatrix<double> L(accs.phn_space_dim_);
L.Cholesky(Sigma);
TpMatrix<double> LInv(L);
LInv.Invert();
Matrix<double> tmpL(accs.phn_space_dim_, accs.phn_space_dim_);
tmpL.CopyFromTp(L);
SpMatrix<double> H_sm_proj(accs.phn_space_dim_);
H_sm_proj.AddMat2Sp(1.0, tmpL, kTrans, H_sm, 0.0);
// H_sm_proj := L^{T} * H_sm * L.
// This is right because we would transform the vectors themselves
// by L^{-1}, and H_sm is like the inverse of the vectors,
// so it's {L^{-1}}^{-T} = L^T.
Matrix<double> U(accs.phn_space_dim_, accs.phn_space_dim_);
Vector<double> eigs(accs.phn_space_dim_);
H_sm_proj.SymPosSemiDefEig(&eigs, &U,
1.0); // 1.0 means no checking +ve def -> faster
KALDI_LOG << "Note on the next diagnostic: the first number is generally not "
<< "that meaningful as it relates to the static offset";
H_sm_proj.PrintEigs(
"H_sm_proj (Significance of dims in vector space.. note)");
// Transform on vectors is U^T L^{-1}.
// Why? Because transform on H_sm is T =U^T L^T
// and we want T^{-T} by normal rules of vector/covector and we
// have (U^T L^T)^{-T} = (L U)^{-1} = U^T L^{-1}.
Matrix<double> Trans(accs.phn_space_dim_, accs.phn_space_dim_); // T^{-T}
Matrix<double> tmpLInv(accs.phn_space_dim_, accs.phn_space_dim_);
tmpLInv.CopyFromTp(LInv);
Trans.AddMatMat(1.0, U, kTrans, tmpLInv, kNoTrans, 0.0);
Matrix<double> TransInv(Trans);
TransInv.Invert(); // T in above...
#ifdef KALDI_PARANOID
{
SpMatrix<double> H_sm_tmp(accs.phn_space_dim_);
H_sm_tmp.AddMat2Sp(1.0, TransInv, kTrans, H_sm, 0.0);
KALDI_ASSERT(H_sm_tmp.IsDiagonal(0.1));
}
{
SpMatrix<double> Sigma_tmp(accs.phn_space_dim_);
Sigma_tmp.AddMat2Sp(1.0, Trans, kNoTrans, Sigma, 0.0);
KALDI_ASSERT(Sigma_tmp.IsUnit(0.1));
}
#endif
for (int32 j1 = 0; j1 < accs.num_groups_; j1++) {
for (int32 m = 0; m < model->NumSubstatesForGroup(j1); m++) {
Vector<double> tmp(accs.phn_space_dim_);
tmp.AddMatVec(1.0, Trans, kNoTrans, Vector<double>(model->v_[j1].Row(m)),
0.0);
model->v_[j1].Row(m).CopyFromVec(tmp);
}
}
for (int32 i = 0; i < accs.num_gaussians_; i++) {
Vector<double> tmp(accs.phn_space_dim_);
tmp.AddMatVec(1.0, TransInv, kTrans, Vector<double>(model->w_.Row(i)), 0.0);
model->w_.Row(i).CopyFromVec(tmp);
Matrix<double> tmpM(accs.feature_dim_, accs.phn_space_dim_);
// Multiplying on right not left so must not transpose TransInv.
tmpM.AddMatMat(1.0, Matrix<double>(model->M_[i]), kNoTrans, TransInv,
kNoTrans, 0.0);
model->M_[i].CopyFromMat(tmpM);
}
KALDI_LOG << "Renormalized subspace.";
}
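// Note on RenormalizeV: writing T for the matrix "Trans" above, the code maps
//   v_{jm} -> T v_{jm},   w_i -> T^{-T} w_i,   M_i -> M_i T^{-1},
// so the quantities the model actually uses, w_i^T v_{jm} and M_i v_{jm},
// are unchanged; only the coordinate system of the phonetic subspace (and
// hence the conditioning of subsequent updates) changes.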
double MleAmSgmm2Updater::UpdateM(const MleAmSgmm2Accs &accs,
const std::vector<SpMatrix<double> > &Q,
const Vector<double> &gamma_i,
AmSgmm2 *model) {
double tot_count = 0.0, tot_like_impr = 0.0;
for (int32 i = 0; i < accs.num_gaussians_; i++) {
if (gamma_i(i) < accs.feature_dim_) {
KALDI_WARN << "For component " << i << ": not updating M due to very "
<< "small count (=" << gamma_i(i) << ").";
continue;
}
SolverOptions opts;
opts.name = "M";
opts.K = options_.max_cond;
opts.eps = options_.epsilon;
Matrix<double> Mi(model->M_[i]);
double impr = SolveQuadraticMatrixProblem(
Q[i], accs.Y_[i], SpMatrix<double>(model->SigmaInv_[i]), opts, &Mi);
model->M_[i].CopyFromMat(Mi);
if (i < 10) {
KALDI_VLOG(2) << "Objf impr for projection M for i = " << i << ", is "
<< (impr / (gamma_i(i) + 1.0e-20)) << " over " << gamma_i(i)
<< " frames";
}
tot_count += gamma_i(i);
tot_like_impr += impr;
}
tot_like_impr /= (tot_count + 1.0e-20);
KALDI_LOG << "Overall objective function improvement for model projections "
<< "M is " << tot_like_impr << " over " << tot_count << " frames";
return tot_like_impr;
}
// Estimate the parameters of a Gaussian prior over the M matrices. There are
// as many mean matrices as UBM size and two covariance matrices for the rows
// of M and columns of M. The prior means M_i are fixed to the unadapted values.
// This is what was done in Lu, et al. "Maximum a posteriori adaptation of
// subspace Gaussian mixture models for cross-lingual speech recognition",
// ICASSP 2012.
void MleAmSgmm2Updater::ComputeMPrior(AmSgmm2 *model) {
KALDI_ASSERT(options_.map_M_prior_iters > 0);
int32 Ddim = model->FeatureDim();
int32 Sdim = model->PhoneSpaceDim();
int32 nGaussians = model->NumGauss();
// inverse variance of the columns of M: dim is # of rows
model->col_cov_inv_.Resize(Ddim);
// inverse covariance of the rows of M: dim is # of columns
model->row_cov_inv_.Resize(Sdim);
model->col_cov_inv_.SetUnit();
model->row_cov_inv_.SetUnit();
if (model->M_prior_.size() == 0) {
model->M_prior_.resize(nGaussians);
for (int32 i = 0; i < nGaussians; i++) {
model->M_prior_[i].Resize(Ddim, Sdim);
model->M_prior_[i].CopyFromMat(
model->M_[i]); // We initialize Mpri as this
}
}
if (options_.full_col_cov || options_.full_row_cov) {
Matrix<double> avg_M(Ddim, Sdim); // average of the Gaussian prior means
for (int32 i = 0; i < nGaussians; i++)
avg_M.AddMat(1.0, Matrix<double>(model->M_prior_[i]));
avg_M.Scale(1.0 / nGaussians);
Matrix<double> MDiff(Ddim, Sdim);
for (int32 iter = 0; iter < options_.map_M_prior_iters; iter++) {
{ // diagnostic block.
double prior_like =
-0.5 * nGaussians * (Ddim * Sdim * log(2 * M_PI) +
Sdim * (-model->row_cov_inv_.LogPosDefDet()) +
Ddim * (-model->col_cov_inv_.LogPosDefDet()));
for (int32 i = 0; i < nGaussians; i++) {
MDiff.CopyFromMat(Matrix<double>(model->M_prior_[i]));
MDiff.AddMat(-1.0, avg_M); // MDiff = M_{i} - avg(M)
SpMatrix<double> tmp(Ddim);
// tmp = MDiff.Omega_r^{-1}*MDiff^T.
tmp.AddMat2Sp(1.0, MDiff, kNoTrans,
SpMatrix<double>(model->row_cov_inv_), 0.0);
prior_like -=
0.5 * TraceSpSp(tmp, SpMatrix<double>(model->col_cov_inv_));
}
KALDI_LOG
<< "Before iteration " << iter
<< " of updating prior over M, log like per dimension modeled is "
<< prior_like / (nGaussians * Ddim * Sdim);
}
// First estimate the column covariances (\Omega_r in paper)
if (options_.full_col_cov) {
size_t limited;
model->col_cov_inv_.SetZero();
for (int32 i = 0; i < nGaussians; i++) {
MDiff.CopyFromMat(Matrix<double>(model->M_prior_[i]));
MDiff.AddMat(-1.0, avg_M); // MDiff = M_{i} - avg(M)
// Omega_r += 1/(D*I) * Mdiff * Omega_c^{-1} * Mdiff^T
model->col_cov_inv_.AddMat2Sp(1.0 / (Ddim * nGaussians),
Matrix<BaseFloat>(MDiff), kNoTrans,
model->row_cov_inv_, 1.0);
}
model->col_cov_inv_.PrintEigs("col_cov");
limited = model->col_cov_inv_.LimitCond(options_.max_cond,
true /*invert the matrix*/);
if (limited != 0) {
KALDI_LOG << "Computing column covariances for M: limited " << limited
<< " singular values, max condition is "
<< options_.max_cond;
}
}
// Now estimate the row covariances (\Omega_c in paper)
if (options_.full_row_cov) {
size_t limited;
model->row_cov_inv_.SetZero();
for (int32 i = 0; i < nGaussians; i++) {
MDiff.CopyFromMat(Matrix<double>(model->M_prior_[i]));
MDiff.AddMat(-1.0, avg_M); // MDiff = M_{i} - avg(M)
// Omega_c += 1/(S*I) * Mdiff^T * Omega_r^{-1} * Mdiff.
model->row_cov_inv_.AddMat2Sp(1.0 / (Sdim * nGaussians),
Matrix<BaseFloat>(MDiff), kTrans,
model->col_cov_inv_, 1.0);
}
model->row_cov_inv_.PrintEigs("row_cov");
limited = model->row_cov_inv_.LimitCond(options_.max_cond,
true /*invert the matrix*/);
if (limited != 0) {
KALDI_LOG << "Computing row covariances for M: limited " << limited
<< " singular values, max condition is "
<< options_.max_cond;
}
}
} // end iterations
}
}
// MAP adaptation of M with a matrix-variate Gaussian prior
double MleAmSgmm2Updater::MapUpdateM(const MleAmSgmm2Accs &accs,
const std::vector<SpMatrix<double> > &Q,
const Vector<double> &gamma_i,
AmSgmm2 *model) {
int32 Ddim = model->FeatureDim();
int32 Sdim = model->PhoneSpaceDim();
int32 nGaussians = model->NumGauss();
KALDI_LOG << "Prior smoothing parameter: Tau = " << options_.tau_map_M;
if (model->M_prior_.size() == 0 || model->col_cov_inv_.NumRows() == 0 ||
model->row_cov_inv_.NumRows() == 0) {
KALDI_LOG << "Computing the prior first";
ComputeMPrior(model);
}
Matrix<double> G(Ddim, Sdim);
// \tau \Omega_c^{-1} avg(M) \Omega_r^{-1}, depends on Gaussian index
Matrix<double> prior_term_i(Ddim, Sdim);
SpMatrix<double> P2(model->col_cov_inv_);
SpMatrix<double> Q2(model->row_cov_inv_);
Q2.Scale(options_.tau_map_M);
double totcount = 0.0, tot_like_impr = 0.0;
for (int32 i = 0; i < nGaussians; ++i) {
if (gamma_i(i) < accs.feature_dim_) {
KALDI_WARN << "For component " << i << ": not updating M due to very "
<< "small count (=" << gamma_i(i) << ").";
continue;
}
Matrix<double> tmp(Ddim, Sdim, kSetZero);
tmp.AddSpMat(1.0, SpMatrix<double>(model->col_cov_inv_),
Matrix<double>(model->M_prior_[i]), kNoTrans, 0.0);
prior_term_i.AddMatSp(options_.tau_map_M, tmp, kNoTrans,
SpMatrix<double>(model->row_cov_inv_), 0.0);
Matrix<double> SigmaY(Ddim, Sdim, kSetZero);
SigmaY.AddSpMat(1.0, SpMatrix<double>(model->SigmaInv_[i]), accs.Y_[i],
kNoTrans, 0.0);
G.CopyFromMat(SigmaY); // G = \Sigma_{i}^{-1} Y_{i}
G.AddMat(1.0,
prior_term_i); // G += \tau \Omega_c^{-1} avg(M) \Omega_r^{-1}
SpMatrix<double> P1(model->SigmaInv_[i]);
Matrix<double> Mi(model->M_[i]);
SolverOptions opts;
opts.name = "M";
opts.K = options_.max_cond;
opts.eps = options_.epsilon;
double impr =
SolveDoubleQuadraticMatrixProblem(G, P1, P2, Q[i], Q2, opts, &Mi);
model->M_[i].CopyFromMat(Mi);
if (i < 10) {
KALDI_LOG << "Objf impr for projection M for i = " << i << ", is "
<< (impr / (gamma_i(i) + 1.0e-20)) << " over " << gamma_i(i)
<< " frames";
}
totcount += gamma_i(i);
tot_like_impr += impr;
}
tot_like_impr /= (totcount + 1.0e-20);
KALDI_LOG << "Overall objective function improvement for model projections "
<< "M is " << tot_like_impr << " over " << totcount << " frames";
return tot_like_impr;
}
/// This function gets stats used inside UpdateW, where it accumulates
/// the F_i and g_i quantities. Note: F_i is viewed as a vector of SpMatrix
/// (one for each i); each row of F_i is viewed as an SpMatrix even though
/// it's stored as a vector....
/// Note: on the first iteration w is just a double-precision copy of the matrix
/// model->w_; thereafter it may differ.
/// log_a relates to the SSGMM.
// static
void MleAmSgmm2Updater::UpdateWGetStats(
const MleAmSgmm2Accs &accs, const AmSgmm2 &model, const Matrix<double> &w,
const std::vector<Matrix<double> > &log_a, Matrix<double> *F_i,
Matrix<double> *g_i, double *tot_like, int32 num_threads, int32 thread_id) {
// Accumulate stats from a block of states (this gets called in parallel).
int32 block_size = (accs.num_groups_ + (num_threads - 1)) / num_threads,
j1_start = block_size * thread_id,
j1_end = std::min(accs.num_groups_, j1_start + block_size);
// Unlike in the report, the innermost loop is over Gaussians, where
// per-Gaussian statistics are accumulated. This is more memory-demanding
// but more computationally efficient, as the outer product v_{jkm} v_{jkm}^T
// is computed only once for all Gaussians.
SpMatrix<double> v_vT(accs.phn_space_dim_);
for (int32 j1 = j1_start; j1 < j1_end; j1++) {
int32 num_substates = model.NumSubstatesForGroup(j1);
Matrix<double> w_j(num_substates, accs.num_gaussians_);
// The linear term and quadratic term for each Gaussian-- two scalars
// for each Gaussian, they appear in the accumulation formulas.
Matrix<double> linear_term(num_substates, accs.num_gaussians_);
Matrix<double> quadratic_term(num_substates, accs.num_gaussians_);
Matrix<double> v_vT_m(
num_substates, (accs.phn_space_dim_ * (accs.phn_space_dim_ + 1)) / 2);
// w_jm = softmax([w_{k1}^T ... w_{kD}^T] * v_{jkm}) eq.(7)
Matrix<double> v_j_double(model.v_[j1]);
w_j.AddMatMat(1.0, v_j_double, kNoTrans, w, kTrans, 0.0);
if (!log_a.empty()) w_j.AddMat(1.0, log_a[j1]); // SSGMM techreport eq. 42
for (int32 m = 0; m < model.NumSubstatesForGroup(j1); m++) {
SubVector<double> w_jm(w_j, m);
double gamma_jm = accs.gamma_[j1].Row(m).Sum();
w_jm.Add(-1.0 * w_jm.LogSumExp());
*tot_like += VecVec(w_jm, accs.gamma_[j1].Row(m));
w_jm.ApplyExp();
v_vT.SetZero();
// v_vT := v_{jkm} v_{jkm}^T
v_vT.AddVec2(static_cast<BaseFloat>(1.0), v_j_double.Row(m));
v_vT_m.Row(m)
.CopyFromPacked(v_vT); // a bit wasteful, but does not dominate.
for (int32 i = 0; i < accs.num_gaussians_; i++) {
// Suggestion: g_jkm can be computed more efficiently
// using the Vector/Matrix routines for all i at once
// linear term around cur value.
linear_term(m, i) = accs.gamma_[j1](m, i) - gamma_jm * w_jm(i);
quadratic_term(m, i) =
std::max(accs.gamma_[j1](m, i), gamma_jm * w_jm(i));
}
} // loop over substates
g_i->AddMatMat(1.0, linear_term, kTrans, v_j_double, kNoTrans, 1.0);
F_i->AddMatMat(1.0, quadratic_term, kTrans, v_vT_m, kNoTrans, 1.0);
} // loop over states
}
// The parallel weight update, in the paper.
double MleAmSgmm2Updater::UpdateW(const MleAmSgmm2Accs &accs,
const std::vector<Matrix<double> > &log_a,
const Vector<double> &gamma_i,
AmSgmm2 *model) {
KALDI_LOG << "Updating weight projections";
// tot_like_{after, before} are totals over multiple iterations,
// not valid likelihoods. but difference is valid (when divided by tot_count).
double tot_predicted_like_impr = 0.0, tot_like_before = 0.0,
tot_like_after = 0.0;
Matrix<double> g_i(accs.num_gaussians_, accs.phn_space_dim_);
// View F_i as a vector of SpMatrix.
Matrix<double> F_i(accs.num_gaussians_,
(accs.phn_space_dim_ * (accs.phn_space_dim_ + 1)) / 2);
Matrix<double> w(model->w_);
double tot_count = gamma_i.Sum();
for (int iter = 0; iter < options_.weight_projections_iters; iter++) {
F_i.SetZero();
g_i.SetZero();
double k_like_before = 0.0;
UpdateWClass c(accs, *model, w, log_a, &F_i, &g_i, &k_like_before);
RunMultiThreaded(c);
Matrix<double> w_orig(w);
double k_predicted_like_impr = 0.0, k_like_after = 0.0;
double min_step = 0.001, step_size;
SolverOptions opts;
opts.name = "w";
opts.K = options_.max_cond;
opts.eps = options_.epsilon;
for (step_size = 1.0; step_size >= min_step; step_size /= 2) {
k_predicted_like_impr = 0.0;
k_like_after = 0.0;
for (int32 i = 0; i < accs.num_gaussians_; i++) {
// auxf is formulated in terms of change in w.
Vector<double> delta_w(accs.phn_space_dim_);
// returns objf impr with step_size = 1,
// but it may not be 1 so we recalculate it.
SpMatrix<double> this_F_i(accs.phn_space_dim_);
this_F_i.CopyFromVec(F_i.Row(i));
SolveQuadraticProblem(this_F_i, g_i.Row(i), opts, &delta_w);
delta_w.Scale(step_size);
double predicted_impr = VecVec(delta_w, g_i.Row(i)) -
0.5 * VecSpVec(delta_w, this_F_i, delta_w);
// should never be negative because
// we checked inside SolveQuadraticProblem.
KALDI_ASSERT(predicted_impr >= -1.0e-05);
if (i < 10)
KALDI_LOG << "Predicted objf impr for w, iter = " << iter
<< ", i = " << i << " is "
<< (predicted_impr / gamma_i(i) + 1.0e-20)
<< " per frame over " << gamma_i(i) << " frames.";
k_predicted_like_impr += predicted_impr;
w.Row(i).AddVec(1.0, delta_w);
}
for (int32 j1 = 0; j1 < accs.num_groups_; j1++) {
int32 M = model->NumSubstatesForGroup(j1);
Matrix<double> w_j(M, accs.num_gaussians_);
w_j.AddMatMat(1.0, Matrix<double>(model->v_[j1]), kNoTrans, w, kTrans,
0.0);
if (!log_a.empty())
w_j.AddMat(1.0, log_a[j1]); // SSGMM techreport eq. 42
for (int32 m = 0; m < M; m++) {
SubVector<double> w_jm(w_j, m);
w_jm.Add(-1.0 * w_jm.LogSumExp());
}
k_like_after += TraceMatMat(w_j, accs.gamma_[j1], kTrans);
}
KALDI_VLOG(2) << "For iteration " << iter << ", updating w gives "
<< "predicted per-frame like impr "
<< (k_predicted_like_impr / tot_count) << ", actual "
<< ((k_like_after - k_like_before) / tot_count) << ", over "
<< tot_count << " frames";
if (k_like_after < k_like_before) {
w.CopyFromMat(w_orig); // Undo what we computed.
if (fabs(k_like_after - k_like_before) / tot_count < 1.0e-05) {
k_like_after = k_like_before;
KALDI_WARN
<< "Not updating weights as not increasing auxf and "
<< "probably due to numerical issues (since small change).";
break;
} else {
KALDI_WARN << "Halving step size for weights as likelihood did "
<< "not increase";
}
} else {
break;
}
}
if (step_size < min_step) {
// Undo any step as we have no confidence that this is right.
w.CopyFromMat(w_orig);
} else {
tot_predicted_like_impr += k_predicted_like_impr;
tot_like_after += k_like_after;
tot_like_before += k_like_before;
}
}
model->w_.CopyFromMat(w);
model->w_jmi_.clear(); // invalidated.
tot_predicted_like_impr /= tot_count;
tot_like_after = (tot_like_after - tot_like_before) / tot_count;
KALDI_LOG << "**Overall objf impr for w is " << tot_predicted_like_impr
<< ", actual " << tot_like_after << ", over " << tot_count
<< " frames";
return tot_like_after;
}
double MleAmSgmm2Updater::UpdateU(const MleAmSgmm2Accs &accs,
const Vector<double> &gamma_i,
AmSgmm2 *model) {
double tot_impr = 0.0;
SolverOptions opts;
opts.name = "u";
opts.K = options_.max_cond;
opts.eps = options_.epsilon;
for (int32 i = 0; i < accs.num_gaussians_; i++) {
if (gamma_i(i) < 200.0) {
KALDI_LOG << "Count is small " << gamma_i(i) << " for gaussian " << i
<< ", not updating u_i.";
continue;
}
Vector<double> u_i(model->u_.Row(i));
Vector<double> delta_u(accs.spk_space_dim_);
double impr =
SolveQuadraticProblem(accs.U_[i], accs.t_.Row(i), opts, &delta_u);
double impr_per_frame = impr / gamma_i(i);
if (impr_per_frame > options_.max_impr_u) {
KALDI_WARN << "Updating speaker weight projections u, for Gaussian index "
<< i << ", impr/frame is " << impr_per_frame << " over "
<< gamma_i(i) << " frames, scaling back to not exceed "
<< options_.max_impr_u;
double scale = options_.max_impr_u / impr_per_frame;
impr *= scale;
delta_u.Scale(scale);
// Note: a linear scaling of "impr" with "scale" is not quite accurate
// in depicting how the quadratic auxiliary function varies as we change
// the scale on "delta", but this does not really matter-- the goal is
// to limit the auxiliary-function change to not be too large.
}
if (i < 10) {
KALDI_LOG << "Objf impr for spk weight-projection u for i = " << (i)
<< ", is " << (impr / (gamma_i(i) + 1.0e-20)) << " over "
<< gamma_i(i) << " frames";
}
u_i.AddVec(1.0, delta_u);
model->u_.Row(i).CopyFromVec(u_i);
tot_impr += impr;
}
KALDI_LOG << "**Overall objf impr for u is " << (tot_impr / gamma_i.Sum())
<< ", over " << gamma_i.Sum() << " frames";
return tot_impr / gamma_i.Sum();
}
double MleAmSgmm2Updater::UpdateN(const MleAmSgmm2Accs &accs,
const Vector<double> &gamma_i,
AmSgmm2 *model) {
double tot_count = 0.0, tot_like_impr = 0.0;
if (accs.spk_space_dim_ == 0 || accs.R_.size() == 0 || accs.Z_.size() == 0) {
KALDI_ERR << "Speaker subspace dim is zero or no stats accumulated";
}
SolverOptions opts;
opts.name = "N";
opts.K = options_.max_cond;
opts.eps = options_.epsilon;
for (int32 i = 0; i < accs.num_gaussians_; i++) {
if (gamma_i(i) < 2 * accs.spk_space_dim_) {
KALDI_WARN << "Not updating speaker basis for i = " << (i)
<< " because count is too small " << (gamma_i(i));
continue;
}
Matrix<double> Ni(model->N_[i]);
double impr = SolveQuadraticMatrixProblem(
accs.R_[i], accs.Z_[i], SpMatrix<double>(model->SigmaInv_[i]), opts,
&Ni);
model->N_[i].CopyFromMat(Ni);
if (i < 10) {
KALDI_LOG << "Objf impr for spk projection N for i = " << (i) << ", is "
<< (impr / (gamma_i(i) + 1.0e-20)) << " over " << gamma_i(i)
<< " frames";
}
tot_count += gamma_i(i);
tot_like_impr += impr;
}
KALDI_LOG << "**Overall objf impr for N is " << (tot_like_impr / tot_count)
<< " over " << tot_count << " frames";
return (tot_like_impr / tot_count);
}
void MleAmSgmm2Updater::RenormalizeN(const MleAmSgmm2Accs &accs,
const Vector<double> &gamma_i,
AmSgmm2 *model) {
KALDI_ASSERT(accs.R_.size() != 0);
double tot_count = gamma_i.Sum();
if (tot_count == 0) {
KALDI_WARN << "Not renormalizing N, since there are no counts.";
return;
}
SpMatrix<double> RTot(accs.spk_space_dim_);
// for (int32 i = 0; i < accs.num_gaussians_; i++) {
// RTot.AddSp(1.0, accs.R_[i]);
// }
for (int32 i = 0; i < accs.num_gaussians_; i++) {
RTot.AddSp(gamma_i(i), accs.R_[i]);
}
RTot.Scale(1.0 / tot_count);
Matrix<double> U(accs.spk_space_dim_, accs.spk_space_dim_);
Vector<double> eigs(accs.spk_space_dim_);
RTot.SymPosSemiDefEig(&eigs, &U);
KALDI_LOG << "Renormalizing N, eigs are: " << (eigs);
Vector<double> sqrteigs(accs.spk_space_dim_);
for (int32 t = 0; t < accs.spk_space_dim_; t++) {
sqrteigs(t) = sqrt(eigs(t));
}
// e.g. diag(eigs)^{-0.5} * U' * RTot * U * diag(eigs)^{-0.5} = 1
// But inverse transpose of this transformation needs to take place on R,
// i.e. not (on left: diag(eigs)^{-0.5} * U')
// but: (inverse it: U . diag(eigs)^{0.5},
// transpose it: diag(eigs)^{0.5} U^T. Need to do this on the right to N
// (because N has the spk vecs on the right), so N := N U diag(eigs)^{0.5}
U.MulColsVec(sqrteigs);
Matrix<double> Ntmp(accs.feature_dim_, accs.spk_space_dim_);
for (int32 i = 0; i < accs.num_gaussians_; i++) {
Ntmp.AddMatMat(1.0, Matrix<double>(model->N_[i]), kNoTrans, U, kNoTrans,
0.0);
model->N_[i].CopyFromMat(Ntmp);
}
}
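// Note on RenormalizeN: with E = U diag(eigs)^{0.5}, the code maps N_i -> N_i E,
// so keeping N_i v^{(s)} unchanged would require mapping any stored speaker
// vectors as v^{(s)} -> E^{-1} v^{(s)} = diag(eigs)^{-0.5} U^T v^{(s)}; this is
// why the caller in Update() warns that any speaker vectors you keep around
// must be altered when renormalize_N is used.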
double MleAmSgmm2Updater::UpdateVars(
const MleAmSgmm2Accs &accs, const std::vector<SpMatrix<double> > &S_means,
const Vector<double> &gamma_i, AmSgmm2 *model) {
KALDI_ASSERT(S_means.size() == static_cast<size_t>(accs.num_gaussians_));
SpMatrix<double> Sigma_i(accs.feature_dim_), Sigma_i_ml(accs.feature_dim_);
double tot_objf_impr = 0.0, tot_t = 0.0;
SpMatrix<double> covfloor(accs.feature_dim_);
Vector<double> objf_improv(accs.num_gaussians_);
// First pass over all (shared) Gaussian components to calculate the
// ML estimate of the covariances, and the total covariance for flooring.
for (int32 i = 0; i < accs.num_gaussians_; i++) {
// Eq. (75): Sigma_{i}^{ml} = 1/gamma_{i} [S_{i} + S_{i}^{(means)} - ...
// Y_{i} M_{i}^T - M_{i} Y_{i}^T]
// Note the S_means already contains the Y_{i} M_{i}^T terms.
Sigma_i_ml.CopyFromSp(S_means[i]);
Sigma_i_ml.AddSp(1.0, accs.S_[i]);
covfloor.AddSp(1.0, Sigma_i_ml);
// inverting small values e.g. 4.41745328e-40 seems to generate inf,
// although would be fixed up later.
if (gamma_i(i) > 1.0e-20) {
Sigma_i_ml.Scale(1 / (gamma_i(i) + 1.0e-20));
} else {
Sigma_i_ml.SetUnit();
}
KALDI_ASSERT(1.0 / Sigma_i_ml(0, 0) != 0.0);
// Eq. (76): Compute the objective function with the old parameter values
objf_improv(i) =
model->SigmaInv_[i].LogPosDefDet() -
TraceSpSp(SpMatrix<double>(model->SigmaInv_[i]), Sigma_i_ml);
model->SigmaInv_[i].CopyFromSp(Sigma_i_ml); // inverted in the next loop.
}
// Compute the covariance floor.
if (gamma_i.Sum() == 0) { // If no count, use identity.
KALDI_WARN << "Updating variances: zero counts. Setting floor to unit.";
covfloor.SetUnit();
} else { // else, use the global average covariance.
covfloor.Scale(options_.cov_floor / gamma_i.Sum());
int32 tmp;
if ((tmp = covfloor.LimitCondDouble(options_.max_cond)) != 0) {
KALDI_WARN << "Covariance flooring matrix is poorly conditioned. Fixed "
<< "up " << tmp << " eigenvalues.";
}
}
if (options_.cov_diag_ratio > 1000) {
KALDI_LOG << "Assuming you want to build a diagonal system since "
<< "cov_diag_ratio is large: making diagonal covFloor.";
for (int32 i = 0; i < covfloor.NumRows(); i++)
for (int32 j = 0; j < i; j++) covfloor(i, j) = 0.0;
}
// Second pass over all (shared) Gaussian components to calculate the
// floored estimate of the covariances, and update the model.
for (int32 i = 0; i < accs.num_gaussians_; i++) {
Sigma_i.CopyFromSp(model->SigmaInv_[i]);
Sigma_i_ml.CopyFromSp(Sigma_i);
// In case of insufficient counts, make the covariance matrix diagonal.
// cov_diag_ratio is 2 by default, set to very large to always get diag-cov
if (gamma_i(i) < options_.cov_diag_ratio * accs.feature_dim_) {
KALDI_WARN << "For Gaussian component " << i << ": Too low count "
<< gamma_i(i)
<< " for covariance matrix estimation. Setting to "
<< "diagonal";
for (int32 d = 0; d < accs.feature_dim_; d++)
for (int32 e = 0; e < d; e++)
Sigma_i(d, e) = 0.0; // SpMatrix, can only set lower triangular part
int floored = Sigma_i.ApplyFloor(covfloor);
if (floored > 0) {
KALDI_WARN << "For Gaussian component " << i << ": Floored " << floored
<< " covariance eigenvalues.";
}
model->SigmaInv_[i].CopyFromSp(Sigma_i);
model->SigmaInv_[i].InvertDouble();
} else { // Updating the full covariance matrix.
try {
int floored = Sigma_i.ApplyFloor(covfloor);
if (floored > 0) {
KALDI_WARN << "For Gaussian component " << i << ": Floored "
<< floored << " covariance eigenvalues.";
}
model->SigmaInv_[i].CopyFromSp(Sigma_i);
model->SigmaInv_[i].InvertDouble();
objf_improv(i) +=
Sigma_i.LogPosDefDet() +
TraceSpSp(SpMatrix<double>(model->SigmaInv_[i]), Sigma_i_ml);
objf_improv(i) *= (-0.5 * gamma_i(i)); // Eq. (76)
tot_objf_impr += objf_improv(i);
tot_t += gamma_i(i);
if (i < 5) {
KALDI_VLOG(2) << "objf impr from variance update ="
<< objf_improv(i) / (gamma_i(i) + 1.0e-20) << " over "
<< (gamma_i(i)) << " frames for i = " << (i);
}
} catch (...) {
KALDI_WARN << "Updating within-class covariance matrix i = " << (i)
<< ", numerical problem";
// This is a catch-all thing in case of unanticipated errors, but
// flooring should prevent this occurring for the most part.
model->SigmaInv_[i].SetUnit(); // Set to unit.
}
}
}
KALDI_LOG << "**Overall objf impr for variance update = "
<< (tot_objf_impr / (tot_t + 1.0e-20)) << " over " << tot_t
<< " frames";
return tot_objf_impr / (tot_t + 1.0e-20);
}
double MleAmSgmm2Updater::UpdateSubstateWeights(const MleAmSgmm2Accs &accs,
AmSgmm2 *model) {
KALDI_LOG << "Updating substate mixture weights";
// Also set the vector gamma_j which is a cache of the state occupancies.
double tot_gamma = 0.0, objf_impr = 0.0;
for (int32 j2 = 0; j2 < accs.num_pdfs_; j2++) {
double gamma_j_sm = 0.0;
int32 num_substates = model->NumSubstatesForPdf(j2);
const Vector<double> &occs(accs.gamma_c_[j2]);
Vector<double> smoothed_occs(occs);
smoothed_occs.Add(options_.tau_c);
gamma_j_sm += smoothed_occs.Sum();
tot_gamma += occs.Sum();
for (int32 m = 0; m < num_substates; m++) {
double cur_weight = model->c_[j2](m);
if (cur_weight <= 0) {
KALDI_WARN << "Zero or negative weight, flooring";
cur_weight = 1.0e-10; // future work(arnab): remove magic numbers
}
model->c_[j2](m) = smoothed_occs(m) / gamma_j_sm;
objf_impr += log(model->c_[j2](m) / cur_weight) * occs(m);
}
}
KALDI_LOG << "**Overall objf impr for c is " << (objf_impr / tot_gamma)
<< ", over " << tot_gamma << " frames.";
return (objf_impr / tot_gamma);
}
MleSgmm2SpeakerAccs::MleSgmm2SpeakerAccs(const AmSgmm2 &model, BaseFloat prune)
: rand_prune_(prune) {
KALDI_ASSERT(model.SpkSpaceDim() != 0);
H_spk_.resize(model.NumGauss());
for (int32 i = 0; i < model.NumGauss(); i++) {
// Eq. (82): H_{i}^{spk} = N_{i}^T \Sigma_{i}^{-1} N_{i}
H_spk_[i].Resize(model.SpkSpaceDim());
H_spk_[i].AddMat2Sp(1.0, Matrix<double>(model.N_[i]), kTrans,
SpMatrix<double>(model.SigmaInv_[i]), 0.0);
}
model.GetNtransSigmaInv(&NtransSigmaInv_);
gamma_s_.Resize(model.NumGauss());
y_s_.Resize(model.SpkSpaceDim());
if (model.HasSpeakerDependentWeights()) a_s_.Resize(model.NumGauss());
}
void MleSgmm2SpeakerAccs::Clear() {
y_s_.SetZero();
gamma_s_.SetZero();
if (a_s_.Dim() != 0) a_s_.SetZero();
}
BaseFloat MleSgmm2SpeakerAccs::Accumulate(
const AmSgmm2 &model, const Sgmm2PerFrameDerivedVars &frame_vars, int32 j2,
BaseFloat weight, Sgmm2PerSpkDerivedVars *spk_vars) {
// Calculate Gaussian posteriors and collect statistics
Matrix<BaseFloat> posteriors;
BaseFloat log_like =
model.ComponentPosteriors(frame_vars, j2, spk_vars, &posteriors);
posteriors.Scale(weight);
AccumulateFromPosteriors(model, frame_vars, posteriors, j2, spk_vars);
return log_like;
}
BaseFloat MleSgmm2SpeakerAccs::AccumulateFromPosteriors(
const AmSgmm2 &model, const Sgmm2PerFrameDerivedVars &frame_vars,
const Matrix<BaseFloat> &posteriors, int32 j2,
Sgmm2PerSpkDerivedVars *spk_vars) {
double tot_count = 0.0;
int32 feature_dim = model.FeatureDim(), spk_space_dim = model.SpkSpaceDim();
KALDI_ASSERT(spk_space_dim != 0);
const vector<int32> &gselect = frame_vars.gselect;
// Intermediate variables
Vector<double> xt_jmi(feature_dim), mu_jmi(feature_dim),
zt_jmi(spk_space_dim);
int32 num_substates = model.NumSubstatesForPdf(j2), j1 = model.Pdf2Group(j2);
bool have_spk_dep_weights = (a_s_.Dim() != 0);
for (int32 m = 0; m < num_substates; m++) {
BaseFloat gammat_jm = 0.0;
for (int32 ki = 0; ki < static_cast<int32>(gselect.size()); ki++) {
int32 i = gselect[ki];
// Eq. (39): gamma_{jmi}(t) = p (j, m, i|t)
BaseFloat gammat_jmi = RandPrune(posteriors(ki, m), rand_prune_);
if (gammat_jmi != 0.0) {
gammat_jm += gammat_jmi;
tot_count += gammat_jmi;
model.GetSubstateMean(j1, m, i, &mu_jmi);
xt_jmi.CopyFromVec(frame_vars.xt);
xt_jmi.AddVec(-1.0, mu_jmi);
// Eq. (48): z{jmi}(t) = N_{i}^{T} \Sigma_{i}^{-1} x_{jmi}(t)
zt_jmi.AddMatVec(1.0, NtransSigmaInv_[i], kNoTrans, xt_jmi, 0.0);
// Eq. (49): \gamma_{i}^{(s)} = \sum_{t\in\Tau(s), j, m} gamma_{jmi}
gamma_s_(i) += gammat_jmi;
// Eq. (50): y^{(s)} = \sum_{t, j, m, i} gamma_{jmi}(t) z_{jmi}(t)
y_s_.AddVec(gammat_jmi, zt_jmi);
}
}
if (have_spk_dep_weights) {
KALDI_ASSERT(!model.w_jmi_.empty());
BaseFloat d_jms = model.GetDjms(j1, m, spk_vars);
if (d_jms == -1.0)
d_jms = 1.0; // Explanation: d_jms is set to -1 when we didn't have
// speaker vectors in training. We treat this the same as the speaker
// vector being zero, and d_jms becomes 1 in this case.
a_s_.AddVec(gammat_jm / d_jms, model.w_jmi_[j1].Row(m));
}
}
return tot_count;
}
void MleSgmm2SpeakerAccs::Update(const AmSgmm2 &model, BaseFloat min_count,
Vector<BaseFloat> *v_s,
BaseFloat *objf_impr_out,
BaseFloat *count_out) {
double tot_gamma = gamma_s_.Sum();
if (tot_gamma < min_count) {
KALDI_WARN << "Updating speaker vectors, count is " << tot_gamma << " < "
<< min_count << "not updating.";
if (objf_impr_out) *objf_impr_out = 0.0;
if (count_out) *count_out = 0.0;
return;
}
if (a_s_.Dim() == 0) // No speaker-dependent weights...
UpdateNoU(v_s, objf_impr_out, count_out);
else
UpdateWithU(model, v_s, objf_impr_out, count_out);
}
// Basic update, no SSGMM.
void MleSgmm2SpeakerAccs::UpdateNoU(Vector<BaseFloat> *v_s,
BaseFloat *objf_impr_out,
BaseFloat *count_out) {
double tot_gamma = gamma_s_.Sum();
KALDI_ASSERT(y_s_.Dim() != 0);
int32 T = y_s_.Dim(); // speaker-subspace dim.
int32 num_gauss = gamma_s_.Dim();
if (v_s->Dim() != T) v_s->Resize(T); // will set it to zero.
// Eq. (84): H^{(s)} = \sum_{i} \gamma_{i}(s) H_{i}^{spk}
SpMatrix<double> H_s(T);
for (int32 i = 0; i < num_gauss; i++) H_s.AddSp(gamma_s_(i), H_spk_[i]);
// Don't make these options to SolveQuadraticProblem configurable...
// they really don't make a difference at all unless the matrix in
// question is singular, which wouldn't happen in this case.
Vector<double> v_s_dbl(*v_s);
double tot_objf_impr =
SolveQuadraticProblem(H_s, y_s_, SolverOptions("v_s"), &v_s_dbl);
v_s->CopyFromVec(v_s_dbl);
KALDI_LOG << "*Objf impr for speaker vector is "
<< (tot_objf_impr / tot_gamma) << " over " << tot_gamma
<< " frames.";
if (objf_impr_out) *objf_impr_out = tot_objf_impr;
if (count_out) *count_out = tot_gamma;
}
// Basic update, no SSGMM.
void MleSgmm2SpeakerAccs::UpdateWithU(const AmSgmm2 &model,
Vector<BaseFloat> *v_s_ptr,
BaseFloat *objf_impr_out,
BaseFloat *count_out) {
double tot_gamma = gamma_s_.Sum();
KALDI_ASSERT(y_s_.Dim() != 0);
int32 T = y_s_.Dim(); // speaker-subspace dim.
int32 num_gauss = gamma_s_.Dim();
if (v_s_ptr->Dim() != T) v_s_ptr->Resize(T); // will set it to zero.
// Eq. (84): H^{(s)} = \sum_{i} \gamma_{i}(s) H_{i}^{spk}
SpMatrix<double> H_s(T);
for (int32 i = 0; i < num_gauss; i++) H_s.AddSp(gamma_s_(i), H_spk_[i]);
Vector<double> v_s(*v_s_ptr);
int32 num_iters = 5, // don't set this to 1, as we discard last iter.
num_backtracks = 0, max_backtracks = 10;
Vector<double> auxf(num_iters);
Matrix<double> v_s_per_iter(num_iters, T);
// The update for v^{(s)} is the one described in the technical report
// section 5.1 (eq. 33 and below).
for (int32 iter = 0; iter < num_iters; iter++) { // converges very fast,
// and each iteration is fast, so don't need to make this configurable.
v_s_per_iter.Row(iter).CopyFromVec(v_s);
SpMatrix<double> F(
H_s); // the 2nd-order quadratic term on this iteration...
// F^{(p)} in the techreport.
Vector<double> g(y_s_); // g^{(p)} in the techreport.
g.AddSpVec(-1.0, H_s, v_s, 1.0);
Vector<double> log_b_is(num_gauss); // b_i^{(s)}, indexed by i.
log_b_is.AddMatVec(1.0, Matrix<double>(model.u_), kNoTrans, v_s, 0.0);
Vector<double> tilde_w_is(log_b_is);
Vector<double> log_a_s_(a_s_);
log_a_s_.ApplyLog();
tilde_w_is.AddVec(1.0, log_a_s_);
tilde_w_is.Add(-1.0 * tilde_w_is.LogSumExp()); // normalize.
// currently tilde_w_is is in log form.
auxf(iter) = VecVec(v_s, y_s_) - 0.5 * VecSpVec(v_s, H_s, v_s) +
VecVec(gamma_s_, tilde_w_is); // "new" term (weights)
if (iter > 0 && auxf(iter) < auxf(iter - 1) &&
!ApproxEqual(auxf(iter), auxf(iter - 1))) { // auxf did not improve.
// backtrack halfway, and do this iteration again.
KALDI_WARN << "Backtracking in speaker vector update, on iter " << iter
<< ", auxfs are " << auxf(iter - 1) << " -> " << auxf(iter);
v_s.Scale(0.5);
v_s.AddVec(0.5, v_s_per_iter.Row(iter - 1));
if (++num_backtracks >= max_backtracks) {
KALDI_WARN << "Backtracked " << max_backtracks
<< " times in speaker-vector update.";
// backtrack all the way, and terminate:
v_s_per_iter.Row(num_iters - 1).CopyFromVec(v_s_per_iter.Row(iter - 1));
// the following statement ensures we will get
// the appropriate auxiliary-function.
auxf(num_iters - 1) = auxf(iter - 1);
break;
}
iter--;
}
tilde_w_is.ApplyExp();
for (int32 i = 0; i < num_gauss; i++) {
g.AddVec(gamma_s_(i) - tot_gamma * tilde_w_is(i), model.u_.Row(i));
F.AddVec2(tot_gamma * tilde_w_is(i), model.u_.Row(i));
}
Vector<double> delta(v_s.Dim());
SolveQuadraticProblem(F, g, SolverOptions("v_s"), &delta);
v_s.AddVec(1.0, delta);
}
// so that we only accept things where the auxf has been checked, we
// actually take the penultimate speaker-vector. --> don't set
// num-iters = 1.
v_s_ptr->CopyFromVec(v_s_per_iter.Row(num_iters - 1));
double auxf_change = auxf(num_iters - 1) - auxf(0);
KALDI_LOG << "*Objf impr for speaker vector is " << (auxf_change / tot_gamma)
<< " per frame, over " << tot_gamma << " frames.";
if (objf_impr_out) *objf_impr_out = auxf_change;
if (count_out) *count_out = tot_gamma;
}
MleAmSgmm2Accs::~MleAmSgmm2Accs() {
if (gamma_s_.Sum() != 0.0)
KALDI_ERR << "In destructor of MleAmSgmm2Accs: detected that you forgot to "
"call CommitStatsForSpk()";
}
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/nnet2bin/nnet-combine-egs-discriminative.cc<|end_filename|>
// nnet2bin/nnet-combine-egs-discriminative.cc
// Copyright 2012-2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/nnet-example-functions.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace kaldi::nnet2;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
const char *usage =
"Copy examples for discriminative neural network training,\n"
"and combine successive examples if their combined length will\n"
"be less than --max-length. This can help to improve efficiency\n"
"(--max-length corresponds to minibatch size)\n"
"\n"
"Usage: nnet-combine-egs-discriminative [options] <egs-rspecifier> "
"<egs-wspecifier>\n"
"\n"
"e.g.\n"
"nnet-combine-egs-discriminative --max-length=512 ark:temp.1.degs "
"ark:1.degs\n";
int32 max_length = 512;
int32 hard_max_length = 2048;
int32 batch_size = 250;
ParseOptions po(usage);
po.Register("max-length", &max_length,
"Maximum length of example that we "
"will create when combining");
po.Register("batch-size", &batch_size,
"Size of batch used when combinging "
"examples");
po.Register("hard-max-length", &hard_max_length,
"Length of example beyond "
"which we will discard (very long examples may cause out of "
"memory errors)");
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
KALDI_ASSERT(hard_max_length >= max_length);
KALDI_ASSERT(batch_size >= 1);
std::string examples_rspecifier = po.GetArg(1),
examples_wspecifier = po.GetArg(2);
SequentialDiscriminativeNnetExampleReader example_reader(
examples_rspecifier);
DiscriminativeNnetExampleWriter example_writer(examples_wspecifier);
int64 num_read = 0, num_written = 0, num_discarded = 0;
while (!example_reader.Done()) {
std::vector<DiscriminativeNnetExample> buffer;
size_t size = batch_size;
buffer.reserve(size);
for (; !example_reader.Done() && buffer.size() < size;
example_reader.Next()) {
buffer.push_back(example_reader.Value());
num_read++;
}
std::vector<DiscriminativeNnetExample> combined;
CombineDiscriminativeExamples(max_length, buffer, &combined);
buffer.clear();
for (size_t i = 0; i < combined.size(); i++) {
const DiscriminativeNnetExample &eg = combined[i];
int32 num_frames = eg.input_frames.NumRows();
if (num_frames > hard_max_length) {
KALDI_WARN << "Discarding segment of length " << num_frames
<< " because it exceeds --hard-max-length="
<< hard_max_length;
num_discarded++;
} else {
std::ostringstream ostr;
ostr << (num_written++);
example_writer.Write(ostr.str(), eg);
}
}
}
KALDI_LOG << "Read " << num_read
<< " discriminative neural-network training"
<< " examples, wrote " << num_written << ", discarded "
<< num_discarded;
return (num_written == 0 ? 1 : 0);
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/lat/push-lattice.h<|end_filename|>
// lat/push-lattice.h
// Copyright 2013 Johns Hopkins University (Author: <NAME>)
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_LAT_PUSH_LATTICE_H_
#define KALDI_LAT_PUSH_LATTICE_H_
#include <vector>
#include <map>
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "fstext/fstext-lib.h"
#include "hmm/transition-model.h"
#include "lat/kaldi-lattice.h"
namespace fst {
/// This function pushes the transition-ids as far towards the start as they
/// will go. It can be useful prior to lattice-align-words (for non-linear
/// lattices). We can't use the generic OpenFst "push" function because
/// it uses the sum as the divisor, which is not appropriate in this case
/// (a+b generally won't divide a or b in this semiring).
/// It returns true on success, false if it failed due to TopSort failing,
/// which should never happen, but we handle it gracefully by just leaving the
/// lattice the same.
/// This function used to be called just PushCompactLattice.
template <class Weight, class IntType>
bool PushCompactLatticeStrings(
MutableFst<ArcTpl<CompactLatticeWeightTpl<Weight, IntType> > > *clat);
/// This function pushes the weights in the CompactLattice so that all states
/// except possibly the start state, have Weight components (of type
/// LatticeWeight) that "sum to one" in the LatticeWeight (i.e. interpreting the
/// weights as negated log-probs). It returns true on success, false if it
/// failed due to TopSort failing, which should never happen, but we handle it
/// gracefully by just leaving the lattice the same.
template <class Weight, class IntType>
bool PushCompactLatticeWeights(
MutableFst<ArcTpl<CompactLatticeWeightTpl<Weight, IntType> > > *clat);
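/// Illustrative usage (a sketch, not part of this header): both functions are
/// normally applied to a kaldi::CompactLattice, which instantiates these
/// templates with Weight = LatticeWeight and IntType = int32:
///   CompactLattice clat;
///   // ... read or construct clat ...
///   if (!PushCompactLatticeStrings(&clat) || !PushCompactLatticeWeights(&clat))
///     KALDI_WARN << "Lattice push failed (TopSort); lattice left unchanged.";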
} // namespace fst
#endif // KALDI_LAT_PUSH_LATTICE_H_
<|start_filename|>tonic-suite/asr/src/itf/optimizable-itf.h<|end_filename|>
// itf/optimizable-itf.h
// Copyright 2009-2011 Go Vivace Inc.; Microsoft Corporation; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_ITF_OPTIMIZABLE_ITF_H_
#define KALDI_ITF_OPTIMIZABLE_ITF_H_
#include "base/kaldi-common.h"
#include "matrix/matrix-lib.h"
namespace kaldi {
/// @ingroup Interfaces
/// @{
/// OptimizableInterface provides
/// a virtual class for optimizable objects.
/// E.g. a class that computed a likelihood function and
/// its gradient using some parameter
/// that has to be optimized on data
/// could inherit from it.
template <class Real>
class OptimizableInterface {
public:
/// computes gradient for a parameter params and returns it
/// in gradient_out
virtual void ComputeGradient(const Vector<Real> ¶ms,
Vector<Real> *gradient_out) = 0;
/// computes the function value for a parameter params
/// and returns it
virtual Real ComputeValue(const Vector<Real> ¶ms) = 0;
virtual ~OptimizableInterface() {}
};
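/*
  Illustrative sketch of a concrete implementation (added comment; the
  quadratic objective below is an invented example, not part of Kaldi):

    template <class Real>
    class QuadraticObjective : public OptimizableInterface<Real> {
     public:
      explicit QuadraticObjective(const Vector<Real> &target)
          : target_(target) {}
      // value = -0.5 * ||params - target||^2, so the optimum is at target_.
      virtual Real ComputeValue(const Vector<Real> &params) {
        Vector<Real> diff(params);
        diff.AddVec(-1.0, target_);
        return -0.5 * VecVec(diff, diff);
      }
      // gradient of the value above w.r.t. params is (target - params).
      virtual void ComputeGradient(const Vector<Real> &params,
                                   Vector<Real> *gradient_out) {
        gradient_out->Resize(params.Dim());
        gradient_out->CopyFromVec(target_);
        gradient_out->AddVec(-1.0, params);
      }
     private:
      Vector<Real> target_;
    };
*/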
/// @} end of "Interfaces"
} // end namespace kaldi
#endif
<|start_filename|>tonic-suite/asr/src/bin/build-tree-two-level.cc<|end_filename|>
// bin/build-tree-two-level.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/hmm-topology.h"
#include "tree/context-dep.h"
#include "tree/build-tree.h"
#include "tree/build-tree-utils.h"
#include "tree/context-dep.h"
#include "tree/clusterable-classes.h"
#include "util/text-utils.h"
namespace kaldi {
void GetSeenPhones(BuildTreeStatsType &stats, int P,
std::vector<int32> *phones_out) {
// Get list of phones that we saw (in the central position P, although it
// shouldn't matter what position).
std::set<int32> phones_set;
for (size_t i = 0; i < stats.size(); i++) {
const EventType &evec = stats[i].first;
for (size_t j = 0; j < evec.size(); j++) {
if (evec[j].first == P) { // "key" is position P
KALDI_ASSERT(evec[j].second != 0);
phones_set.insert(evec[j].second); // insert "value" of this
// phone.
}
}
  }
  // Copy the accumulated set once, after scanning all stats (doing this
  // inside the loop over stats would give the same result but redundantly).
  CopySetToVector(phones_set, phones_out);
}
}
int main(int argc, char *argv[]) {
using namespace kaldi;
try {
using namespace kaldi;
typedef kaldi::int32 int32;
const char *usage =
"Trains two-level decision tree. Outputs the larger tree, and a "
"mapping from the\n"
"leaf-ids of the larger tree to those of the smaller tree. Useful, "
"for instance,\n"
"in tied-mixture systems with multiple codebooks.\n"
"\n"
"Usage: build-tree-two-level [options] <tree-stats-in> <roots-file> "
"<questions-file> <topo-file> <tree-out> <mapping-out>\n"
"e.g.: \n"
" build-tree-two-level treeacc roots.txt 1.qst topo tree tree.map\n";
bool binary = true;
int32 P = 1, N = 3;
bool cluster_leaves = true;
int32 max_leaves_first = 1000;
int32 max_leaves_second = 5000;
std::string occs_out_filename;
ParseOptions po(usage);
po.Register("binary", &binary, "Write output in binary mode");
po.Register("context-width", &N,
"Context window size [must match "
"acc-tree-stats]");
po.Register("central-position", &P,
"Central position in context window "
"[must match acc-tree-stats]");
po.Register("max-leaves-first", &max_leaves_first,
"Maximum number of "
"leaves in first-level decision tree.");
po.Register("max-leaves-second", &max_leaves_second,
"Maximum number of "
"leaves in second-level decision tree.");
po.Register("cluster-leaves", &cluster_leaves,
"If true, do a post-clustering"
" of the leaves of the final decision tree.");
po.Read(argc, argv);
if (po.NumArgs() != 6) {
po.PrintUsage();
exit(1);
}
std::string stats_filename = po.GetArg(1), roots_filename = po.GetArg(2),
questions_filename = po.GetArg(3), topo_filename = po.GetArg(4),
tree_out_filename = po.GetArg(5),
map_out_filename = po.GetArg(6);
// Following 2 variables derived from roots file.
// phone_sets is sets of phones that share their roots.
// Just one phone each for normal systems.
std::vector<std::vector<int32> > phone_sets;
std::vector<bool> is_shared_root;
std::vector<bool> is_split_root;
{
Input ki(roots_filename.c_str());
ReadRootsFile(ki.Stream(), &phone_sets, &is_shared_root, &is_split_root);
}
HmmTopology topo;
ReadKaldiObject(topo_filename, &topo);
BuildTreeStatsType stats;
{
bool binary_in;
GaussClusterable gc; // dummy needed to provide type.
Input ki(stats_filename, &binary_in);
ReadBuildTreeStats(ki.Stream(), binary_in, gc, &stats);
}
std::cerr << "Number of separate statistics is " << stats.size() << '\n';
Questions qo;
{
bool binary_in;
try {
Input ki(questions_filename, &binary_in);
qo.Read(ki.Stream(), binary_in);
} catch (const std::exception &e) {
KALDI_ERR << "Error reading questions file " << questions_filename
<< ", error is: " << e.what();
}
}
std::vector<int32> phone2num_pdf_classes;
topo.GetPhoneToNumPdfClasses(&phone2num_pdf_classes);
EventMap *to_pdf = NULL;
std::vector<int32> mapping;
//////// Build the tree. ////////////
to_pdf =
BuildTreeTwoLevel(qo, phone_sets, phone2num_pdf_classes, is_shared_root,
is_split_root, stats, max_leaves_first,
max_leaves_second, cluster_leaves, P, &mapping);
ContextDependency ctx_dep(N, P, to_pdf); // takes ownership
// of pointer "to_pdf", so set it NULL.
to_pdf = NULL;
WriteKaldiObject(ctx_dep, tree_out_filename, binary);
{
Output ko(map_out_filename, binary);
WriteIntegerVector(ko.Stream(), binary, mapping);
}
{ // This block is just doing some checks.
std::vector<int32> all_phones;
for (size_t i = 0; i < phone_sets.size(); i++)
all_phones.insert(all_phones.end(), phone_sets[i].begin(),
phone_sets[i].end());
SortAndUniq(&all_phones);
if (all_phones != topo.GetPhones()) {
std::ostringstream ss;
WriteIntegerVector(ss, false, all_phones);
ss << " vs. ";
WriteIntegerVector(ss, false, topo.GetPhones());
KALDI_WARN << "Mismatch between phone sets provided in roots file, and "
"those in topology: " << ss.str();
}
std::vector<int32> phones_vec; // phones we saw.
GetSeenPhones(stats, P, &phones_vec);
std::vector<int32> unseen_phones; // diagnostic.
for (size_t i = 0; i < all_phones.size(); i++)
if (!std::binary_search(phones_vec.begin(), phones_vec.end(),
all_phones[i]))
unseen_phones.push_back(all_phones[i]);
for (size_t i = 0; i < phones_vec.size(); i++)
if (!std::binary_search(all_phones.begin(), all_phones.end(),
phones_vec[i]))
KALDI_ERR << "Phone " << (phones_vec[i])
<< " appears in stats but is not listed in roots file.";
if (!unseen_phones.empty()) {
std::ostringstream ss;
for (size_t i = 0; i < unseen_phones.size(); i++)
ss << unseen_phones[i] << ' ';
// Note, unseen phones is just a warning as in certain kinds of
// systems, this can be OK (e.g. where phone encodes position and
// stress information).
KALDI_WARN << "Saw no stats for following phones: " << ss.str();
}
}
std::cerr << "Wrote tree and mapping\n";
DeleteBuildTreeStats(&stats);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/sgmmbin/sgmm-info.cc<|end_filename|>
// sgmmbin/sgmm-info.cc
// Copyright 2012 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <iomanip>
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "sgmm/am-sgmm.h"
#include "hmm/transition-model.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
const char *usage =
"Print various information about an SGMM.\n"
"Usage: sgmm-info [options] <model-in> [model-in2 ... ]\n";
bool sgmm_detailed = false;
bool trans_detailed = false;
ParseOptions po(usage);
po.Register("sgmm-detailed", &sgmm_detailed,
"Print detailed information about substates.");
po.Register("trans-detailed", &trans_detailed,
"Print detailed information about transition model.");
po.Read(argc, argv);
if (po.NumArgs() < 1) {
po.PrintUsage();
exit(1);
}
for (int i = 1, max = po.NumArgs(); i <= max; ++i) {
std::string model_in_filename = po.GetArg(i);
AmSgmm am_sgmm;
TransitionModel trans_model;
{
bool binary;
Input ki(model_in_filename, &binary);
trans_model.Read(ki.Stream(), binary);
am_sgmm.Read(ki.Stream(), binary);
}
{
using namespace std;
cout.setf(ios::left);
cout << "\nModel file: " << model_in_filename << endl;
cout << " SGMM information:\n" << setw(40) << " # of HMM states"
<< am_sgmm.NumPdfs() << endl
<< setw(40) << " # of Gaussians per state" << am_sgmm.NumGauss()
<< endl
<< setw(40) << " Dimension of phone vector space"
<< am_sgmm.PhoneSpaceDim() << endl
<< setw(40) << " Dimension of speaker vector space"
<< am_sgmm.SpkSpaceDim() << endl
<< setw(40) << " Dimension of feature vectors"
<< am_sgmm.FeatureDim() << endl;
int32 total_substates = 0;
for (int32 j = 0; j < am_sgmm.NumPdfs(); j++) {
total_substates += am_sgmm.NumSubstates(j);
if (sgmm_detailed) {
cout << " # of substates for state " << setw(13) << j
<< am_sgmm.NumSubstates(j) << endl;
}
}
cout << setw(40) << " Total # of substates " << total_substates
<< endl;
cout << "\nTransition model information:\n" << setw(40)
<< " # of HMM states" << trans_model.NumPdfs() << endl
<< setw(40) << " # of transition states"
<< trans_model.NumTransitionStates() << endl;
int32 total_indices = 0;
for (int32 s = 0; s < trans_model.NumTransitionStates(); s++) {
total_indices += trans_model.NumTransitionIndices(s);
if (trans_detailed) {
cout << " # of transition ids for state " << setw(8) << s
<< trans_model.NumTransitionIndices(s) << endl;
}
}
cout << setw(40) << " Total # of transition ids " << total_indices
<< endl;
}
}
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/latbin/lattice-minimize.cc<|end_filename|>
// latbin/lattice-minimize.cc
// Copyright 2013 Johns Hopkins University (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "fstext/fstext-lib.h"
#include "lat/kaldi-lattice.h"
#include "lat/minimize-lattice.h"
#include "lat/push-lattice.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
using fst::SymbolTable;
using fst::VectorFst;
using fst::StdArc;
const char *usage =
"Minimize lattices, in CompactLattice format. Should be applied to\n"
"determinized lattices (e.g. produced with "
"--determinize-lattice=true)\n"
"Note: by default this program\n"
"pushes the strings and weights prior to minimization."
"Usage: lattice-minimize [options] lattice-rspecifier "
"lattice-wspecifier\n"
" e.g.: lattice-minimize ark:1.lats ark:2.lats\n";
ParseOptions po(usage);
bool push_strings = true;
bool push_weights = true;
po.Register("push-strings", &push_strings,
"If true, push the strings in the "
"lattice to the start.");
po.Register("push-weights", &push_weights,
"If true, push the weights in the "
"lattice to the start.");
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
std::string lats_rspecifier = po.GetArg(1), lats_wspecifier = po.GetArg(2);
SequentialCompactLatticeReader clat_reader(lats_rspecifier);
CompactLatticeWriter clat_writer(lats_wspecifier);
int32 n_done = 0, n_err = 0;
for (; !clat_reader.Done(); clat_reader.Next()) {
std::string key = clat_reader.Key();
CompactLattice clat = clat_reader.Value();
KALDI_VLOG(1) << "Processing lattice for utterance " << key;
if (push_strings && !PushCompactLatticeStrings(&clat)) {
KALDI_WARN << "Failure in pushing lattice strings (bad lattice?), "
<< "for key " << key;
n_err++;
continue;
}
if (push_weights && !PushCompactLatticeWeights(&clat)) {
KALDI_WARN << "Failure in pushing lattice weights (bad lattice?),"
<< "for key " << key;
n_err++;
continue;
}
if (!MinimizeCompactLattice(&clat)) {
KALDI_WARN << "Failure in minimizing lattice (bad lattice?),"
<< "for key " << key;
n_err++;
continue;
}
if (clat.NumStates() == 0) {
KALDI_WARN << "Empty lattice for key " << key;
n_err++;
continue;
}
clat_writer.Write(key, clat);
n_done++;
}
KALDI_LOG << "Minimized " << n_done << " lattices, errors on " << n_err;
return (n_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/matrix/qr.cc<|end_filename|>
// matrix/qr.cc
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <limits>
#include "matrix/sp-matrix.h"
#include "matrix/kaldi-vector.h"
#include "matrix/kaldi-matrix.h"
#include "matrix/matrix-functions.h"
#include "matrix/cblas-wrappers.h"
// This file contains an implementation of the Symmetric QR Algorithm
// for the symmetric eigenvalue problem. See Golub and Van Loan,
// 3rd ed., Algorithm 8.3.3.
namespace kaldi {
/* This is from Golub and Van Loan 3rd ed., sec. 5.1.3,
p210.
x is the input of dimenson 'dim', v is the output of dimension
dim, and beta is a scalar. Note: we use zero-based
not one-based indexing. */
/*
// We are commenting out the function below ("House") because it's not
// needed, but we keep it just to show how we came up with HouseBackward.
template<typename Real>
void House(MatrixIndexT dim, const Real *x, Real *v, Real *beta) {
KALDI_ASSERT(dim > 0);
// To avoid overflow, we first compute the max of x_ (or
// one if that's zero, and we'll replace "x" by x/max(x_i)
// below. The householder vector is anyway invariant to
// the magnitude of x. We could actually avoid this extra loop
// over x if we wanted to be a bit smarter, but anyway this
// doesn't dominate the O(N) performance of the algorithm.
Real s; // s is a scale on x.
{
Real max_x = std::numeric_limits<Real>::min();
for (MatrixIndexT i = 0; i < dim; i++)
max_x = std::max(max_x, (x[i] < 0 ? -x[i] : x[i]));
if (max_x == 0.0) max_x = 1.0;
s = 1.0 / max_x;
}
Real sigma = 0.0;
v[0] = 1.0;
for (MatrixIndexT i = 1; i < dim; i++) {
sigma += (x[i]*s) * (x[i]*s);
v[i] = x[i]*s;
}
if (sigma == 0.0) *beta = 0.0;
else {
// When we say x1 = x[0], we reference the one-based indexing
// in Golub and Van Loan.
Real x1 = x[0] * s, mu = std::sqrt(x1*x1 + sigma);
if (x1 <= 0) {
v[0] = x1 - mu;
} else {
v[0] = -sigma / (x1 + mu);
KALDI_ASSERT(KALDI_ISFINITE(v[dim-1]));
}
Real v1 = v[0];
Real v1sq = v1 * v1;
*beta = 2 * v1sq / (sigma + v1sq);
Real inv_v1 = 1.0 / v1;
if (KALDI_ISINF(inv_v1)) {
// can happen if v1 is denormal.
KALDI_ASSERT(v1 == v1 && v1 != 0.0);
for (MatrixIndexT i = 0; i < dim; i++) v[i] /= v1;
} else {
cblas_Xscal(dim, inv_v1, v, 1);
}
if (KALDI_ISNAN(inv_v1)) {
KALDI_ERR << "NaN encountered in HouseBackward";
}
}
}
*/
// This is a backward version of the "House" routine above:
// backward because it's the last index, not the first index of
// the vector that is "special". This is convenient in
// the Tridiagonalize routine that uses reversed indexes for
// compatibility with the packed lower triangular format.
template <typename Real>
void HouseBackward(MatrixIndexT dim, const Real *x, Real *v, Real *beta) {
KALDI_ASSERT(dim > 0);
// To avoid overflow, we first compute the max of x_ (or
// one if that's zero, and we'll replace "x" by x/max(x_i)
// below. The householder vector is anyway invariant to
// the magnitude of x. We could actually avoid this extra loop
// over x if we wanted to be a bit smarter, but anyway this
// doesn't dominate the O(N) performance of the algorithm.
Real s; // s is a scale on x.
{
Real max_x = std::numeric_limits<Real>::min();
for (MatrixIndexT i = 0; i < dim; i++)
max_x = std::max(max_x, (x[i] < 0 ? -x[i] : x[i]));
s = 1.0 / max_x;
}
Real sigma = 0.0;
v[dim - 1] = 1.0;
for (MatrixIndexT i = 0; i + 1 < dim; i++) {
sigma += (x[i] * s) * (x[i] * s);
v[i] = x[i] * s;
}
KALDI_ASSERT(KALDI_ISFINITE(sigma) &&
"Tridiagonalizing matrix that is too large or has NaNs.");
if (sigma == 0.0)
*beta = 0.0;
else {
Real x1 = x[dim - 1] * s, mu = std::sqrt(x1 * x1 + sigma);
if (x1 <= 0) {
v[dim - 1] = x1 - mu;
} else {
v[dim - 1] = -sigma / (x1 + mu);
KALDI_ASSERT(KALDI_ISFINITE(v[dim - 1]));
}
Real v1 = v[dim - 1];
Real v1sq = v1 * v1;
*beta = 2 * v1sq / (sigma + v1sq);
Real inv_v1 = 1.0 / v1;
if (KALDI_ISINF(inv_v1)) {
// can happen if v1 is denormal.
KALDI_ASSERT(v1 == v1 && v1 != 0.0);
for (MatrixIndexT i = 0; i < dim; i++) v[i] /= v1;
} else {
cblas_Xscal(dim, inv_v1, v, 1);
}
if (KALDI_ISNAN(inv_v1)) {
KALDI_ERR << "NaN encountered in HouseBackward";
}
}
}
/**
This routine tridiagonalizes *this. Golub and Van Loan 3rd ed., sec.
8.3.1 (p415). We reverse the order of the indices as it's more natural
with packed lower-triangular matrices to do it this way. There's also
a shift from one-based to zero-based indexing, so the index
k is transformed k -> n - k, and a corresponding transpose...
Let the original *this be A. This algorithm replaces *this with
a tridiagonal matrix T such that T = Q A Q^T for an orthogonal Q.
Caution: Q is transposed vs. Golub and Van Loan.
If Q != NULL it outputs Q.
*/
template <typename Real>
void SpMatrix<Real>::Tridiagonalize(MatrixBase<Real> *Q) {
MatrixIndexT n = this->NumRows();
KALDI_ASSERT(Q == NULL || (Q->NumRows() == n && Q->NumCols() == n));
if (Q != NULL) Q->SetUnit();
Real *data = this->Data();
Real *qdata = (Q == NULL ? NULL : Q->Data());
MatrixIndexT qstride = (Q == NULL ? 0 : Q->Stride());
Vector<Real> tmp_v(n - 1), tmp_p(n);
Real beta, *v = tmp_v.Data(), *p = tmp_p.Data(), *w = p, *x = p;
for (MatrixIndexT k = n - 1; k >= 2; k--) {
MatrixIndexT ksize = ((k + 1) * k) / 2;
// ksize is the packed size of the lower-triangular matrix of size k,
// which is the size of "all rows previous to this one."
Real *Arow = data + ksize; // In Golub+Van Loan it was A(k+1:n, k), we
// have Arow = A(k, 0:k-1).
HouseBackward(k, Arow, v, &beta); // sets v and beta.
cblas_Xspmv(k, beta, data, v, 1, 0.0, p, 1); // p = beta * A(0:k-1,0:k-1) v
Real minus_half_beta_pv = -0.5 * beta * cblas_Xdot(k, p, 1, v, 1);
cblas_Xaxpy(k, minus_half_beta_pv, v, 1, w,
1); // w = p - (beta p^T v/2) v;
// this relies on the fact that w and p are the same pointer.
// We're doing A(k, k-1) = ||Arow||. It happens that this element
// is indexed at ksize + k - 1 in the packed lower-triangular format.
data[ksize + k - 1] = std::sqrt(cblas_Xdot(k, Arow, 1, Arow, 1));
for (MatrixIndexT i = 0; i + 1 < k; i++)
data[ksize + i] = 0; // This is not in Golub and Van Loan but is
// necessary if we're not using parts of A to store the Householder
// vectors.
// We're doing A(0:k-1,0:k-1) -= (v w' + w v')
cblas_Xspr2(k, -1.0, v, 1, w, 1, data);
if (Q != NULL) { // C.f. Golub, Q is H_1 .. H_n-2... in this
// case we apply them in the opposite order so it's H_n-1 .. H_1,
// but also Q is transposed so we really have Q = H_1 .. H_n-1.
// It's a double negative.
// Anyway, we left-multiply Q by each one. The H_n would each be
// diag(I + beta v v', I) but we don't ever touch the last dims.
// We do (in Matlab notation):
// Q(0:k-1,:) = (I - beta v v') * Q, i.e.:
// Q(0:k-1,:) += -beta v (v' Q(0:k-1,:)) .. let x = -beta Q(0:k-1,:)^T v.
cblas_Xgemv(kTrans, k, n, -beta, qdata, qstride, v, 1, 0.0, x, 1);
// now x = -beta Q(0:k-1,:)^T v.
// The next line does: Q(0:k-1,:) += v x'.
cblas_Xger(k, n, 1.0, v, 1, x, 1, qdata, qstride);
}
}
}
// Instantiate these functions, as it wasn't implemented in sp-matrix.cc
// where we instantiated the whole class.
template void SpMatrix<float>::Tridiagonalize(MatrixBase<float> *Q);
template void SpMatrix<double>::Tridiagonalize(MatrixBase<double> *Q);
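/*
  Illustrative sketch of how Tridiagonalize() and Qr() compose (added comment;
  this is essentially what Eig(), further below, does, and the concrete
  dimension 10 is just an example):

    SpMatrix<double> A(10);
    A.SetRandn();                 // some symmetric matrix.
    Matrix<double> Q(10, 10);
    SpMatrix<double> T(A);        // work on a copy; the operations are destructive.
    T.Tridiagonalize(&Q);         // now T = Q A Q^T, with T tridiagonal.
    T.Qr(&Q);                     // now T is diagonal and still T = Q A Q^T,
                                  // so the rows of Q are eigenvectors of A and
                                  // the diagonal of T holds the eigenvalues.
*/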
/// Create Givens rotations, as in Golub and Van Loan 3rd ed., page 216.
template <typename Real>
inline void Givens(Real a, Real b, Real *c, Real *s) {
if (b == 0) {
*c = 1;
*s = 0;
} else {
if (std::abs(b) > std::abs(a)) {
Real tau = -a / b;
*s = 1 / std::sqrt(1 + tau * tau);
*c = *s * tau;
} else {
Real tau = -b / a;
*c = 1 / std::sqrt(1 + tau * tau);
*s = *c * tau;
}
}
}
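// Quick numeric check of the convention used above (added comment, not in the
// original source): for (a, b) = (3, 4) we take the |b| > |a| branch and get
// tau = -0.75, s = 0.8, c = -0.6; then s*a + c*b = 2.4 - 2.4 = 0 while
// c*a - s*b = -5, so G = [c s; -s c] satisfies G^T [a; b] = [r; 0] with
// |r| = sqrt(a^2 + b^2).  This is how the rotation is applied in QrStep
// below, as T <- G^T T G.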
// Some internal code for the QR algorithm: one "QR step".
// This is Golub and Van Loan 3rd ed., Algorithm 8.3.2 "Implicit Symmetric QR
// step
// with Wilkinson shift." A couple of differences: this code is
// in zero based arithmetic, and we represent Q transposed from
// their Q for memory locality with row-major-indexed matrices.
template <typename Real>
void QrStep(MatrixIndexT n, Real *diag, Real *off_diag, MatrixBase<Real> *Q) {
KALDI_ASSERT(n >= 2);
// below, "scale" could be any number; we introduce it to keep the
// floating point quantities within a good range.
Real d = (diag[n - 2] - diag[n - 1]) / 2.0, t = off_diag[n - 2],
inv_scale = std::max(std::max(std::abs(d), std::abs(t)),
std::numeric_limits<Real>::min()),
scale = 1.0 / inv_scale, d_scaled = d * scale,
off_diag_n2_scaled = off_diag[n - 2] * scale,
t2_n_n1_scaled = off_diag_n2_scaled * off_diag_n2_scaled,
sgn_d = (d > 0.0 ? 1.0 : -1.0),
mu = diag[n - 1] -
inv_scale * t2_n_n1_scaled /
(d_scaled +
sgn_d * std::sqrt(d_scaled * d_scaled + t2_n_n1_scaled)),
x = diag[0] - mu, z = off_diag[0];
KALDI_ASSERT(KALDI_ISFINITE(x));
Real *Qdata = (Q == NULL ? NULL : Q->Data());
MatrixIndexT Qstride = (Q == NULL ? 0 : Q->Stride()),
Qcols = (Q == NULL ? 0 : Q->NumCols());
for (MatrixIndexT k = 0; k < n - 1; k++) {
Real c, s;
Givens(x, z, &c, &s);
// Rotate dimensions k and k+1 with the Givens matrix G, as
// T <== G^T T G.
// In 2d, a Givens matrix is [ c s; -s c ]. Forget about
// the dimension-indexing issues and assume we have a 2x2
// symmetric matrix [ p q ; q r ]
// We ask our friends at Wolfram Alpha about
// { { c, -s}, {s, c} } * { {p, q}, {q, r} } * { { c, s}, {-s, c} }
// Interpreting the result as [ p', q' ; q', r ]
// p' = c (c p - s q) - s (c q - s r)
// q' = s (c p - s q) + c (c q - s r)
// r' = s (s p + c q) + c (s q + c r)
Real p = diag[k], q = off_diag[k], r = diag[k + 1];
// p is element k,k; r is element k+1,k+1; q is element k,k+1 or k+1,k.
// We'll let the compiler optimize this.
diag[k] = c * (c * p - s * q) - s * (c * q - s * r);
off_diag[k] = s * (c * p - s * q) + c * (c * q - s * r);
diag[k + 1] = s * (s * p + c * q) + c * (s * q + c * r);
// We also have some other elements to think of that
// got rotated in a simpler way: if k>0,
// then element (k, k-1) and (k+1, k-1) get rotated. Here,
// element k+1, k-1 will be present as z; it's the out-of-band
// element that we remembered from last time. This is
// on the left as it's the row indexes that differ, so think of
// this as being premultiplied by G^T. In fact we're multiplying
// T by in some sense the opposite/transpose of the Givens rotation.
if (k > 0) { // Note, in rotations, going backward, (x,y) -> ((cx - sy),
// (sx + cy))
Real &elem_k_km1 = off_diag[k - 1],
elem_kp1_km1 = z; // , tmp = elem_k_km1;
elem_k_km1 = c * elem_k_km1 - s * elem_kp1_km1;
// The next line will set elem_kp1_km1 to zero and we'll never access this
// value, so we comment it out.
// elem_kp1_km1 = s*tmp + c*elem_kp1_km1;
}
if (Q != NULL)
cblas_Xrot(Qcols, Qdata + k * Qstride, 1, Qdata + (k + 1) * Qstride, 1, c,
-s);
if (k < n - 2) {
// Next is the elements (k+2, k) and (k+2, k-1), to be rotated, again
// backwards.
Real &elem_kp2_k = z, &elem_kp2_kp1 = off_diag[k + 1];
// Note: elem_kp2_k == z would start off as zero because it's
// two off the diagonal, and not been touched yet. Therefore
// we eliminate it in expressions below, commenting it out.
// If we didn't do this we should set it to zero first.
elem_kp2_k = -s * elem_kp2_kp1; // + c*elem_kp2_k
elem_kp2_kp1 = c * elem_kp2_kp1; // + s*elem_kp2_k (original value).
// The next part is from the algorithm they describe: x = t_{k+1,k}
x = off_diag[k];
}
}
}
// Internal code for the QR algorithm, where the diagonal
// and off-diagonal of the symmetric matrix are represented as
// vectors of length n and n-1.
template <typename Real>
void QrInternal(MatrixIndexT n, Real *diag, Real *off_diag,
MatrixBase<Real> *Q) {
KALDI_ASSERT(Q == NULL || Q->NumCols() == n); // We may
// later relax the condition that Q->NumCols() == n.
MatrixIndexT counter = 0,
max_iters = 500 + 4 * n, // Should never take this many iters.
large_iters = 100 + 2 * n;
Real epsilon = (pow(2.0, sizeof(Real) == 4 ? -23.0 : -52.0));
for (; counter < max_iters; counter++) { // this takes the place of "until
// q=n"... we'll break out of the
// loop when we converge.
if (counter == large_iters ||
(counter > large_iters && (counter - large_iters) % 50 == 0)) {
KALDI_WARN << "Took " << counter << " iterations in QR (dim is " << n
<< "), doubling epsilon.";
SubVector<Real> d(diag, n), o(off_diag, n - 1);
KALDI_WARN << "Diag, off-diag are " << d << " and " << o;
epsilon *= 2.0;
}
for (MatrixIndexT i = 0; i + 1 < n; i++) {
if (std::abs(off_diag[i]) <=
epsilon * (std::abs(diag[i]) + std::abs(diag[i + 1])))
off_diag[i] = 0.0;
}
// The next code works out p, q, and npq which is n - p - q.
// For the definitions of q and p, see Golub and Van Loan; we
// partition the n dims into pieces of size (p, n-p-q, q) where
// the part of size q is diagonal and the part of size n-p-q is
// "unreduced", i.e. has no zero off-diagonal elements.
MatrixIndexT q = 0;
// Note: below, "n-q < 2" should more clearly be "n-2-q < 0", but that
// causes problems if MatrixIndexT is unsigned.
while (q < n && (n - q < 2 || off_diag[n - 2 - q] == 0.0)) q++;
if (q == n) break; // we're done. It's diagonal.
KALDI_ASSERT(n - q >= 2);
MatrixIndexT npq = 2; // Value of n - p - q, where n - p - q must be
// unreduced. This is the size of "middle" band of elements. If q != n,
// we must have hit a nonzero off-diag element, so the size of this
// band must be at least two.
while (npq + q < n &&
(n - q - npq - 1 < 0 || off_diag[n - q - npq - 1] != 0.0))
npq++;
MatrixIndexT p = n - q - npq;
{ // Checks.
for (MatrixIndexT i = 0; i + 1 < npq; i++)
KALDI_ASSERT(off_diag[p + i] != 0.0);
for (MatrixIndexT i = 0; i + 1 < q; i++)
KALDI_ASSERT(off_diag[p + npq - 1 + i] == 0.0);
if (p > 1) // Something must have stopped npq from growing further..
KALDI_ASSERT(off_diag[p - 1] == 0.0); // so last off-diag elem in
// group of size p must be zero.
}
if (Q != NULL) {
// Do one QR step on the middle part of Q only.
// Qpart will be a subset of the rows of Q.
SubMatrix<Real> Qpart(*Q, p, npq, 0, Q->NumCols());
QrStep(npq, diag + p, off_diag + p, &Qpart);
} else {
QrStep(npq, diag + p, off_diag + p,
static_cast<MatrixBase<Real> *>(NULL));
}
}
if (counter == max_iters) {
KALDI_WARN << "Failure to converge in QR algorithm. "
<< "Exiting with partial output.";
}
}
/**
This is the symmetric QR algorithm, from Golub and Van Loan 3rd ed.,
Algorithm
8.3.3. Q is transposed w.r.t. there, though.
*/
template <typename Real>
void SpMatrix<Real>::Qr(MatrixBase<Real> *Q) {
KALDI_ASSERT(this->IsTridiagonal());
// We envisage that Q would be square but we don't check for this,
// as there are situations where you might not want this.
KALDI_ASSERT(Q == NULL || Q->NumRows() == this->NumRows());
// Note: the first couple of lines of the algorithm they give would be done
// outside of this function, by calling Tridiagonalize().
MatrixIndexT n = this->NumRows();
Vector<Real> diag(n), off_diag(n - 1);
for (MatrixIndexT i = 0; i < n; i++) {
diag(i) = (*this)(i, i);
if (i > 0) off_diag(i - 1) = (*this)(i, i - 1);
}
QrInternal(n, diag.Data(), off_diag.Data(), Q);
// Now set *this to the value represented by diag and off_diag.
this->SetZero();
for (MatrixIndexT i = 0; i < n; i++) {
(*this)(i, i) = diag(i);
if (i > 0) (*this)(i, i - 1) = off_diag(i - 1);
}
}
template <typename Real>
void SpMatrix<Real>::Eig(VectorBase<Real> *s, MatrixBase<Real> *P) const {
MatrixIndexT dim = this->NumRows();
KALDI_ASSERT(s->Dim() == dim);
KALDI_ASSERT(P == NULL || (P->NumRows() == dim && P->NumCols() == dim));
SpMatrix<Real> A(*this); // Copy *this, since the tridiagonalization
// and QR decomposition are destructive.
// Note: for efficiency of memory access, the tridiagonalization
// algorithm makes the *rows* of P the eigenvectors, not the columns.
// We'll transpose P before we exit.
// Also note: P may be null if you don't want the eigenvectors. This
// will make this function more efficient.
A.Tridiagonalize(P); // Tridiagonalizes.
A.Qr(P); // Diagonalizes.
if (P) P->Transpose();
s->CopyDiagFromPacked(A);
}
template <typename Real>
void SpMatrix<Real>::TopEigs(VectorBase<Real> *s, MatrixBase<Real> *P,
MatrixIndexT lanczos_dim) const {
const SpMatrix<Real> &S(*this); // call this "S" for easy notation.
MatrixIndexT eig_dim = s->Dim(); // Space of dim we want to retain.
if (lanczos_dim <= 0)
lanczos_dim = std::max(eig_dim + 50, eig_dim + eig_dim / 2);
MatrixIndexT dim = this->NumRows();
if (lanczos_dim >= dim) {
// There would be no speed advantage in using this method, so just
// use the regular approach.
Vector<Real> s_tmp(dim);
Matrix<Real> P_tmp(dim, dim);
this->Eig(&s_tmp, &P_tmp);
SortSvd(&s_tmp, &P_tmp);
s->CopyFromVec(s_tmp.Range(0, eig_dim));
P->CopyFromMat(P_tmp.Range(0, dim, 0, eig_dim));
return;
}
KALDI_ASSERT(eig_dim <= dim && eig_dim > 0);
KALDI_ASSERT(P->NumRows() == dim && P->NumCols() == eig_dim); // each column
// is one eigenvector.
Matrix<Real> Q(lanczos_dim, dim); // The rows of Q will be the
// orthogonal vectors of the Krylov subspace.
SpMatrix<Real> T(lanczos_dim); // This will be equal to Q S Q^T,
// i.e. *this projected into the Krylov subspace. Note: only the
// diagonal and off-diagonal of T are nonzero, i.e. it's tridiagonal,
// but we don't have access to the low-level algorithms that work
// on that type of matrix (since we want to use ATLAS). So we just
// do normal SVD, on a full matrix; it won't typically dominate.
Q.Row(0).SetRandn();
Q.Row(0).Scale(1.0 / Q.Row(0).Norm(2));
for (MatrixIndexT d = 0; d < lanczos_dim; d++) {
Vector<Real> r(dim);
r.AddSpVec(1.0, S, Q.Row(d), 0.0);
// r = S * q_d
MatrixIndexT counter = 0;
Real end_prod;
while (1) { // Normally we'll do this loop only once:
// we repeat to handle cases where r gets very much smaller
// and we want to orthogonalize again.
// We do "full orthogonalization" to preserve stability,
// even though this is usually a waste of time.
Real start_prod = VecVec(r, r);
for (SignedMatrixIndexT e = d; e >= 0; e--) { // e must be signed!
SubVector<Real> q_e(Q, e);
Real prod = VecVec(r, q_e);
if (counter == 0 &&
static_cast<MatrixIndexT>(e) + 1 >= d) // Keep T tridiagonal, which
T(d, e) = prod; // mathematically speaking, it is.
r.AddVec(-prod, q_e); // Subtract component in q_e.
}
if (d + 1 == lanczos_dim) break;
end_prod = VecVec(r, r);
if (end_prod <= 0.1 * start_prod) {
// also handles case where both are 0.
// We're not confident any more that it's completely
// orthogonal to the rest so we want to re-do.
if (end_prod == 0.0) r.SetRandn(); // "Restarting".
counter++;
if (counter > 100) KALDI_ERR << "Loop detected in Lanczos iteration.";
} else {
break;
}
}
if (d + 1 != lanczos_dim) {
// OK, at this point we're satisfied that r is orthogonal
// to all previous rows.
KALDI_ASSERT(end_prod != 0.0); // should have looped.
r.Scale(1.0 / std::sqrt(end_prod)); // make it unit.
Q.Row(d + 1).CopyFromVec(r);
}
}
Matrix<Real> R(lanczos_dim, lanczos_dim);
R.SetUnit();
T.Qr(&R); // Diagonalizes T.
Vector<Real> s_tmp(lanczos_dim);
s_tmp.CopyDiagFromSp(T);
// Now T = R * diag(s_tmp) * R^T.
// The next call sorts the elements of s from greatest to least absolute
// value,
// and moves around the rows of R in the corresponding way. This picks out
// the largest (absolute) eigenvalues.
SortSvd(&s_tmp, static_cast<Matrix<Real> *>(NULL), &R);
// Keep only the initial rows of R, those corresponding to greatest (absolute)
// eigenvalues.
SubMatrix<Real> Rsub(R, 0, eig_dim, 0, lanczos_dim);
SubVector<Real> s_sub(s_tmp, 0, eig_dim);
s->CopyFromVec(s_sub);
// For working out what to do now, just assume the other eigenvalues were
// zero. This is just for purposes of knowing how to get the result, and
// not getting things wrongly transposed.
// We have T = Rsub^T * diag(s_sub) * Rsub.
// Now, T = Q S Q^T, with Q orthogonal, so S = Q^T T Q = Q^T Rsub^T * diag(s)
// * Rsub * Q.
// The output is P and we want S = P * diag(s) * P^T, so we need P = Q^T
// Rsub^T.
P->AddMatMat(1.0, Q, kTrans, Rsub, kTrans, 0.0);
}
// Instantiate the templates for Eig and TopEig.
template void SpMatrix<float>::Eig(VectorBase<float> *,
MatrixBase<float> *) const;
template void SpMatrix<double>::Eig(VectorBase<double> *,
MatrixBase<double> *) const;
template void SpMatrix<float>::TopEigs(VectorBase<float> *, MatrixBase<float> *,
MatrixIndexT) const;
template void SpMatrix<double>::TopEigs(VectorBase<double> *,
MatrixBase<double> *,
MatrixIndexT) const;
// Someone had a problem with the Intel compiler with -O3, with Qr not being
// defined for some strange reason (should automatically happen when
// we instantiate Eig and TopEigs), so we explicitly instantiate it here.
template void SpMatrix<float>::Qr(MatrixBase<float> *Q);
template void SpMatrix<double>::Qr(MatrixBase<double> *Q);
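/*
  Illustrative usage sketch for TopEigs() (added comment; the dimensions are
  just an example): retain the 20 largest-magnitude eigenvalues/eigenvectors
  of a 500x500 symmetric matrix without a full O(n^3) eigendecomposition.

    SpMatrix<double> S(500);
    S.SetRandn();
    Vector<double> s(20);
    Matrix<double> P(500, 20);   // each column will be one eigenvector.
    S.TopEigs(&s, &P);           // uses the Lanczos-based method above;
                                 // falls back to Eig() if lanczos_dim >= dim.
*/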
}
// namespace kaldi
<|start_filename|>tonic-suite/asr/src/bin/sum-lda-accs.cc<|end_filename|>
// bin/sum-lda-accs.cc
// Copyright 2014 LINSE/UFSC; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "util/common-utils.h"
#include "gmm/mle-am-diag-gmm.h"
#include "transform/lda-estimate.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
const char *usage =
"Sum stats obtained with acc-lda.\n"
"Usage: sum-lda-accs [options] <stats-out> <stats-in1> <stats-in2> "
"...\n";
bool binary = true;
ParseOptions po(usage);
po.Register("binary", &binary, "Write accumulators in binary mode.");
po.Read(argc, argv);
if (po.NumArgs() < 2) {
po.PrintUsage();
exit(1);
}
LdaEstimate lda;
std::string stats_out_filename = po.GetArg(1);
for (int32 i = 2; i <= po.NumArgs(); i++) {
bool binary_in, add = true;
Input ki(po.GetArg(i), &binary_in);
lda.Read(ki.Stream(), binary_in, add);
}
Output ko(stats_out_filename, binary);
lda.Write(ko.Stream(), binary);
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/featbin/compare-feats.cc<|end_filename|>
// featbin/compare-feats.cc
// Copyright 2009-2011 Microsoft Corporation
// 2013 Johns Hopkins University (author: <NAME>)
// 2014 Mobvoi Inc. (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "matrix/kaldi-matrix.h"
#include "matrix/kaldi-vector.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
const char *usage =
"Computes relative difference between two sets of features\n"
"per dimension and an average difference\n"
"Can be used to figure out how different two sets of features are.\n"
"Inputs must have same dimension. Prints to stdout a similarity\n"
"metric vector that is 1.0 per dimension if the features identical,\n"
"and <1.0 otherwise, and an average overall similarity value.\n"
"\n"
"Usage: compare-feats [options] <in-rspecifier1> <in-rspecifier2>\n"
"e.g.: compare-feats ark:1.ark ark:2.ark\n";
ParseOptions po(usage);
BaseFloat threshold = 0.99;
po.Register("threshold", &threshold,
"Similarity threshold, affects "
"return status");
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
std::string rspecifier1 = po.GetArg(1), rspecifier2 = po.GetArg(2);
int32 num_done = 0, num_err = 0, Dim = 0;
Vector<double> prod1, prod2, cross_prod, similarity_metric;
double overall_similarity = 0;
SequentialBaseFloatMatrixReader feat_reader1(rspecifier1);
RandomAccessBaseFloatMatrixReader feat_reader2(rspecifier2);
for (; !feat_reader1.Done(); feat_reader1.Next()) {
std::string utt = feat_reader1.Key();
Matrix<BaseFloat> feat1(feat_reader1.Value());
if (!feat_reader2.HasKey(utt)) {
KALDI_WARN << "Second table has no feature for utterance " << utt;
num_err++;
continue;
}
Matrix<BaseFloat> feat2(feat_reader2.Value(utt));
if (feat1.NumCols() != feat2.NumCols()) {
KALDI_WARN << "Feature dimensions differ for utterance " << utt << ", "
<< feat1.NumCols() << " vs. " << feat2.NumCols()
<< ", skipping utterance." << utt;
num_err++;
continue;
}
if (num_done == 0) {
Dim = feat1.NumCols();
prod1.Resize(Dim);
prod2.Resize(Dim);
cross_prod.Resize(Dim);
similarity_metric.Resize(Dim);
}
Vector<BaseFloat> feat1_col(feat1.NumRows()), feat2_col(feat2.NumRows());
for (MatrixIndexT i = 0; i < feat1.NumCols(); i++) {
feat1_col.CopyColFromMat(feat1, i);
feat2_col.CopyColFromMat(feat2, i);
prod1(i) += VecVec(feat1_col, feat1_col);
prod2(i) += VecVec(feat2_col, feat2_col);
cross_prod(i) += VecVec(feat1_col, feat2_col);
}
num_done++;
}
KALDI_LOG << "self-product of 1st features for each column dimension: "
<< prod1;
KALDI_LOG << "self-product of 2nd features for each column dimension: "
<< prod2;
KALDI_LOG << "cross-product for each column dimension: " << cross_prod;
prod1.AddVec(1.0, prod2);
similarity_metric.AddVecDivVec(2.0, cross_prod, prod1, 0.0);
KALDI_LOG << "Similarity metric for each dimension " << similarity_metric
<< " (1.0 means identical, the smaller the more different)";
overall_similarity = similarity_metric.Sum() / static_cast<double>(Dim);
KALDI_LOG << "Overall similarity for the two feats is:"
<< overall_similarity
<< " (1.0 means identical, the smaller the more different)";
KALDI_LOG << "Processed " << num_done << " feature files, " << num_err
<< " had errors.";
bool similar = (overall_similarity >= threshold);
if (num_done > 0) {
if (similar) {
KALDI_LOG << "Features are considered similar since "
<< overall_similarity << " >= " << threshold;
} else {
KALDI_LOG << "Features are considered dissimilar since "
<< overall_similarity << " < " << threshold;
}
}
return (num_done > 0 && similar) ? 0 : 1;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
/*
tested with:
compare-feats 'ark:echo foo [ 1.0 2.0 ]|' 'ark:echo foo [ 1.0 2.0 ]|'
*/
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-adapt-map.cc<|end_filename|>
// gmmbin/gmm-adapt-map.cc
// Copyright 2012 Cisco Systems (author: <NAME>)
// Johns Hopkins University (author: <NAME>)
// 2014 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include <string>
#include <vector>
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "gmm/mle-am-diag-gmm.h"
#include "hmm/posterior.h"
int main(int argc, char *argv[]) {
try {
typedef kaldi::int32 int32;
using namespace kaldi;
const char *usage =
"Compute MAP estimates per-utterance (default) or per-speaker for\n"
"the supplied set of speakers (spk2utt option). This will typically\n"
"be piped into gmm-latgen-map\n"
"\n"
"Usage: gmm-adapt-map [options] <model-in> <feature-rspecifier> "
"<posteriors-rspecifier> <map-am-wspecifier>\n";
ParseOptions po(usage);
std::string spk2utt_rspecifier;
bool binary = true;
MapDiagGmmOptions map_config;
std::string update_flags_str = "mw";
po.Register("spk2utt", &spk2utt_rspecifier,
"rspecifier for speaker to "
"utterance-list map");
po.Register("binary", &binary, "Write output in binary mode");
po.Register("update-flags", &update_flags_str,
"Which GMM parameters will be "
"updated: subset of mvw.");
map_config.Register(&po);
po.Read(argc, argv);
if (po.NumArgs() != 4) {
po.PrintUsage();
exit(1);
}
std::string model_filename = po.GetArg(1),
feature_rspecifier = po.GetArg(2),
posteriors_rspecifier = po.GetArg(3),
map_am_wspecifier = po.GetArg(4);
GmmFlagsType update_flags = StringToGmmFlags(update_flags_str);
RandomAccessPosteriorReader posteriors_reader(posteriors_rspecifier);
MapAmDiagGmmWriter map_am_writer(map_am_wspecifier);
AmDiagGmm am_gmm;
TransitionModel trans_model;
{
bool binary;
Input is(model_filename, &binary);
trans_model.Read(is.Stream(), binary);
am_gmm.Read(is.Stream(), binary);
}
double tot_like = 0.0, tot_like_change = 0.0, tot_t = 0.0,
tot_t_check = 0.0;
int32 num_done = 0, num_err = 0;
if (spk2utt_rspecifier != "") { // per-speaker adaptation
SequentialTokenVectorReader spk2utt_reader(spk2utt_rspecifier);
RandomAccessBaseFloatMatrixReader feature_reader(feature_rspecifier);
for (; !spk2utt_reader.Done(); spk2utt_reader.Next()) {
std::string spk = spk2utt_reader.Key();
AmDiagGmm copy_am_gmm;
copy_am_gmm.CopyFromAmDiagGmm(am_gmm);
AccumAmDiagGmm map_accs;
map_accs.Init(am_gmm, update_flags);
const std::vector<std::string> &uttlist = spk2utt_reader.Value();
// for each speaker, estimate MAP means
std::vector<std::string>::const_iterator iter = uttlist.begin(),
end = uttlist.end();
for (; iter != end; ++iter) {
std::string utt = *iter;
if (!feature_reader.HasKey(utt)) {
KALDI_WARN << "Did not find features for utterance " << utt;
continue;
}
if (!posteriors_reader.HasKey(utt)) {
KALDI_WARN << "Did not find posteriors for utterance " << utt;
num_err++;
continue;
}
const Matrix<BaseFloat> &feats = feature_reader.Value(utt);
const Posterior &posterior = posteriors_reader.Value(utt);
if (posterior.size() != feats.NumRows()) {
KALDI_WARN << "Posteriors has wrong size " << (posterior.size())
<< " vs. " << (feats.NumRows());
num_err++;
continue;
}
BaseFloat file_like = 0.0, file_t = 0.0;
Posterior pdf_posterior;
ConvertPosteriorToPdfs(trans_model, posterior, &pdf_posterior);
for (size_t i = 0; i < posterior.size(); i++) {
for (size_t j = 0; j < pdf_posterior[i].size(); j++) {
int32 pdf_id = pdf_posterior[i][j].first;
BaseFloat weight = pdf_posterior[i][j].second;
file_like += map_accs.AccumulateForGmm(copy_am_gmm, feats.Row(i),
pdf_id, weight);
file_t += weight;
}
}
KALDI_VLOG(2) << "Average like for utterance " << utt << " is "
<< (file_like / file_t) << " over " << file_t
<< " frames.";
tot_like += file_like;
tot_t += file_t;
num_done++;
if (num_done % 10 == 0)
KALDI_VLOG(1) << "Avg like per frame so far is "
<< (tot_like / tot_t);
} // end looping over all utterances of the current speaker
// MAP estimation.
BaseFloat spk_objf_change = 0.0, spk_frames = 0.0;
MapAmDiagGmmUpdate(map_config, map_accs, update_flags, ©_am_gmm,
&spk_objf_change, &spk_frames);
KALDI_LOG << "For speaker " << spk << ", objective function change "
<< "from MAP was " << (spk_objf_change / spk_frames)
<< " over " << spk_frames << " frames.";
tot_like_change += spk_objf_change;
tot_t_check += spk_frames;
// Writing AM for each speaker in a table
map_am_writer.Write(spk, copy_am_gmm);
} // end looping over speakers
} else { // per-utterance adaptation
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string utt = feature_reader.Key();
AmDiagGmm copy_am_gmm;
copy_am_gmm.CopyFromAmDiagGmm(am_gmm);
AccumAmDiagGmm map_accs;
map_accs.Init(am_gmm, update_flags);
map_accs.SetZero(update_flags);
if (!posteriors_reader.HasKey(utt)) {
KALDI_WARN << "Did not find aligned transcription for utterance "
<< utt;
num_err++;
continue;
}
const Matrix<BaseFloat> &feats = feature_reader.Value();
const Posterior &posterior = posteriors_reader.Value(utt);
if (posterior.size() != feats.NumRows()) {
KALDI_WARN << "Posteriors has wrong size " << (posterior.size())
<< " vs. " << (feats.NumRows());
num_err++;
continue;
}
num_done++;
BaseFloat file_like = 0.0, file_t = 0.0;
Posterior pdf_posterior;
ConvertPosteriorToPdfs(trans_model, posterior, &pdf_posterior);
for (size_t i = 0; i < posterior.size(); i++) {
for (size_t j = 0; j < pdf_posterior[i].size(); j++) {
int32 pdf_id = pdf_posterior[i][j].first;
BaseFloat prob = pdf_posterior[i][j].second;
file_like += map_accs.AccumulateForGmm(copy_am_gmm, feats.Row(i),
pdf_id, prob);
file_t += prob;
}
}
KALDI_VLOG(2) << "Average like for utterance " << utt << " is "
<< (file_like / file_t) << " over " << file_t
<< " frames.";
tot_like += file_like;
tot_t += file_t;
if (num_done % 10 == 0)
KALDI_VLOG(1) << "Avg like per frame so far is "
<< (tot_like / tot_t);
// MAP
BaseFloat utt_objf_change = 0.0, utt_frames = 0.0;
MapAmDiagGmmUpdate(map_config, map_accs, update_flags, ©_am_gmm,
&utt_objf_change, &utt_frames);
KALDI_LOG << "For utterance " << utt << ", objective function change "
<< "from MAP was " << (utt_objf_change / utt_frames)
<< " over " << utt_frames << " frames.";
tot_like_change += utt_objf_change;
tot_t_check += utt_frames;
// Writing AM for each utterance in a table
map_am_writer.Write(feature_reader.Key(), copy_am_gmm);
}
}
KALDI_ASSERT(ApproxEqual(tot_t, tot_t_check));
KALDI_LOG << "Done " << num_done << " files, " << num_err << " with errors";
KALDI_LOG << "Overall acoustic likelihood was " << (tot_like / tot_t)
<< " and change in likelihod per frame was "
<< (tot_like_change / tot_t) << " over " << tot_t << " frames.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/tree/build-tree-utils.h<|end_filename|>
// tree/build-tree-utils.h
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_TREE_BUILD_TREE_UTILS_H_
#define KALDI_TREE_BUILD_TREE_UTILS_H_
#include "tree/build-tree-questions.h"
// build-tree-questions.h needed for this typedef:
// typedef std::vector<std::pair<EventType, Clusterable*> > BuildTreeStatsType;
// and for other #includes.
namespace kaldi {
/// \defgroup tree_group_lower Low-level functions for manipulating statistics
/// and event-maps
/// See \ref tree_internals and specifically \ref treei_func for context.
/// \ingroup tree_group
///
/// @{
/// This frees the Clusterable* pointers in "stats", where non-NULL, and sets
/// them to NULL.
/// Does not delete the pointer "stats" itself.
void DeleteBuildTreeStats(BuildTreeStatsType *stats);
/// Writes BuildTreeStats object. This works even if pointers are NULL.
void WriteBuildTreeStats(std::ostream &os, bool binary,
const BuildTreeStatsType &stats);
/// Reads BuildTreeStats object. The "example" argument must be of the same
/// type as the stats on disk, and is needed for access to the correct "Read"
/// function. It was organized this way for easier extensibility (so adding new
/// Clusterable derived classes isn't painful)
void ReadBuildTreeStats(std::istream &is, bool binary,
const Clusterable &example, BuildTreeStatsType *stats);
/// Convenience function e.g. to work out possible values of the phones from
/// just the stats.
/// Returns true if key was always defined inside the stats.
/// May be used with ans == NULL to find out if the key was always defined.
bool PossibleValues(EventKeyType key, const BuildTreeStatsType &stats,
std::vector<EventValueType> *ans);
/// Splits stats according to the EventMap, indexing them at output by the
/// leaf type. A utility function. NOTE-- pointers in stats_out point to
/// the same memory location as those in stats. No copying of Clusterable*
/// objects happens. Will add to stats in stats_out if non-empty at input.
/// This function may increase the size of vector stats_out as necessary
/// to accommodate stats, but will never decrease the size.
void SplitStatsByMap(const BuildTreeStatsType &stats_in, const EventMap &e,
std::vector<BuildTreeStatsType> *stats_out);
/// SplitStatsByKey splits stats up according to the value of a particular key,
/// which must be always defined and nonnegative. Like MapStats. Pointers to
/// Clusterable* in stats_out are not newly allocated-- they are the same as the
/// ones in stats_in. Generally they will still be owned at stats_in (user can
/// decide where to allocate ownership).
void SplitStatsByKey(const BuildTreeStatsType &stats_in, EventKeyType key,
std::vector<BuildTreeStatsType> *stats_out);
/// Converts stats from a given context-window (N) and central-position (P) to a
/// different N and P, by possibly reducing context. This function does a job
/// that's quite specific to the "normal" stats format we use. See \ref
/// tree_window for background. This function may delete some keys and change
/// others, depending on the N and P values. It expects that at input, all keys
/// will either be -1 or lie between 0 and oldN-1. At output, keys will be
/// either -1 or between 0 and newN-1.
/// Returns false if we could not convert the stats (e.g. because newN is larger
/// than oldN).
bool ConvertStats(int32 oldN, int32 oldP, int32 newN, int32 newP,
BuildTreeStatsType *stats);
/// FilterStatsByKey filters the stats according the value of a specified key.
/// If include_if_present == true, it only outputs the stats whose key is in
/// "values"; otherwise it only outputs the stats whose key is not in "values".
/// At input, "values" must be sorted and unique, and all stats in "stats_in"
/// must have "key" defined. At output, pointers to Clusterable* in stats_out
/// are not newly allocated-- they are the same as the ones in stats_in.
void FilterStatsByKey(
const BuildTreeStatsType &stats_in, EventKeyType key,
std::vector<EventValueType> &values,
bool include_if_present, // true-> retain only if in "values",
// false-> retain only if not in "values".
BuildTreeStatsType *stats_out);
/// Sums stats, or returns NULL if stats_in has no non-NULL stats.
/// Stats are newly allocated, owned by caller.
Clusterable *SumStats(const BuildTreeStatsType &stats_in);
/// Sums the normalizer [typically, data-count] over the stats.
BaseFloat SumNormalizer(const BuildTreeStatsType &stats_in);
/// Sums the objective function over the stats.
BaseFloat SumObjf(const BuildTreeStatsType &stats_in);
/// Sum a vector of stats. Leaves NULL as pointer if no stats available.
/// The pointers in stats_out are owned by caller. At output, there may be
/// NULLs in the vector stats_out.
void SumStatsVec(const std::vector<BuildTreeStatsType> &stats_in,
std::vector<Clusterable *> *stats_out);
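/*
  Illustrative sketch of how the two functions above are typically combined
  (added comment; "stats" and "to_pdf_map" stand for whatever BuildTreeStatsType
  and EventMap the caller has built):

    std::vector<BuildTreeStatsType> split_stats;
    SplitStatsByMap(stats, to_pdf_map, &split_stats);  // one entry per leaf.
    std::vector<Clusterable*> summed_stats;
    SumStatsVec(split_stats, &summed_stats);           // may contain NULLs.
    for (size_t leaf = 0; leaf < summed_stats.size(); leaf++)
      if (summed_stats[leaf] != NULL)
        KALDI_VLOG(2) << "Leaf " << leaf << " count = "
                      << summed_stats[leaf]->Normalizer();
    DeletePointers(&summed_stats);                     // caller owns these.
*/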
/// Cluster the stats given the event map return the total objf given those
/// clusters.
BaseFloat ObjfGivenMap(const BuildTreeStatsType &stats_in, const EventMap &e);
/// FindAllKeys puts in *keys the (sorted, unique) list of all key identities in
/// the stats.
/// If type == kAllKeysInsistIdentical, it will insist that this set of keys is
/// the same for all the
/// stats (else exception is thrown).
/// if type == kAllKeysIntersection, it will return the smallest common set of
/// keys present in
/// the set of stats
/// if type== kAllKeysUnion (currently probably not so useful since maps will
/// return "undefined"
/// if key is not present), it will return the union of all the keys present
/// in the stats.
void FindAllKeys(const BuildTreeStatsType &stats, AllKeysType keys_type,
std::vector<EventKeyType> *keys);
/// @}
/**
\defgroup tree_group_intermediate Intermediate-level functions used in building
the tree
These functions are are used in top-level tree-building code (\ref
tree_group_top); see
\ref tree_internals for documentation.
\ingroup tree_group
@{
*/
/// Returns a tree with just one node. Used @ start of tree-building process.
/// Not really used in current recipes.
inline EventMap *TrivialTree(int32 *num_leaves) {
KALDI_ASSERT(*num_leaves == 0); // in envisaged usage.
return new ConstantEventMap((*num_leaves)++);
}
/// DoTableSplit does a complete split on this key (e.g. might correspond to
/// central phone
/// (key = P-1), or HMM-state position (key == kPdfClass == -1). Stats used to
/// work out possible
/// values of the event. "num_leaves" is used to allocate new leaves. All
/// stats must have
/// this key defined, or this function will crash.
EventMap *DoTableSplit(const EventMap &orig, EventKeyType key,
const BuildTreeStatsType &stats, int32 *num_leaves);
/// DoTableSplitMultiple does a complete split on all the keys, in order from
/// keys[0],
/// keys[1]
/// and so on. The stats are used to work out possible values corresponding to
/// the key.
/// "num_leaves" is used to allocate new leaves. All stats must have
/// the keys defined, or this function will crash.
/// Returns a newly allocated event map.
EventMap *DoTableSplitMultiple(const EventMap &orig,
const std::vector<EventKeyType> &keys,
const BuildTreeStatsType &stats,
int32 *num_leaves);
/// "ClusterEventMapGetMapping" clusters the leaves of the EventMap, with
/// "thresh" a delta-likelihood
/// threshold to control how many leaves we combine (might be the same as the
/// delta-like
/// threshold used in splitting.
// The function returns the #leaves we combined. The same leaf-ids of the
// leaves being clustered
// will be used for the clustered leaves (but other than that there is no
// special rule which
// leaf-ids should be used at output).
// It outputs the mapping for leaves, in "mapping", which may be empty at the
// start
// but may also contain mappings for other parts of the tree, which must contain
// disjoint leaves from this part. This is so that Cluster can
// be called multiple times for sub-parts of the tree (with disjoint sets of
// leaves),
// e.g. if we want to avoid sharing across phones. Afterwards you can use Copy
// function
// of EventMap to apply the mapping, i.e. call e_in.Copy(mapping) to get the new
// map.
// Note that the application of Cluster creates gaps in the leaves. You should
// then
// call RenumberEventMap(e_in.Copy(mapping), num_leaves).
// *If you only want to cluster a subset of the leaves (e.g. just non-silence,
// or just
// a particular phone, do this by providing a set of "stats" that correspond to
// just
// this subset of leaves*. Leaves with no stats will not be clustered.
// See build-tree.cc for an example of usage.
int ClusterEventMapGetMapping(const EventMap &e_in,
const BuildTreeStatsType &stats, BaseFloat thresh,
std::vector<EventMap *> *mapping);
/// This is as ClusterEventMapGetMapping but a more convenient interface
/// that exposes less of the internals. It uses a bottom-up clustering to
/// combine the leaves, until the log-likelihood decrease from combining two
/// leaves exceeds the threshold.
EventMap *ClusterEventMap(const EventMap &e_in, const BuildTreeStatsType &stats,
BaseFloat thresh, int32 *num_removed);
/// This is as ClusterEventMap, but first splits the stats on the keys specified
/// in "keys" (e.g. typically keys = [ -1, P ]), and only clusters within the
/// classes defined by that splitting.
/// Note-- leaves will be non-consecutive at output, use RenumberEventMap.
EventMap *ClusterEventMapRestrictedByKeys(const EventMap &e_in,
const BuildTreeStatsType &stats,
BaseFloat thresh,
const std::vector<EventKeyType> &keys,
int32 *num_removed);
/// This version of ClusterEventMapRestricted restricts the clustering to only
/// allow things that "e_restrict" maps to the same value to be clustered
/// together.
EventMap *ClusterEventMapRestrictedByMap(const EventMap &e_in,
const BuildTreeStatsType &stats,
BaseFloat thresh,
const EventMap &e_restrict,
int32 *num_removed);
/// RenumberEventMap [intended to be used after calling ClusterEventMap]
/// renumbers an EventMap so its leaves are consecutive.
/// It puts the number of leaves in *num_leaves.  If you later need the mapping
/// of the leaves, modify the function and add a new argument.
EventMap *RenumberEventMap(const EventMap &e_in, int32 *num_leaves);
/// This function remaps the event-map leaves using this mapping,
/// indexed by the number at leaf.
EventMap *MapEventMapLeaves(const EventMap &e_in,
const std::vector<int32> &mapping);
/// ShareEventMapLeaves performs a quite specific function that allows us to
/// generate trees where, for a certain list of phones, and for all states in
/// the phone, all the pdf's are shared.
/// Each element of "values" contains a list of phones (may be just one phone),
/// all states of which we want shared together). Typically at input, "key"
/// will
/// equal P, the central-phone position, and "values" will contain just one
/// list containing the silence phone.
/// This function renumbers the event map leaves after doing the sharing, to
/// make the event-map leaves contiguous.
EventMap *ShareEventMapLeaves(const EventMap &e_in, EventKeyType key,
std::vector<std::vector<EventValueType> > &values,
int32 *num_leaves);
/// Does a decision-tree split at the leaves of an EventMap.
/// @param orig [in] The EventMap whose leaves we want to split [may be either
///      a trivial or a non-trivial one].
/// @param stats [in] The statistics for splitting the tree; if you do not want
///      a particular subset of leaves to be split, make sure the stats
///      corresponding to those leaves are not present in "stats".
/// @param qcfg [in] Configuration class that contains initial questions (e.g.
///      sets of phones) for each key and says whether to refine these
///      questions during tree building.
/// @param thresh [in] A log-likelihood threshold (e.g. 300) that can be used
///      to limit the number of leaves; you can use zero and set max_leaves
///      instead.
/// @param max_leaves [in] Will stop leaves being split after they reach this
///      number.
/// @param num_leaves [in,out] A pointer used to allocate leaves; always
///      corresponds to the current number of leaves (it is incremented when
///      this is increased).
/// @param objf_impr_out [out] If non-NULL, will be set to the objective
///      improvement due to splitting (not normalized by the number of frames).
/// @param smallest_split_change_out If non-NULL, will be set to the smallest
///      objective-function improvement that we got from splitting any leaf;
///      useful to provide a threshold for ClusterEventMap.
/// @return The EventMap after splitting is returned; the pointer is owned by
///      the caller.
EventMap *SplitDecisionTree(const EventMap &orig,
const BuildTreeStatsType &stats, Questions &qcfg,
BaseFloat thresh,
int32 max_leaves, // max_leaves<=0 -> no maximum.
int32 *num_leaves, BaseFloat *objf_impr_out,
BaseFloat *smallest_split_change_out);
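/// A rough sketch (not taken verbatim from any recipe; see build-tree.cc and
/// build-tree.h for the real top-level code) of how the functions in this
/// group are typically chained together.  "stats", "keys", "qcfg" and the
/// thresholds below are placeholders for values a caller would supply:
/// \code
///   int32 num_leaves = 0;
///   EventMap *trivial = TrivialTree(&num_leaves);
///   EventMap *table = DoTableSplitMultiple(*trivial, keys, stats, &num_leaves);
///   BaseFloat impr, smallest_split;
///   EventMap *split = SplitDecisionTree(*table, stats, qcfg, thresh,
///                                       max_leaves, &num_leaves, &impr,
///                                       &smallest_split);
///   int32 num_removed;
///   EventMap *clustered = ClusterEventMapRestrictedByKeys(
///       *split, stats, cluster_thresh, keys, &num_removed);
///   EventMap *final_map = RenumberEventMap(*clustered, &num_leaves);
///   // The intermediate maps can be deleted once the next one has been built.
/// \endcode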
/// CreateRandomQuestions will initialize a Questions randomly, in a reasonable
/// way [for testing purposes, or when hand-designed questions are not
/// available].
/// e.g. num_quest = 5 might be a reasonable value if num_iters > 0, or
/// num_quest = 20 otherwise.
void CreateRandomQuestions(const BuildTreeStatsType &stats, int32 num_quest,
Questions *cfg_out);
/// FindBestSplitForKey is a function used in DoDecisionTreeSplit.
/// It finds the best split for this key, given these stats.
/// It will return 0 if the key was not always defined for the stats.
BaseFloat FindBestSplitForKey(const BuildTreeStatsType &stats,
const Questions &qcfg, EventKeyType key,
std::vector<EventValueType> *yes_set);
/// GetStubMap is used in tree-building functions to get the initial
/// to-states map, before the decision-tree-building process. It creates
/// a simple map that splits on groups of phones. For the set of phones in
/// phone_sets[i] it creates either: if share_roots[i] == true, a single
/// leaf node, or if share_roots[i] == false, separate root nodes for
/// each HMM-position (it goes up to the highest position for any
/// phone in the set, although it will warn if you share roots between
/// phones with different numbers of states, which is a weird thing to
/// do but should still work).  If any phone is present
/// in "phone_sets" but "phone2num_pdf_classes" does not map it to a length,
/// it is an error. Note that the behaviour of the resulting map is
/// undefined for phones not present in "phone_sets".
/// At entry, this function should be called with (*num_leaves == 0).
/// It will number the leaves starting from (*num_leaves).
EventMap *GetStubMap(
int32 P, const std::vector<std::vector<int32> > &phone_sets,
const std::vector<int32> &phone2num_pdf_classes,
const std::vector<bool> &share_roots, // indexed by index into phone_sets.
int32 *num_leaves);
/// Note: GetStubMap with P = 0 can be used to get a standard monophone system.
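/// A minimal, hypothetical illustration of the inputs (the phone identities,
/// lengths and sharing choices below are invented for the example):
/// \code
///   int32 P = 1;                                     // central-phone position
///   std::vector<std::vector<int32> > phone_sets(2);
///   phone_sets[0].push_back(1);                      // e.g. the silence phone
///   phone_sets[1].push_back(2); phone_sets[1].push_back(3);
///   std::vector<int32> phone2num_pdf_classes(4, 3);  // every phone has 3 states
///   std::vector<bool> share_roots(2);
///   share_roots[0] = true;                           // one leaf for set 0
///   share_roots[1] = false;                          // per-state roots for set 1
///   int32 num_leaves = 0;
///   EventMap *stub = GetStubMap(P, phone_sets, phone2num_pdf_classes,
///                               share_roots, &num_leaves);
/// \endcode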
/// @}
} // end namespace kaldi
#endif
<|start_filename|>tonic-suite/asr/src/decoder/decodable-sum.h<|end_filename|>
// decoder/decodable-sum.h
// Copyright 2009-2011 Saarland University; Microsoft Corporation;
// <NAME>, <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_DECODER_DECODABLE_SUM_H_
#define KALDI_DECODER_DECODABLE_SUM_H_
#include <vector>
#include <utility>
#include "base/kaldi-common.h"
#include "itf/decodable-itf.h"
namespace kaldi {
// The DecodableSum object is a very simple object that just sums
// scores over a number of Decodable objects. They must all have
// the same dimensions.
class DecodableSum : public DecodableInterface {
public:
// Does not take ownership of pointers! They are just
// pointers because they are non-const.
DecodableSum(DecodableInterface *d1, BaseFloat w1, DecodableInterface *d2,
BaseFloat w2) {
decodables_.push_back(std::make_pair(d1, w1));
decodables_.push_back(std::make_pair(d2, w2));
CheckSizes();
}
// Does not take ownership of pointers!
DecodableSum(const std::vector<std::pair<DecodableInterface *, BaseFloat> > &
decodables)
: decodables_(decodables) {
CheckSizes();
}
void CheckSizes() {
KALDI_ASSERT(decodables_.size() >= 1 && decodables_[0].first != NULL);
for (size_t i = 1; i < decodables_.size(); i++)
KALDI_ASSERT(decodables_[i].first != NULL &&
decodables_[i].first->NumIndices() ==
decodables_[0].first->NumIndices());
}
// Note, frames are numbered from zero. But state_index is numbered
// from one (this routine is called by FSTs).
virtual BaseFloat LogLikelihood(int32 frame, int32 state_index) {
BaseFloat sum = 0.0;
// int32 i=1;
for (std::vector<std::pair<DecodableInterface *, BaseFloat> >::iterator
iter = decodables_.begin();
iter != decodables_.end(); ++iter) {
sum += iter->first->LogLikelihood(frame, state_index) * iter->second;
// BaseFloat tmp = iter->first->LogLikelihood(frame, state_index);
// KALDI_LOG << "ITEM " << i << " contributed with loglike=" << tmp << "
// scaled by=" << iter->second;
// i+=1;
// sum += tmp * iter->second;
}
return sum;
}
virtual int32 NumIndices() const {
return decodables_[0].first->NumIndices();
}
virtual bool IsLastFrame(int32 frame) const {
// We require all the decodables have the same #frames. We don't check this
// though.
return decodables_[0].first->IsLastFrame(frame);
}
private:
std::vector<std::pair<DecodableInterface *, BaseFloat> > decodables_;
KALDI_DISALLOW_COPY_AND_ASSIGN(DecodableSum);
};
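// A brief usage sketch.  The two decodable objects here come from hypothetical
// helpers; they are not defined in this header and stand in for whatever
// concrete DecodableInterface implementations the caller has:
//
//   DecodableInterface *d1 = MakeAcousticDecodable();   // hypothetical helper
//   DecodableInterface *d2 = MakeRescoringDecodable();  // hypothetical helper
//   DecodableSum sum(d1, 1.0, d2, 0.5);
//   // sum.LogLikelihood(t, s) == 1.0 * d1->LogLikelihood(t, s)
//   //                          + 0.5 * d2->LogLikelihood(t, s)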
class DecodableSumScaled : public DecodableSum {
public:
DecodableSumScaled(DecodableInterface *d1, BaseFloat w1,
DecodableInterface *d2, BaseFloat w2, BaseFloat scale)
: DecodableSum(d1, w1, d2, w2), scale_(scale) {}
DecodableSumScaled(
const std::vector<std::pair<DecodableInterface *, BaseFloat> > &
decodables,
BaseFloat scale)
: DecodableSum(decodables), scale_(scale) {}
virtual BaseFloat LogLikelihood(int32 frame, int32 state_index) {
return scale_ * DecodableSum::LogLikelihood(frame, state_index);
}
private:
BaseFloat scale_;
KALDI_DISALLOW_COPY_AND_ASSIGN(DecodableSumScaled);
};
} // namespace kaldi
#endif // KALDI_DECODER_DECODABLE_SUM_H_
<|start_filename|>tonic-suite/asr/src/latbin/lattice-depth-per-frame.cc<|end_filename|>
// latbin/lattice-depth-per-frame.cc
// Copyright 2013 <NAME>
// 2013 Johns Hopkins University (Author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "fstext/fstext-lib.h"
#include "lat/kaldi-lattice.h"
#include "lat/lattice-functions.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
using fst::VectorFst;
using fst::StdArc;
typedef StdArc::StateId StateId;
const char *usage =
"For each lattice, compute a vector of length (num-frames) saying how\n"
"may arcs cross each frame. See also lattice-depth\n"
"Usage: lattice-depth-per-frame <lattice-rspecifier> "
"<depth-wspecifier>\n"
"E.g.: lattice-depth-per-frame ark:- ark,t:-\n";
ParseOptions po(usage);
po.Read(argc, argv);
if (po.NumArgs() != 2) {
po.PrintUsage();
exit(1);
}
std::string lats_rspecifier = po.GetArg(1);
SequentialCompactLatticeReader clat_reader(lats_rspecifier);
std::string depth_wspecifier = po.GetOptArg(2);
Int32VectorWriter lats_depth_writer(depth_wspecifier);
int64 num_done = 0;
for (; !clat_reader.Done(); clat_reader.Next()) {
CompactLattice clat = clat_reader.Value();
std::string key = clat_reader.Key();
TopSortCompactLatticeIfNeeded(&clat);
std::vector<int32> depth_per_frame;
CompactLatticeDepthPerFrame(clat, &depth_per_frame);
lats_depth_writer.Write(key, depth_per_frame);
num_done++;
}
KALDI_LOG << "Done " << num_done << " lattices.";
if (num_done != 0)
return 0;
else
return 1;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/cudamatrix/cu-block-matrix.h<|end_filename|>
// cudamatrix/cu-block-matrix.h
// Copyright 2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_CUDAMATRIX_CU_BLOCK_MATRIX_H_
#define KALDI_CUDAMATRIX_CU_BLOCK_MATRIX_H_
#include <sstream>
#include <vector>
#include "cudamatrix/cu-common.h"
namespace kaldi {
/**
The class CuBlockMatrix holds a vector of objects of type CuMatrix,
say, M_1, M_2, .. M_N
and it represents the matrix diag(M_1, M_2, ... M_N). Note:
the individual matrices do not have to be square. The reason the
class is needed is mostly so that we can efficiently multiply by this
block-diagonal structure in a parallel way.
If we have a GPU available, CuBlockMatrix will store a copy of the
individual CuMatrix quantities M_1 .. M_N on the GPU, but their
   'primary' home remains on the CPU; what we mean by this is that
while the data remains on the GPU, the "primary" version of the
Matrix object that holds the pointers will remain on the CPU.
We just copy it over to the GPU whenever it is changed.
*/
template <typename Real>
class CuBlockMatrix {
public:
friend class CuMatrixBase<Real>;
CuBlockMatrix();
CuBlockMatrix(const std::vector<CuMatrix<Real> > &data);
~CuBlockMatrix() { Destroy(); }
/// Copy constructor
CuBlockMatrix(const CuBlockMatrix &other);
/// Assignment operator
CuBlockMatrix &operator=(const CuBlockMatrix &other);
void Write(std::ostream &os, bool binary) const;
void Read(std::istream &is, bool binary);
MatrixIndexT NumRows() const { return num_rows_; }
MatrixIndexT NumCols() const { return data_.num_cols_; }
MatrixIndexT NumBlocks() const { return block_data_.size(); }
// Returns max num-columns of any block
MatrixIndexT MaxBlockCols() const;
// Returns max num-rows of any block
MatrixIndexT MaxBlockRows() const;
const CuSubMatrix<Real> Block(MatrixIndexT b) const;
CuSubMatrix<Real> Block(
MatrixIndexT b); // return CuMatrixBase to disallow resizes.
  /// Does *this = alpha * A * B + beta * *this, discarding elements of the
  /// product outside the block structure of the *this matrix.  The transA and
  /// transB parameters can be used to substitute A^T for A and B^T for B,
  /// respectively.
void AddMatMat(BaseFloat alpha, const CuMatrix<Real> &A,
MatrixTransposeType transA, const CuMatrix<Real> &B,
MatrixTransposeType transB, BaseFloat beta);
  /// Copies elements within the block structure from matrix M, discarding
  /// others.
  /// Note: this has not been implemented in a very efficient way; it's used
  /// only for testing.
void CopyFromMat(const CuMatrix<Real> &M);
/// Normalizes the columns of *this so that each one sums to one.
/// On error (e.g. inf's), will set the column to a constant value that
/// sums to one.
void NormalizeColumns();
void Swap(CuBlockMatrix *other);
protected:
CuMatrix<Real> data_; // This is a single matrix into which
// we pack all the blocks (possibly with spaces left over)
struct BlockMatrixData {
MatrixIndexT num_rows;
MatrixIndexT num_cols;
MatrixIndexT row_offset;
MatrixIndexT col_offset;
};
#if HAVE_CUDA == 1
const CuBlockMatrixData *CuData() const { return cu_data_; }
#endif
private:
/// If using GPU and cu_data_ != NULL, free cu_data_ and set it to NULL
void FreeCudaData();
/// If using GPU, allocate and set cu_data_ on the GPU to reflect "data_".
void SetCudaData();
/// Frees and deinitializes everything.
void Destroy();
std::vector<BlockMatrixData> block_data_;
MatrixIndexT num_rows_; // sum of num_rows of elements of block_data_.
#if HAVE_CUDA == 1
CuBlockMatrixData *
cu_data_; // We store the pointers and some additional info
// on the GPU card in a form more suited to
// use by CUDA kernels.
#endif
}; // class CuBlockMatrix
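// A hedged usage sketch (the dimensions are made up for illustration): build
// a block-diagonal matrix from two blocks and multiply a product into it,
// keeping only the entries that lie inside the block structure.
//
//   std::vector<CuMatrix<BaseFloat> > blocks(2);
//   blocks[0].Resize(3, 4);
//   blocks[1].Resize(2, 5);
//   CuBlockMatrix<BaseFloat> block_mat(blocks);  // represents diag(M_1, M_2),
//                                                // so it is 5 rows by 9 cols.
//   CuMatrix<BaseFloat> A(5, 7), B(7, 9);
//   A.SetRandn();
//   B.SetRandn();
//   block_mat.AddMatMat(1.0, A, kNoTrans, B, kNoTrans, 0.0);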
template <typename Real>
std::ostream &operator<<(std::ostream &out, const CuBlockMatrix<Real> &mat);
} // namespace kaldi
#endif
<|start_filename|>tonic-suite/asr/src/thread/kaldi-thread-test.cc<|end_filename|>
// thread/kaldi-thread-test.cc
// Copyright 2012 Johns Hopkins University (Author: <NAME>)
// <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "thread/kaldi-thread.h"
#include "thread/kaldi-mutex.h"
namespace kaldi {
class MyThreadClass { // Sums up integers from 0 to max_to_count-1.
public:
MyThreadClass(int32 max_to_count, int32 *i)
: max_to_count_(max_to_count), iptr_(i), private_counter_(0) {}
// Use default copy constructor and assignment operators.
void operator()() {
int32 block_size = (max_to_count_ + (num_threads_ - 1)) / num_threads_;
int32 start = block_size * thread_id_,
end = std::min(max_to_count_, start + block_size);
for (int32 j = start; j < end; j++) private_counter_ += j;
}
~MyThreadClass() { *iptr_ += private_counter_; }
static void *run(void *c_in) {
MyThreadClass *c = static_cast<MyThreadClass *>(c_in);
(*c)(); // call operator () on it.
return NULL;
}
public:
int32 thread_id_; // 0 <= thread_number < num_threads
int32 num_threads_;
private:
MyThreadClass() {} // Disallow empty constructor.
int32 max_to_count_;
int32 *iptr_;
int32 private_counter_;
};
void TestThreads() {
g_num_threads = 8;
// run method with temporary threads on 8 threads
// Note: uncomment following line for the possibility of simple benchmarking
// for(int i=0; i<100000; i++)
{
int32 max_to_count = 10000, tot = 0;
MyThreadClass c(max_to_count, &tot);
RunMultiThreaded(c);
KALDI_ASSERT(tot == (10000 * (10000 - 1)) / 2);
}
g_num_threads = 1;
// let's try the same, but with only one thread
{
int32 max_to_count = 10000, tot = 0;
MyThreadClass c(max_to_count, &tot);
RunMultiThreaded(c);
KALDI_ASSERT(tot == (10000 * (10000 - 1)) / 2);
}
}
void TestMutex() {
for (int32 i = 0; i < 4; i++) {
Mutex mut;
for (int32 i = 0; i < 100; i++) {
if (rand() % 2 == 0) {
mut.Lock();
KALDI_ASSERT(!mut.TryLock());
mut.Unlock();
} else {
KALDI_ASSERT(mut.TryLock());
mut.Unlock();
}
}
}
}
} // end namespace kaldi.
int main() {
using namespace kaldi;
TestThreads();
for (int i = 0; i < 20; i++) TestMutex();
}
<|start_filename|>tonic-suite/asr/src/transform/lda-estimate.cc<|end_filename|>
// transform/lda-estimate.cc
// Copyright 2009-2011 <NAME>
// 2013 Johns Hopkins University
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "transform/lda-estimate.h"
namespace kaldi {
void LdaEstimate::Init(int32 num_classes, int32 dimension) {
zero_acc_.Resize(num_classes);
first_acc_.Resize(num_classes, dimension);
total_second_acc_.Resize(dimension);
}
void LdaEstimate::ZeroAccumulators() {
zero_acc_.SetZero();
first_acc_.SetZero();
total_second_acc_.SetZero();
}
void LdaEstimate::Scale(BaseFloat f) {
double d = static_cast<double>(f);
zero_acc_.Scale(d);
first_acc_.Scale(d);
total_second_acc_.Scale(d);
}
void LdaEstimate::Accumulate(const VectorBase<BaseFloat> &data, int32 class_id,
BaseFloat weight) {
KALDI_ASSERT(class_id >= 0);
KALDI_ASSERT(class_id < NumClasses() && data.Dim() == Dim());
Vector<double> data_d(data);
zero_acc_(class_id) += weight;
first_acc_.Row(class_id).AddVec(weight, data_d);
total_second_acc_.AddVec2(weight, data_d);
}
void LdaEstimate::GetStats(SpMatrix<double> *total_covar,
SpMatrix<double> *between_covar,
Vector<double> *total_mean,
double *tot_count) const {
int32 num_class = NumClasses(), dim = Dim();
double sum = zero_acc_.Sum();
*tot_count = sum;
total_covar->Resize(dim);
total_covar->CopyFromSp(total_second_acc_);
total_mean->Resize(dim);
total_mean->AddRowSumMat(1.0, first_acc_);
total_mean->Scale(1.0 / sum);
total_covar->Scale(1.0 / sum);
total_covar->AddVec2(-1.0, *total_mean);
between_covar->Resize(dim);
Vector<double> class_mean(dim);
for (int32 c = 0; c < num_class; c++) {
if (zero_acc_(c) != 0.0) {
class_mean.CopyRowFromMat(first_acc_, c);
class_mean.Scale(1.0 / zero_acc_(c));
between_covar->AddVec2(zero_acc_(c) / sum, class_mean);
}
}
between_covar->AddVec2(-1.0, *total_mean);
}
void LdaEstimate::Estimate(const LdaEstimateOptions &opts, Matrix<BaseFloat> *m,
Matrix<BaseFloat> *mfull) const {
int32 target_dim = opts.dim;
KALDI_ASSERT(target_dim > 0);
  // between-class covar has rank at most C-1
KALDI_ASSERT(target_dim <= Dim() &&
(target_dim < NumClasses() || opts.allow_large_dim));
int32 dim = Dim();
double count;
SpMatrix<double> total_covar, bc_covar;
Vector<double> total_mean;
GetStats(&total_covar, &bc_covar, &total_mean, &count);
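  // What follows is standard LDA estimation: with T the total covariance and
  // B the between-class covariance computed above, we form the within-class
  // covariance W = T - B, take its Cholesky factor C (so W = C C^T), whiten
  // the between-class covariance as C^{-1} B C^{-T}, diagonalize that with an
  // SVD to get U (sorted by singular value), and use the leading rows of
  // U^T C^{-1} as the projection; these are the directions that maximize
  // between-class relative to within-class variance.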
// within-class covariance
SpMatrix<double> wc_covar(total_covar);
wc_covar.AddSp(-1.0, bc_covar);
TpMatrix<double> wc_covar_sqrt(dim);
try {
wc_covar_sqrt.Cholesky(wc_covar);
} catch (...) {
BaseFloat smooth = 1.0e-03 * wc_covar.Trace() / wc_covar.NumRows();
KALDI_LOG << "Cholesky failed (possibly not +ve definite), so adding "
<< smooth << " to diagonal and trying again.\n";
for (int32 i = 0; i < wc_covar.NumRows(); i++) wc_covar(i, i) += smooth;
wc_covar_sqrt.Cholesky(wc_covar);
}
Matrix<double> wc_covar_sqrt_mat(wc_covar_sqrt);
// copy wc_covar_sqrt to Matrix, because it facilitates further use
wc_covar_sqrt_mat.Invert();
SpMatrix<double> tmp_sp(dim);
tmp_sp.AddMat2Sp(1.0, wc_covar_sqrt_mat, kNoTrans, bc_covar, 0.0);
Matrix<double> tmp_mat(tmp_sp);
Matrix<double> svd_u(dim, dim), svd_vt(dim, dim);
Vector<double> svd_d(dim);
tmp_mat.Svd(&svd_d, &svd_u, &svd_vt);
SortSvd(&svd_d, &svd_u);
KALDI_LOG << "Data count is " << count;
KALDI_LOG << "LDA singular values are " << svd_d;
KALDI_LOG << "Sum of all singular values is " << svd_d.Sum();
KALDI_LOG << "Sum of selected singular values is "
<< SubVector<double>(svd_d, 0, target_dim).Sum();
Matrix<double> lda_mat(dim, dim);
lda_mat.AddMatMat(1.0, svd_u, kTrans, wc_covar_sqrt_mat, kNoTrans, 0.0);
// finally, copy first target_dim rows to m
m->Resize(target_dim, dim);
m->CopyFromMat(lda_mat.Range(0, target_dim, 0, dim));
if (mfull != NULL) {
mfull->Resize(dim, dim);
mfull->CopyFromMat(lda_mat);
}
if (opts.within_class_factor != 1.0) { // This is not the normal code path;
// it's intended for use in neural net inputs.
for (int32 i = 0; i < svd_d.Dim(); i++) {
BaseFloat old_var = 1.0 + svd_d(i), // the total variance of that dim..
new_var =
opts.within_class_factor + svd_d(i), // the variance we want..
scale = sqrt(new_var / old_var);
if (i < m->NumRows()) m->Row(i).Scale(scale);
if (mfull != NULL) mfull->Row(i).Scale(scale);
}
}
if (opts.remove_offset) {
AddMeanOffset(total_mean, m);
if (mfull != NULL) AddMeanOffset(total_mean, mfull);
}
}
// static
void LdaEstimate::AddMeanOffset(const VectorBase<double> &mean_dbl,
Matrix<BaseFloat> *projection) {
Vector<BaseFloat> mean(mean_dbl);
Vector<BaseFloat> neg_projected_mean(projection->NumRows());
// the negative
neg_projected_mean.AddMatVec(-1.0, *projection, kNoTrans, mean, 0.0);
projection->Resize(projection->NumRows(), projection->NumCols() + 1,
kCopyData);
projection->CopyColFromVec(neg_projected_mean, projection->NumCols() - 1);
}
void LdaEstimate::Read(std::istream &in_stream, bool binary, bool add) {
int32 num_classes, dim;
std::string token;
ExpectToken(in_stream, binary, "<LDAACCS>");
ExpectToken(in_stream, binary, "<VECSIZE>");
ReadBasicType(in_stream, binary, &dim);
ExpectToken(in_stream, binary, "<NUMCLASSES>");
ReadBasicType(in_stream, binary, &num_classes);
if (add) {
if (NumClasses() != 0 || Dim() != 0) {
if (num_classes != NumClasses() || dim != Dim()) {
KALDI_ERR << "LdaEstimate::Read, dimension or classes count mismatch, "
<< (NumClasses()) << ", " << (Dim()) << ", "
<< " vs. " << (num_classes) << ", " << (dim);
}
} else {
Init(num_classes, dim);
}
} else {
Init(num_classes, dim);
}
// these are needed for demangling the variances.
Vector<double> tmp_zero_acc;
Matrix<double> tmp_first_acc;
SpMatrix<double> tmp_sec_acc;
ReadToken(in_stream, binary, &token);
while (token != "</LDAACCS>") {
if (token == "<ZERO_ACCS>") {
tmp_zero_acc.Read(in_stream, binary, false);
if (!add) zero_acc_.SetZero();
zero_acc_.AddVec(1.0, tmp_zero_acc);
// zero_acc_.Read(in_stream, binary, add);
} else if (token == "<FIRST_ACCS>") {
tmp_first_acc.Read(in_stream, binary, false);
if (!add) first_acc_.SetZero();
first_acc_.AddMat(1.0, tmp_first_acc);
// first_acc_.Read(in_stream, binary, add);
} else if (token == "<SECOND_ACCS>") {
tmp_sec_acc.Read(in_stream, binary, false);
for (int32 c = 0; c < static_cast<int32>(NumClasses()); c++) {
if (tmp_zero_acc(c) != 0)
tmp_sec_acc.AddVec2(1.0 / tmp_zero_acc(c), tmp_first_acc.Row(c));
}
if (!add) total_second_acc_.SetZero();
total_second_acc_.AddSp(1.0, tmp_sec_acc);
// total_second_acc_.Read(in_stream, binary, add);
} else {
KALDI_ERR << "Unexpected token '" << token << "' in file ";
}
ReadToken(in_stream, binary, &token);
}
}
void LdaEstimate::Write(std::ostream &out_stream, bool binary) const {
WriteToken(out_stream, binary, "<LDAACCS>");
WriteToken(out_stream, binary, "<VECSIZE>");
WriteBasicType(out_stream, binary, static_cast<int32>(Dim()));
WriteToken(out_stream, binary, "<NUMCLASSES>");
WriteBasicType(out_stream, binary, static_cast<int32>(NumClasses()));
WriteToken(out_stream, binary, "<ZERO_ACCS>");
Vector<BaseFloat> zero_acc_bf(zero_acc_);
zero_acc_bf.Write(out_stream, binary);
WriteToken(out_stream, binary, "<FIRST_ACCS>");
Matrix<BaseFloat> first_acc_bf(first_acc_);
first_acc_bf.Write(out_stream, binary);
WriteToken(out_stream, binary, "<SECOND_ACCS>");
SpMatrix<double> tmp_sec_acc(total_second_acc_);
for (int32 c = 0; c < static_cast<int32>(NumClasses()); c++) {
if (zero_acc_(c) != 0)
tmp_sec_acc.AddVec2(-1.0 / zero_acc_(c), first_acc_.Row(c));
}
SpMatrix<BaseFloat> tmp_sec_acc_bf(tmp_sec_acc);
tmp_sec_acc_bf.Write(out_stream, binary);
WriteToken(out_stream, binary, "</LDAACCS>");
}
} // End of namespace kaldi
<|start_filename|>tonic-suite/asr/src/sgmm2/estimate-am-sgmm2.h<|end_filename|>
// sgmm2/estimate-am-sgmm2.h
// Copyright 2009-2011 Microsoft Corporation; <NAME>;
// Saarland University (Author: <NAME>);
// <NAME>; <NAME>;
// Copyright 2012-2013 Johns Hopkins University (Author: <NAME>)
// <NAME>; <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_SGMM2_ESTIMATE_AM_SGMM2_H_
#define KALDI_SGMM2_ESTIMATE_AM_SGMM2_H_ 1
#include <string>
#include <vector>
#include "sgmm2/am-sgmm2.h"
#include "gmm/model-common.h"
#include "itf/options-itf.h"
#include "thread/kaldi-thread.h"
namespace kaldi {
/** \struct MleAmSgmm2Options
* Configuration variables needed in the SGMM estimation process.
*/
struct MleAmSgmm2Options {
/// Smoothing constant for sub-state weights [count to add to each one].
BaseFloat tau_c;
/// Floor covariance matrices Sigma_i to this times average cov.
BaseFloat cov_floor;
/// ratio to dim below which we use diagonal. default 2, set to inf for diag.
BaseFloat cov_diag_ratio;
/// Max on condition of matrices in update beyond which we do not update.
/// Should probably be related to numerical properties of machine
/// or BaseFloat type.
BaseFloat max_cond;
/// Set check_v to true if you want to use the "checking" version of the
/// update
/// for the v's, in which it checks the "real" objective function value and
/// backtracks if necessary;
bool check_v;
bool renormalize_V; // Renormalize the phonetic space.
bool renormalize_N; // Renormalize the speaker space.
/// Number of iters when re-estimating weight projections "w".
int weight_projections_iters;
/// The "sequential" weight update that checks each i in turn.
/// (if false, uses the "parallel" one).
bool use_sequential_weight_update;
BaseFloat epsilon; ///< very small value used to prevent SVD crashing.
BaseFloat max_impr_u; ///< max improvement per frame allowed in update of u.
BaseFloat tau_map_M; ///< For MAP update of the phonetic subspace M
int map_M_prior_iters; ///< num of iterations to update the prior of M
bool full_row_cov; ///< Estimate row covariance instead of using I
bool full_col_cov; ///< Estimate col covariance instead of using I
MleAmSgmm2Options() {
cov_floor = 0.025;
tau_c = 2.0;
cov_diag_ratio = 2.0; // set this to very large to get diagonal-cov models.
max_cond = 1.0e+05;
epsilon = 1.0e-40;
renormalize_V = true;
renormalize_N =
false; // default to false since will invalidate spk vectors
// on disk.
weight_projections_iters = 3;
max_impr_u = 0.25;
map_M_prior_iters = 5;
tau_map_M = 0.0; // No MAP update by default (~500-1000 depending on prior)
full_row_cov = false;
full_col_cov = false;
}
void Register(OptionsItf *po) {
std::string module = "MleAmSgmm2Options: ";
po->Register("tau-c", &tau_c,
module + "Count for smoothing weight update.");
po->Register("cov-floor", &cov_floor,
module + "Covariance floor (fraction of average covariance).");
po->Register(
"cov-diag-ratio", &cov_diag_ratio,
module + "Minimum occ/dim ratio below which use diagonal covariances.");
po->Register("max-cond", &max_cond, module +
"Maximum condition number used to "
"regularize the solution of "
"certain quadratic auxiliary "
"functions.");
po->Register(
"weight-projections-iters", &weight_projections_iters,
module + "Number for iterations for weight projection estimation.");
po->Register("renormalize-v", &renormalize_V,
module +
"If true, renormalize "
"the phonetic-subspace vectors to have meaningful sizes.");
po->Register("renormalize-n", &renormalize_N,
module +
"If true, renormalize "
"the speaker subspace to have meaningful sizes.");
po->Register("max-impr-u", &max_impr_u,
module +
"Maximum objective function "
"improvement per frame allowed in update of u (to "
"maintain stability.");
po->Register("tau-map-M", &tau_map_M, module +
"Smoothing for MAP estimate "
"of M (0 means ML update).");
po->Register(
"map-M-prior-iters", &map_M_prior_iters,
module + "Number of iterations to estimate prior covariances for M.");
po->Register("full-row-cov", &full_row_cov,
module + "Estimate row covariance instead of using I.");
po->Register("full-col-cov", &full_col_cov,
module + "Estimate column covariance instead of using I.");
}
};
/** \class MleAmSgmm2Accs
* Class for the accumulators associated with the phonetic-subspace model
* parameters
*/
class MleAmSgmm2Accs {
public:
explicit MleAmSgmm2Accs(BaseFloat rand_prune = 1.0e-05)
: total_frames_(0.0),
total_like_(0.0),
feature_dim_(0),
phn_space_dim_(0),
spk_space_dim_(0),
num_gaussians_(0),
num_pdfs_(0),
num_groups_(0),
rand_prune_(rand_prune) {}
MleAmSgmm2Accs(const AmSgmm2 &model, SgmmUpdateFlagsType flags,
bool have_spk_vecs, BaseFloat rand_prune = 1.0e-05)
: total_frames_(0.0), total_like_(0.0), rand_prune_(rand_prune) {
ResizeAccumulators(model, flags, have_spk_vecs);
}
~MleAmSgmm2Accs();
void Read(std::istream &in_stream, bool binary, bool add);
void Write(std::ostream &out_stream, bool binary) const;
/// Checks the various accumulators for correct sizes given a model. With
/// wrong sizes, assertion failure occurs. When the show_properties argument
/// is set to true, dimensions and presence/absence of the various
/// accumulators are printed. For use when accumulators are read from file.
void Check(const AmSgmm2 &model, bool show_properties = true) const;
/// Resizes the accumulators to the correct sizes given the model. The flags
/// argument controls which accumulators to resize.
void ResizeAccumulators(const AmSgmm2 &model, SgmmUpdateFlagsType flags,
bool have_spk_vecs);
/// Returns likelihood.
BaseFloat Accumulate(const AmSgmm2 &model,
const Sgmm2PerFrameDerivedVars &frame_vars,
int32 pdf_index, // == j2.
BaseFloat weight, Sgmm2PerSpkDerivedVars *spk_vars);
/// Returns count accumulated (may differ from posteriors.Sum()
/// due to weight pruning).
BaseFloat AccumulateFromPosteriors(const AmSgmm2 &model,
const Sgmm2PerFrameDerivedVars &frame_vars,
const Matrix<BaseFloat> &posteriors,
int32 pdf_index, // == j2.
Sgmm2PerSpkDerivedVars *spk_vars);
/// Accumulates global stats for the current speaker (if applicable). If
/// flags contains kSgmmSpeakerProjections (N), or
/// kSgmmSpeakerWeightProjections (u), must call this after finishing the
/// speaker's data.
void CommitStatsForSpk(const AmSgmm2 &model,
const Sgmm2PerSpkDerivedVars &spk_vars);
/// Accessors
void GetStateOccupancies(Vector<BaseFloat> *occs) const;
int32 FeatureDim() const { return feature_dim_; }
int32 PhoneSpaceDim() const { return phn_space_dim_; }
int32 NumPdfs() const { return num_pdfs_; } // returns J2
int32 NumGroups() const { return num_groups_; } // returns J1
int32 NumGauss() const { return num_gaussians_; }
private:
/// The stats which are not tied to any state.
/// Stats Y_{i} for phonetic-subspace projections M; Dim is [I][D][S].
std::vector<Matrix<double> > Y_;
/// Stats Z_{i} for speaker-subspace projections N. Dim is [I][D][T].
std::vector<Matrix<double> > Z_;
/// R_{i}, quadratic term for speaker subspace estimation. Dim is [I][T][T]
std::vector<SpMatrix<double> > R_;
/// S_{i}^{-}, scatter of adapted feature vectors x_{i}(t). Dim is [I][D][D].
std::vector<SpMatrix<double> > S_;
/// The SGMM state specific stats.
/// Statistics y_{jm} for state vectors v_{jm}. dimension is [J1][#mix][S].
std::vector<Matrix<double> > y_;
/// Gaussian occupancies gamma_{jmi} for each substate and Gaussian index,
/// pooled over groups. Dim is [J1][#mix][I].
std::vector<Matrix<double> > gamma_;
/// [SSGMM] These a_{jmi} quantities are dimensionally the same
/// as the gamma quantities. They're needed to estimate the v_{jm}
/// and w_i quantities in the symmetric SGMM. Dimension is [J1][#mix][S]
std::vector<Matrix<double> > a_;
/// [SSGMM] each row is one of the t_i quantities in the less-exact
/// version of the SSGMM update for the speaker weight projections.
/// Dimension is [I][T]
Matrix<double> t_;
/// [SSGMM], this is a per-speaker variable storing the a_i^{(s)}
/// quantities that we will use in order to compute the non-speaker-
/// specific quantities [see eqs. 53 and 54 in techreport]. Note:
/// there is a separate variable a_s_ in class MleSgmm2SpeakerAccs,
/// which is the same thing but for purposes of computing
/// the speaker-vector v^{(s)}.
Vector<double> a_s_;
/// the U_i quantities from the less-exact version of the SSGMM update for the
/// speaker weight projections. Dimension is [I][T][T]
std::vector<SpMatrix<double> > U_;
/// Sub-state occupancies gamma_{jm}^{(c)} for each sub-state. In the
/// SCTM version of the SGMM, for compactness we store two separate
/// sets of gamma statistics, one to estimate the v_{jm} quantities
/// and one to estimate the sub-state weights c_{jm}.
std::vector<Vector<double> > gamma_c_;
/// gamma_{i}^{(s)}. Per-speaker counts for each Gaussian. Dimension is [I]
/// Needed for stats R_. This can be viewed as a temporary variable; it
/// does not form part of the stats that we eventually dump to disk.
Vector<double> gamma_s_;
double total_frames_, total_like_;
/// Dimensionality of various subspaces
int32 feature_dim_, phn_space_dim_, spk_space_dim_;
int32 num_gaussians_, num_pdfs_, num_groups_; ///< Other model specifications
BaseFloat rand_prune_;
KALDI_DISALLOW_COPY_AND_ASSIGN(MleAmSgmm2Accs);
friend class MleAmSgmm2Updater;
friend class EbwAmSgmm2Updater;
};
/** \class MleAmSgmmUpdater
* Contains the functions needed to update the SGMM parameters.
*/
class MleAmSgmm2Updater {
public:
explicit MleAmSgmm2Updater(const MleAmSgmm2Options &options)
: options_(options) {}
void Reconfigure(const MleAmSgmm2Options &options) { options_ = options; }
void Update(const MleAmSgmm2Accs &accs, AmSgmm2 *model,
SgmmUpdateFlagsType flags);
private:
friend class UpdateWClass;
friend class UpdatePhoneVectorsClass;
friend class EbwEstimateAmSgmm2;
/// Compute the Q_i quantities (Eq. 64).
static void ComputeQ(const MleAmSgmm2Accs &accs, const AmSgmm2 &model,
std::vector<SpMatrix<double> > *Q);
  /// Compute the S_means quantities, minus the sum: (Y_i M_i^T + M_i Y_i^T).
static void ComputeSMeans(const MleAmSgmm2Accs &accs, const AmSgmm2 &model,
std::vector<SpMatrix<double> > *S_means);
friend class EbwAmSgmm2Updater;
MleAmSgmm2Options options_;
// Called from UpdatePhoneVectors; updates a subset of states
// (relates to multi-threading).
void UpdatePhoneVectorsInternal(const MleAmSgmm2Accs &accs,
const std::vector<SpMatrix<double> > &H,
const std::vector<Matrix<double> > &log_a,
AmSgmm2 *model, double *auxf_impr,
int32 num_threads, int32 thread_id) const;
double UpdatePhoneVectors(const MleAmSgmm2Accs &accs,
const std::vector<SpMatrix<double> > &H,
const std::vector<Matrix<double> > &log_a,
AmSgmm2 *model) const;
double UpdateM(const MleAmSgmm2Accs &accs,
const std::vector<SpMatrix<double> > &Q,
const Vector<double> &gamma_i, AmSgmm2 *model);
void RenormalizeV(const MleAmSgmm2Accs &accs, AmSgmm2 *model,
const Vector<double> &gamma_i,
const std::vector<SpMatrix<double> > &H);
double UpdateN(const MleAmSgmm2Accs &accs, const Vector<double> &gamma_i,
AmSgmm2 *model);
void RenormalizeN(const MleAmSgmm2Accs &accs, const Vector<double> &gamma_i,
AmSgmm2 *model);
double UpdateVars(const MleAmSgmm2Accs &accs,
const std::vector<SpMatrix<double> > &S_means,
const Vector<double> &gamma_i, AmSgmm2 *model);
// Update for the phonetic-subspace weight projections w_i
double UpdateW(const MleAmSgmm2Accs &accs,
const std::vector<Matrix<double> > &log_a,
const Vector<double> &gamma_i, AmSgmm2 *model);
// Update for the speaker-subspace weight projections u_i [SSGMM]
double UpdateU(const MleAmSgmm2Accs &accs, const Vector<double> &gamma_i,
AmSgmm2 *model);
/// Called, multithreaded, inside UpdateW
static void UpdateWGetStats(const MleAmSgmm2Accs &accs, const AmSgmm2 &model,
const Matrix<double> &w,
const std::vector<Matrix<double> > &log_a,
Matrix<double> *F_i, Matrix<double> *g_i,
double *tot_like, int32 num_threads,
int32 thread_id);
double UpdateSubstateWeights(const MleAmSgmm2Accs &accs, AmSgmm2 *model);
static void ComputeLogA(const MleAmSgmm2Accs &accs,
std::vector<Matrix<double> > *log_a); // [SSGMM]
void ComputeMPrior(AmSgmm2 *model); // TODO(arnab): Maybe make this static?
double MapUpdateM(const MleAmSgmm2Accs &accs,
const std::vector<SpMatrix<double> > &Q,
const Vector<double> &gamma_i, AmSgmm2 *model);
KALDI_DISALLOW_COPY_AND_ASSIGN(MleAmSgmm2Updater);
MleAmSgmm2Updater() {} // Prevent unconfigured updater.
};
/** \class MleSgmm2SpeakerAccs
* Class for the accumulators required to update the speaker
* vectors v_s.
* Note: if you have multiple speakers you will want to initialize
* this just once and call Clear() after you're done with each speaker,
* rather than creating a new object for each speaker, since the
* initialization function does nontrivial work.
*/
class MleSgmm2SpeakerAccs {
public:
/// Initialize the object. Error if speaker subspace not set up.
MleSgmm2SpeakerAccs(const AmSgmm2 &model, BaseFloat rand_prune_ = 1.0e-05);
/// Clear the statistics.
void Clear();
/// Accumulate statistics. Returns per-frame log-likelihood.
BaseFloat Accumulate(const AmSgmm2 &model,
const Sgmm2PerFrameDerivedVars &frame_vars,
int32 pdf_index, BaseFloat weight,
Sgmm2PerSpkDerivedVars *spk_vars);
/// Accumulate statistics, given posteriors. Returns total
/// count accumulated, which may differ from posteriors.Sum()
/// due to randomized pruning.
BaseFloat AccumulateFromPosteriors(const AmSgmm2 &model,
const Sgmm2PerFrameDerivedVars &frame_vars,
const Matrix<BaseFloat> &posteriors,
int32 pdf_index,
Sgmm2PerSpkDerivedVars *spk_vars);
/// Update speaker vector. If v_s was empty, will assume it started as zero
/// and will resize it to the speaker-subspace size.
void Update(const AmSgmm2 &model,
BaseFloat min_count, // e.g. 100
Vector<BaseFloat> *v_s, BaseFloat *objf_impr_out,
BaseFloat *count_out);
private:
// Update without speaker-dependent weights (vectors u_i),
// i.e. not symmetric SGMM (SSGMM)
void UpdateNoU(Vector<BaseFloat> *v_s, BaseFloat *objf_impr_out,
BaseFloat *count_out);
// Update for SSGMM
void UpdateWithU(const AmSgmm2 &model, Vector<BaseFloat> *v_s,
BaseFloat *objf_impr_out, BaseFloat *count_out);
/// Statistics for speaker adaptation (vectors), stored per-speaker.
/// Per-speaker stats for vectors, y^{(s)}. Dimension [T].
Vector<double> y_s_;
/// gamma_{i}^{(s)}. Per-speaker counts for each Gaussian. Dimension is [I]
Vector<double> gamma_s_;
/// a_i^{(s)}. For SSGMM.
Vector<double> a_s_;
/// The following variable does not change per speaker, it just
/// relates to the speaker subspace.
/// Eq. (82): H_{i}^{spk} = N_{i}^T \Sigma_{i}^{-1} N_{i}
std::vector<SpMatrix<double> > H_spk_;
/// N_i^T \Sigma_{i}^{-1}. Needed for y^{(s)}
std::vector<Matrix<double> > NtransSigmaInv_;
/// small constant to randomly prune tiny posteriors
BaseFloat rand_prune_;
};
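// A hedged per-speaker usage sketch (the frame loop, frame_vars and pdf_index
// are placeholders; the real accumulation code lives in the sgmm2 binaries):
//
//   MleSgmm2SpeakerAccs spk_accs(model);  // construct once, reuse per speaker
//   for each speaker s {
//     spk_accs.Clear();
//     Sgmm2PerSpkDerivedVars spk_vars;    // set up for speaker s
//     for each frame t of speaker s
//       spk_accs.Accumulate(model, frame_vars, pdf_index, 1.0, &spk_vars);
//     Vector<BaseFloat> v_s;              // empty -> treated as starting at 0
//     BaseFloat objf_impr, count;
//     spk_accs.Update(model, 100.0, &v_s, &objf_impr, &count);
//   }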
// This class, used in multi-core implementation of the updates of the "w_i"
// quantities, was previously in estimate-am-sgmm.cc, but is being moved to the
// header so it can be used in estimate-am-sgmm-ebw.cc. It is responsible for
// computing, in parallel, the F_i and g_i quantities used in the updates of
// w_i.
class UpdateWClass : public MultiThreadable {
public:
UpdateWClass(const MleAmSgmm2Accs &accs, const AmSgmm2 &model,
const Matrix<double> &w,
const std::vector<Matrix<double> > &log_a, Matrix<double> *F_i,
Matrix<double> *g_i, double *tot_like)
: accs_(accs),
model_(model),
w_(w),
log_a_(log_a),
F_i_ptr_(F_i),
g_i_ptr_(g_i),
tot_like_ptr_(tot_like) {
tot_like_ = 0.0;
F_i_.Resize(F_i->NumRows(), F_i->NumCols());
g_i_.Resize(g_i->NumRows(), g_i->NumCols());
}
~UpdateWClass() {
F_i_ptr_->AddMat(1.0, F_i_, kNoTrans);
g_i_ptr_->AddMat(1.0, g_i_, kNoTrans);
*tot_like_ptr_ += tot_like_;
}
inline void operator()() {
// Note: give them local copy of the sums we're computing,
// which will be propagated to the total sums in the destructor.
MleAmSgmm2Updater::UpdateWGetStats(accs_, model_, w_, log_a_, &F_i_, &g_i_,
&tot_like_, num_threads_, thread_id_);
}
private:
const MleAmSgmm2Accs &accs_;
const AmSgmm2 &model_;
const Matrix<double> &w_;
const std::vector<Matrix<double> > &log_a_;
Matrix<double> *F_i_ptr_;
Matrix<double> *g_i_ptr_;
Matrix<double> F_i_;
Matrix<double> g_i_;
double *tot_like_ptr_;
double tot_like_;
};
} // namespace kaldi
#endif // KALDI_SGMM2_ESTIMATE_AM_SGMM2_H_
<|start_filename|>tonic-suite/asr/src/nnet2/widen-nnet.cc<|end_filename|>
// nnet2/widen-nnet.cc
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet2/widen-nnet.h"
#include "gmm/model-common.h" // for GetSplitTargets()
#include <numeric> // for std::accumulate
namespace kaldi {
namespace nnet2 {
void AffineComponent::Widen(
int32 new_dim, BaseFloat param_stddev, BaseFloat bias_stddev,
std::vector<NonlinearComponent *> c2, // will usually
// have just
// one element.
AffineComponent *c3) {
int32 old_dim = this->OutputDim(), extra_dim = new_dim - old_dim;
KALDI_ASSERT(!c2.empty());
if (new_dim <= old_dim) {
KALDI_WARN << "Not widening component because new dim " << new_dim
<< " <= old dim " << old_dim;
return;
}
this->bias_params_.Resize(new_dim, kCopyData);
this->bias_params_.Range(old_dim, extra_dim).SetRandn();
this->bias_params_.Range(old_dim, extra_dim).Scale(bias_stddev);
this->linear_params_.Resize(new_dim, InputDim(), kCopyData);
this->linear_params_.Range(old_dim, extra_dim, 0, InputDim()).SetRandn();
this->linear_params_.Range(old_dim, extra_dim, 0, InputDim())
.Scale(param_stddev);
for (size_t i = 0; i < c2.size(); i++) // Change dimension of nonlinear
c2[i]->SetDim(new_dim); // components
// Change dimension of next affine component [extend with zeros,
// so the existing outputs do not change in value]
c3->linear_params_.Resize(c3->OutputDim(), new_dim, kCopyData);
}
void WidenNnet(const NnetWidenConfig &widen_config, Nnet *nnet) {
int32 C = nnet->NumComponents();
int32 num_widened = 0;
for (int32 c = 0; c < C - 3; c++) {
AffineComponent *c1 =
dynamic_cast<AffineComponent *>(&(nnet->GetComponent(c)));
if (c1 == NULL) continue;
std::vector<NonlinearComponent *>
c2; // normally just one element, but allow two right now.
c2.push_back(
dynamic_cast<NonlinearComponent *>(&(nnet->GetComponent(c + 1))));
if (c2.back() == NULL) continue;
c2.push_back(
dynamic_cast<NonlinearComponent *>(&(nnet->GetComponent(c + 2))));
AffineComponent *c3;
if (c2.back() == NULL) {
c2.pop_back();
c3 = dynamic_cast<AffineComponent *>(&(nnet->GetComponent(c + 2)));
} else {
if (c + 3 >= C) continue;
c3 = dynamic_cast<AffineComponent *>(&(nnet->GetComponent(c + 3)));
}
if (c3 == NULL) continue;
BaseFloat param_stddev =
widen_config.param_stddev_factor / sqrt(1.0 * c1->InputDim());
KALDI_LOG << "Widening component " << c << " from " << c1->OutputDim()
<< " to " << widen_config.hidden_layer_dim;
c1->Widen(widen_config.hidden_layer_dim, param_stddev,
widen_config.bias_stddev, c2, c3);
num_widened++;
}
nnet->Check();
KALDI_LOG << "Widened " << num_widened << " components.";
}
} // namespace nnet2
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/nnet2/nnet-nnet.cc<|end_filename|>
// nnet2/nnet-nnet.cc
// Copyright 2011-2012 <NAME>
// 2012-2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "nnet2/nnet-nnet.h"
#include "util/stl-utils.h"
namespace kaldi {
namespace nnet2 {
int32 Nnet::OutputDim() const {
KALDI_ASSERT(!components_.empty());
return components_.back()->OutputDim();
}
int32 Nnet::InputDim() const {
KALDI_ASSERT(!components_.empty());
return components_.front()->InputDim();
}
int32 Nnet::LeftContext() const {
KALDI_ASSERT(!components_.empty());
int32 ans = 0;
for (size_t i = 0; i < components_.size(); i++)
ans += components_[i]->LeftContext();
return ans;
}
int32 Nnet::RightContext() const {
KALDI_ASSERT(!components_.empty());
int32 ans = 0;
for (size_t i = 0; i < components_.size(); i++)
ans += components_[i]->RightContext();
return ans;
}
const Component &Nnet::GetComponent(int32 component) const {
KALDI_ASSERT(static_cast<size_t>(component) < components_.size());
return *(components_[component]);
}
Component &Nnet::GetComponent(int32 component) {
KALDI_ASSERT(static_cast<size_t>(component) < components_.size());
return *(components_[component]);
}
void Nnet::SetZero(bool treat_as_gradient) {
for (size_t i = 0; i < components_.size(); i++) {
UpdatableComponent *uc = dynamic_cast<UpdatableComponent *>(components_[i]);
if (uc != NULL) uc->SetZero(treat_as_gradient);
NonlinearComponent *nc = dynamic_cast<NonlinearComponent *>(components_[i]);
if (nc != NULL) nc->Scale(0.0);
}
}
void Nnet::Write(std::ostream &os, bool binary) const {
Check();
WriteToken(os, binary, "<Nnet>");
int32 num_components = components_.size();
WriteToken(os, binary, "<NumComponents>");
WriteBasicType(os, binary, num_components);
WriteToken(os, binary, "<Components>");
for (int32 c = 0; c < num_components; c++) {
components_[c]->Write(os, binary);
if (!binary) os << std::endl;
}
WriteToken(os, binary, "</Components>");
WriteToken(os, binary, "</Nnet>");
}
void Nnet::Read(std::istream &is, bool binary) {
Destroy();
ExpectToken(is, binary, "<Nnet>");
int32 num_components;
ExpectToken(is, binary, "<NumComponents>");
ReadBasicType(is, binary, &num_components);
ExpectToken(is, binary, "<Components>");
components_.resize(num_components);
for (int32 c = 0; c < num_components; c++)
components_[c] = Component::ReadNew(is, binary);
ExpectToken(is, binary, "</Components>");
ExpectToken(is, binary, "</Nnet>");
SetIndexes();
Check();
}
void Nnet::ZeroStats() {
for (size_t i = 0; i < components_.size(); i++) {
NonlinearComponent *nonlinear_component =
dynamic_cast<NonlinearComponent *>(components_[i]);
if (nonlinear_component != NULL)
nonlinear_component->Scale(0.0); // Zero the stats this way.
}
}
void Nnet::Destroy() {
while (!components_.empty()) {
delete components_.back();
components_.pop_back();
}
}
void Nnet::ComponentDotProducts(const Nnet &other,
VectorBase<BaseFloat> *dot_prod) const {
KALDI_ASSERT(dot_prod->Dim() == NumUpdatableComponents());
int32 index = 0;
for (size_t i = 0; i < components_.size(); i++) {
UpdatableComponent *uc1 =
dynamic_cast<UpdatableComponent *>(components_[i]);
const UpdatableComponent *uc2 =
dynamic_cast<const UpdatableComponent *>(&(other.GetComponent(i)));
KALDI_ASSERT((uc1 != NULL) == (uc2 != NULL));
if (uc1 != NULL) {
(*dot_prod)(index) = uc1->DotProduct(*uc2);
index++;
}
}
KALDI_ASSERT(index == NumUpdatableComponents());
}
Nnet::Nnet(const Nnet &other) : components_(other.components_.size()) {
for (size_t i = 0; i < other.components_.size(); i++)
components_[i] = other.components_[i]->Copy();
SetIndexes();
Check();
}
Nnet::Nnet(const Nnet &other1, const Nnet &other2) {
int32 dim1 = other1.OutputDim(), dim2 = other2.InputDim();
if (dim1 != dim2)
KALDI_ERR << "Concatenating neural nets: dimension mismatch " << dim1
<< " vs. " << dim2;
for (size_t i = 0; i < other1.components_.size(); i++)
components_.push_back(other1.components_[i]->Copy());
for (size_t i = 0; i < other2.components_.size(); i++)
components_.push_back(other2.components_[i]->Copy());
SetIndexes();
Check();
}
Nnet &Nnet::operator=(const Nnet &other) {
Destroy();
components_.resize(other.components_.size());
for (size_t i = 0; i < other.components_.size(); i++)
components_[i] = other.components_[i]->Copy();
SetIndexes();
Check();
return *this;
}
std::string Nnet::Info() const {
std::ostringstream ostr;
ostr << "num-components " << NumComponents() << std::endl;
ostr << "num-updatable-components " << NumUpdatableComponents() << std::endl;
ostr << "left-context " << LeftContext() << std::endl;
ostr << "right-context " << RightContext() << std::endl;
ostr << "input-dim " << InputDim() << std::endl;
ostr << "output-dim " << OutputDim() << std::endl;
ostr << "parameter-dim " << GetParameterDim() << std::endl;
for (int32 i = 0; i < NumComponents(); i++)
ostr << "component " << i << " : " << components_[i]->Info() << std::endl;
return ostr.str();
}
void Nnet::Check() const {
for (size_t i = 0; i + 1 < components_.size(); i++) {
KALDI_ASSERT(components_[i] != NULL);
int32 output_dim = components_[i]->OutputDim(),
next_input_dim = components_[i + 1]->InputDim();
KALDI_ASSERT(output_dim == next_input_dim);
KALDI_ASSERT(components_[i]->Index() == static_cast<int32>(i));
}
}
void Nnet::Init(std::istream &is) {
Destroy();
std::string line;
/* example config file as follows. The things in brackets specify the context
splicing for each layer, and after that is the info about the actual layer.
Imagine the input dim is 13, and the speaker dim is 40, so (13 x 9) + 40 =
527.
The config file might be as follows; the lines beginning with # are
comments.
# layer-type layer-options
AffineLayer 0.01 0.001 527 1000 0.04356
*/
components_.clear();
while (getline(is, line)) {
std::istringstream line_is(line);
line_is >> std::ws; // Eat up whitespace.
if (line_is.peek() == '#' || line_is.eof()) continue; // Comment or empty.
Component *c = Component::NewFromString(line);
KALDI_ASSERT(c != NULL);
components_.push_back(c);
}
SetIndexes();
Check();
}
void Nnet::Init(std::vector<Component *> *components) {
Destroy();
components_.swap(*components);
SetIndexes();
Check();
}
void Nnet::ScaleLearningRates(BaseFloat factor) {
std::ostringstream ostr;
for (int32 c = 0; c < NumComponents(); c++) {
UpdatableComponent *uc = dynamic_cast<UpdatableComponent *>(components_[c]);
if (uc != NULL) { // Updatable component...
uc->SetLearningRate(uc->LearningRate() * factor);
ostr << uc->LearningRate() << " ";
}
}
KALDI_LOG << "Scaled learning rates by " << factor
<< ", new learning rates are " << ostr.str();
}
void Nnet::SetLearningRates(BaseFloat learning_rate) {
for (int32 c = 0; c < NumComponents(); c++) {
UpdatableComponent *uc = dynamic_cast<UpdatableComponent *>(components_[c]);
if (uc != NULL) { // Updatable component...
uc->SetLearningRate(learning_rate);
}
}
KALDI_LOG << "Set learning rates to " << learning_rate;
}
void Nnet::AdjustLearningRates(
const VectorBase<BaseFloat> &old_model_old_gradient,
const VectorBase<BaseFloat> &new_model_old_gradient,
const VectorBase<BaseFloat> &old_model_new_gradient,
const VectorBase<BaseFloat> &new_model_new_gradient,
BaseFloat measure_at, // where to measure gradient, on line between old and
// new model;
// 0.5 < measure_at <= 1.0.
BaseFloat ratio, // e.g. 1.1; ratio by which we change learning rate.
BaseFloat max_learning_rate) {
std::vector<BaseFloat> new_lrates;
KALDI_ASSERT(old_model_old_gradient.Dim() == NumUpdatableComponents() &&
new_model_old_gradient.Dim() == NumUpdatableComponents() &&
old_model_new_gradient.Dim() == NumUpdatableComponents() &&
new_model_new_gradient.Dim() == NumUpdatableComponents());
KALDI_ASSERT(ratio >= 1.0);
KALDI_ASSERT(measure_at > 0.5 && measure_at <= 1.0);
std::string changes_str;
std::string dotprod_str;
BaseFloat inv_ratio = 1.0 / ratio;
int32 index = 0;
for (int32 c = 0; c < NumComponents(); c++) {
UpdatableComponent *uc = dynamic_cast<UpdatableComponent *>(components_[c]);
if (uc == NULL) { // Non-updatable component.
KALDI_ASSERT(old_model_old_gradient(c) == 0.0);
continue;
} else {
BaseFloat grad_dotprod_at_end = new_model_new_gradient(index) -
old_model_new_gradient(index),
grad_dotprod_at_start = new_model_old_gradient(index) -
old_model_old_gradient(index),
grad_dotprod_interp =
measure_at * grad_dotprod_at_end +
(1.0 - measure_at) * grad_dotprod_at_start;
// grad_dotprod_interp will be positive if we want more of the gradient term
// -> a faster learning rate for this component.
BaseFloat lrate = uc->LearningRate();
lrate *= (grad_dotprod_interp > 0 ? ratio : inv_ratio);
changes_str =
changes_str + (grad_dotprod_interp > 0 ? " increase" : " decrease");
dotprod_str =
dotprod_str +
(new_model_new_gradient(index) > 0 ? " positive" : " negative");
if (lrate > max_learning_rate) lrate = max_learning_rate;
new_lrates.push_back(lrate);
uc->SetLearningRate(lrate);
index++;
}
}
KALDI_ASSERT(index == NumUpdatableComponents());
KALDI_VLOG(1) << "Changes to learning rates: " << changes_str;
KALDI_VLOG(1) << "Dot product of model with validation gradient is "
<< dotprod_str;
std::ostringstream lrate_str;
for (size_t i = 0; i < new_lrates.size(); i++)
lrate_str << new_lrates[i] << ' ';
KALDI_VLOG(1) << "Learning rates are " << lrate_str.str();
}
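// Worked example (editor's addition): with ratio = 1.1 and measure_at = 1.0,
// a component for which new_model_new_gradient = 0.7 and
// old_model_new_gradient = 0.4 has grad_dotprod_interp = 0.3 > 0, so its
// learning rate is multiplied by 1.1 (then capped at max_learning_rate);
// a negative interpolated dot-product would instead multiply it by 1/1.1.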
int32 Nnet::NumUpdatableComponents() const {
int32 ans = 0;
for (int32 i = 0; i < NumComponents(); i++)
if (dynamic_cast<const UpdatableComponent *>(&(GetComponent(i))) != NULL)
ans++;
return ans;
}
void Nnet::ScaleComponents(const VectorBase<BaseFloat> &scale_params) {
KALDI_ASSERT(scale_params.Dim() == this->NumUpdatableComponents());
int32 i = 0;
for (int32 j = 0; j < NumComponents(); j++) {
UpdatableComponent *uc =
dynamic_cast<UpdatableComponent *>(&(GetComponent(j)));
if (uc != NULL) {
uc->Scale(scale_params(i));
i++;
}
}
KALDI_ASSERT(i == scale_params.Dim());
}
// Scales all UpdatableComponents and all NonlinearComponents.
void Nnet::Scale(BaseFloat scale) {
for (int32 i = 0; i < NumComponents(); i++) {
UpdatableComponent *uc =
dynamic_cast<UpdatableComponent *>(&(GetComponent(i)));
if (uc != NULL) uc->Scale(scale);
NonlinearComponent *nc =
dynamic_cast<NonlinearComponent *>(&(GetComponent(i)));
if (nc != NULL) nc->Scale(scale);
}
}
void Nnet::CopyStatsFrom(const Nnet &other) {
KALDI_ASSERT(NumComponents() == other.NumComponents());
for (int32 i = 0; i < NumComponents(); i++) {
NonlinearComponent *nc =
dynamic_cast<NonlinearComponent *>(&(GetComponent(i)));
const NonlinearComponent *nc_other =
dynamic_cast<const NonlinearComponent *>(&(other.GetComponent(i)));
if (nc != NULL) {
nc->Scale(0.0);
nc->Add(1.0, *nc_other);
}
}
}
void Nnet::SetLearningRates(const VectorBase<BaseFloat> &learning_rates) {
KALDI_ASSERT(learning_rates.Dim() == this->NumUpdatableComponents());
KALDI_ASSERT(learning_rates.Min() >= 0.0); // we allow zero learning rate.
int32 i = 0;
for (int32 j = 0; j < NumComponents(); j++) {
UpdatableComponent *uc =
dynamic_cast<UpdatableComponent *>(&(GetComponent(j)));
if (uc != NULL) {
uc->SetLearningRate(learning_rates(i));
i++;
}
}
KALDI_ASSERT(i == learning_rates.Dim());
}
void Nnet::GetLearningRates(VectorBase<BaseFloat> *learning_rates) const {
KALDI_ASSERT(learning_rates->Dim() == this->NumUpdatableComponents());
int32 i = 0;
for (int32 j = 0; j < NumComponents(); j++) {
const UpdatableComponent *uc =
dynamic_cast<const UpdatableComponent *>(&(GetComponent(j)));
if (uc != NULL) {
(*learning_rates)(i) = uc->LearningRate();
i++;
}
}
KALDI_ASSERT(i == learning_rates->Dim());
}
void Nnet::Resize(int32 new_size) {
KALDI_ASSERT(new_size <= static_cast<int32>(components_.size()));
for (size_t i = new_size; i < components_.size(); i++) delete components_[i];
components_.resize(new_size);
}
void Nnet::RemoveDropout() {
std::vector<Component *> components;
int32 removed = 0;
for (size_t i = 0; i < components_.size(); i++) {
if (dynamic_cast<DropoutComponent *>(components_[i]) != NULL ||
dynamic_cast<AdditiveNoiseComponent *>(components_[i]) != NULL) {
delete components_[i];
removed++;
} else {
components.push_back(components_[i]);
}
}
components_ = components;
if (removed > 0) KALDI_LOG << "Removed " << removed << " dropout components.";
SetIndexes();
Check();
}
void Nnet::SetDropoutScale(BaseFloat scale) {
size_t n_set = 0;
for (size_t i = 0; i < components_.size(); i++) {
DropoutComponent *dc = dynamic_cast<DropoutComponent *>(components_[i]);
if (dc != NULL) {
dc->SetDropoutScale(scale);
n_set++;
}
}
KALDI_LOG << "Set dropout scale to " << scale << " for " << n_set
<< " components.";
}
void Nnet::RemovePreconditioning() {
for (size_t i = 0; i < components_.size(); i++) {
if (dynamic_cast<AffineComponentPreconditioned *>(components_[i]) != NULL) {
AffineComponent *ac = new AffineComponent(
*(dynamic_cast<AffineComponent *>(components_[i])));
delete components_[i];
components_[i] = ac;
} else if (dynamic_cast<AffineComponentPreconditionedOnline *>(
components_[i]) != NULL) {
AffineComponent *ac = new AffineComponent(
*(dynamic_cast<AffineComponent *>(components_[i])));
delete components_[i];
components_[i] = ac;
}
}
SetIndexes();
Check();
}
void Nnet::SwitchToOnlinePreconditioning(int32 rank_in, int32 rank_out,
int32 update_period,
BaseFloat num_samples_history,
BaseFloat alpha) {
int32 switched = 0;
for (size_t i = 0; i < components_.size(); i++) {
if (dynamic_cast<AffineComponent *>(components_[i]) != NULL) {
AffineComponentPreconditionedOnline *ac =
new AffineComponentPreconditionedOnline(
*(dynamic_cast<AffineComponent *>(components_[i])), rank_in,
rank_out, update_period, num_samples_history, alpha);
delete components_[i];
components_[i] = ac;
switched++;
}
}
KALDI_LOG << "Switched " << switched << " components to use online "
<< "preconditioning, with (input, output) rank = " << rank_in
<< ", " << rank_out
<< " and num_samples_history = " << num_samples_history;
SetIndexes();
Check();
}
void Nnet::AddNnet(const VectorBase<BaseFloat> &scale_params,
const Nnet &other) {
KALDI_ASSERT(scale_params.Dim() == this->NumUpdatableComponents());
int32 i = 0;
for (int32 j = 0; j < NumComponents(); j++) {
UpdatableComponent *uc =
dynamic_cast<UpdatableComponent *>(&(GetComponent(j)));
const UpdatableComponent *uc_other =
dynamic_cast<const UpdatableComponent *>(&(other.GetComponent(j)));
if (uc != NULL) {
KALDI_ASSERT(uc_other != NULL);
BaseFloat alpha = scale_params(i);
uc->Add(alpha, *uc_other);
i++;
}
}
KALDI_ASSERT(i == scale_params.Dim());
}
void Nnet::AddNnet(BaseFloat alpha, const Nnet &other) {
for (int32 i = 0; i < NumComponents(); i++) {
UpdatableComponent *uc =
dynamic_cast<UpdatableComponent *>(&(GetComponent(i)));
const UpdatableComponent *uc_other =
dynamic_cast<const UpdatableComponent *>(&(other.GetComponent(i)));
if (uc != NULL) {
KALDI_ASSERT(uc_other != NULL);
uc->Add(alpha, *uc_other);
}
NonlinearComponent *nc =
dynamic_cast<NonlinearComponent *>(&(GetComponent(i)));
const NonlinearComponent *nc_other =
dynamic_cast<const NonlinearComponent *>(&(other.GetComponent(i)));
if (nc != NULL) {
KALDI_ASSERT(nc_other != NULL);
nc->Add(alpha, *nc_other);
}
}
}
void Nnet::AddNnet(BaseFloat alpha, Nnet *other, BaseFloat beta) {
for (int32 i = 0; i < NumComponents(); i++) {
UpdatableComponent *uc =
dynamic_cast<UpdatableComponent *>(&(GetComponent(i)));
UpdatableComponent *uc_other =
dynamic_cast<UpdatableComponent *>(&(other->GetComponent(i)));
if (uc != NULL) {
KALDI_ASSERT(uc_other != NULL);
uc->Add(alpha, *uc_other);
uc_other->Scale(beta);
}
NonlinearComponent *nc =
dynamic_cast<NonlinearComponent *>(&(GetComponent(i)));
NonlinearComponent *nc_other =
dynamic_cast<NonlinearComponent *>(&(other->GetComponent(i)));
if (nc != NULL) {
KALDI_ASSERT(nc_other != NULL);
nc->Add(alpha, *nc_other);
nc_other->Scale(beta);
}
}
}
void Nnet::Append(Component *new_component) {
components_.push_back(new_component);
SetIndexes();
Check();
}
void Nnet::SetComponent(int32 c, Component *component) {
KALDI_ASSERT(static_cast<size_t>(c) < components_.size());
delete components_[c];
components_[c] = component;
SetIndexes();
Check(); // Check that all the dimensions still match up.
}
int32 Nnet::GetParameterDim() const {
int32 ans = 0;
for (int32 c = 0; c < NumComponents(); c++) {
const UpdatableComponent *uc =
dynamic_cast<const UpdatableComponent *>(&(GetComponent(c)));
if (uc != NULL) ans += uc->GetParameterDim();
}
return ans;
}
void Nnet::Vectorize(VectorBase<BaseFloat> *params) const {
int32 offset = 0;
for (int32 c = 0; c < NumComponents(); c++) {
const UpdatableComponent *uc =
dynamic_cast<const UpdatableComponent *>(&(GetComponent(c)));
if (uc != NULL) {
int32 size = uc->GetParameterDim();
SubVector<BaseFloat> temp(*params, offset, size);
uc->Vectorize(&temp);
offset += size;
}
}
KALDI_ASSERT(offset == GetParameterDim());
}
void Nnet::ResetGenerators() {
// Resets random-number generators for all random components.
for (int32 c = 0; c < NumComponents(); c++) {
RandomComponent *rc = dynamic_cast<RandomComponent *>(&(GetComponent(c)));
if (rc != NULL) rc->ResetGenerator();
}
}
void Nnet::UnVectorize(const VectorBase<BaseFloat> ¶ms) {
int32 offset = 0;
for (int32 c = 0; c < NumComponents(); c++) {
UpdatableComponent *uc =
dynamic_cast<UpdatableComponent *>(&(GetComponent(c)));
if (uc != NULL) {
int32 size = uc->GetParameterDim();
uc->UnVectorize(params.Range(offset, size));
offset += size;
}
}
KALDI_ASSERT(offset == GetParameterDim());
}
void Nnet::LimitRankOfLastLayer(int32 dim) {
for (int32 i = components_.size() - 1; i >= 0; i--) {
AffineComponent *a = NULL, *b = NULL,
*c = dynamic_cast<AffineComponent *>(components_[i]);
if (c != NULL) {
c->LimitRank(dim, &a, &b);
delete c;
components_[i] = a;
components_.insert(components_.begin() + i + 1, b);
this->SetIndexes();
this->Check();
return;
}
}
KALDI_ERR << "No affine component found in neural net.";
}
void Nnet::SetIndexes() {
for (size_t i = 0; i < components_.size(); i++) components_[i]->SetIndex(i);
}
void Nnet::Collapse(bool match_updatableness) {
int32 num_collapsed = 0;
bool changed = true;
while (changed) {
changed = false;
for (size_t i = 0; i + 1 < components_.size(); i++) {
AffineComponent *a1 = dynamic_cast<AffineComponent *>(components_[i]),
*a2 = dynamic_cast<AffineComponent *>(components_[i + 1]);
FixedAffineComponent *f1 = dynamic_cast<FixedAffineComponent *>(
components_[i]),
*f2 = dynamic_cast<FixedAffineComponent *>(
components_[i + 1]);
Component *c = NULL;
if (a1 != NULL && a2 != NULL) {
c = a1->CollapseWithNext(*a2);
} else if (a1 != NULL && f2 != NULL && !match_updatableness) {
c = a1->CollapseWithNext(*f2);
} else if (f1 != NULL && a2 != NULL && !match_updatableness) {
c = a2->CollapseWithPrevious(*f1);
}
if (c != NULL) {
delete components_[i];
delete components_[i + 1];
components_[i] = c;
// This was causing valgrind errors, so doing it differently. Either
// a standard-library bug or I misunderstood something.
// components_.erase(components_.begin() + i + 1,
// components_.begin() + i + 2);
for (size_t j = i + 1; j + 1 < components_.size(); j++)
components_[j] = components_[j + 1];
components_.pop_back();
changed = true;
num_collapsed++;
}
}
}
this->SetIndexes();
this->Check();
KALDI_LOG << "Collapsed " << num_collapsed << " components.";
}
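// Note (editor's addition): collapsing two adjacent affine transforms is just
// function composition: y = W2 (W1 x + b1) + b2 = (W2 W1) x + (W2 b1 + b2),
// so the pair can be replaced by a single affine component with weight W2 W1
// and bias W2 b1 + b2 (which is what CollapseWithNext / CollapseWithPrevious
// are assumed to compute here).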
Nnet *GenRandomNnet(int32 input_dim, int32 output_dim) {
std::vector<Component *> components;
int32 cur_dim = input_dim;
// have up to 4 layers before the final one.
for (size_t i = 0; i < 4; i++) {
if (rand() % 2 == 0) {
// add an affine component.
int32 next_dim = 50 + rand() % 100;
BaseFloat learning_rate = 0.0001, param_stddev = 0.001, bias_stddev = 0.1;
AffineComponent *component = new AffineComponent();
component->Init(learning_rate, cur_dim, next_dim, param_stddev,
bias_stddev);
components.push_back(component);
cur_dim = next_dim;
} else if (rand() % 2 == 0) {
components.push_back(new SigmoidComponent(cur_dim));
} else if (rand() % 2 == 0 && cur_dim < 200) {
int32 left_context = rand() % 3, right_context = rand() % 3;
SpliceComponent *component = new SpliceComponent();
component->Init(cur_dim, left_context, right_context);
components.push_back(component);
cur_dim = cur_dim * (1 + left_context + right_context);
} else {
break;
}
}
{
AffineComponent *component = new AffineComponent();
BaseFloat learning_rate = 0.0001, param_stddev = 0.001, bias_stddev = 0.1;
component->Init(learning_rate, cur_dim, output_dim, param_stddev,
bias_stddev);
components.push_back(component);
cur_dim = output_dim;
}
components.push_back(new SoftmaxComponent(cur_dim));
Nnet *ans = new Nnet();
ans->Init(&components);
return ans;
}
int32 Nnet::LastUpdatableComponent() const {
int32 last_updatable_component = NumComponents();
for (int32 i = NumComponents() - 1; i >= 0; i--)
if (dynamic_cast<UpdatableComponent *>(components_[i]) != NULL)
last_updatable_component = i;
return last_updatable_component;
}
} // namespace nnet2
} // namespace kaldi
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-est-weights-ebw.cc<|end_filename|>
// gmmbin/gmm-est-weights-ebw.cc
// Copyright 2009-2011 <NAME> <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "tree/context-dep.h"
#include "hmm/transition-model.h"
#include "gmm/ebw-diag-gmm.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
const char *usage =
"Do EBW update on weights for MMI, MPE or MCE discriminative "
"training.\n"
"Numerator stats should not be I-smoothed\n"
"Usage: gmm-est-weights-ebw [options] <model-in> <stats-num-in> "
"<stats-den-in> <model-out>\n"
"e.g.: gmm-est-weights-ebw 1.mdl num.acc den.acc 2.mdl\n";
bool binary_write = false;
std::string update_flags_str = "w";
EbwWeightOptions ebw_weight_opts;
ParseOptions po(usage);
po.Register("binary", &binary_write, "Write output in binary mode");
po.Register("update-flags", &update_flags_str,
"Which GMM parameters to "
"update; only \"w\" flag is looked at.");
ebw_weight_opts.Register(&po);
po.Read(argc, argv);
if (po.NumArgs() != 4) {
po.PrintUsage();
exit(1);
}
kaldi::GmmFlagsType update_flags = StringToGmmFlags(update_flags_str);
std::string model_in_filename = po.GetArg(1),
num_stats_filename = po.GetArg(2),
den_stats_filename = po.GetArg(3),
model_out_filename = po.GetArg(4);
AmDiagGmm am_gmm;
TransitionModel trans_model;
{
bool binary_read;
Input ki(model_in_filename, &binary_read);
trans_model.Read(ki.Stream(), binary_read);
am_gmm.Read(ki.Stream(), binary_read);
}
Vector<double> num_transition_accs; // won't be used.
Vector<double> den_transition_accs; // won't be used.
AccumAmDiagGmm num_stats;
AccumAmDiagGmm den_stats;
{
bool binary;
Input ki(num_stats_filename, &binary);
num_transition_accs.Read(ki.Stream(), binary);
num_stats.Read(ki.Stream(), binary,
true); // true == add; doesn't matter here.
}
{
bool binary;
Input ki(den_stats_filename, &binary);
den_transition_accs.Read(ki.Stream(), binary);
den_stats.Read(ki.Stream(), binary,
true); // true == add; doesn't matter here.
}
if (update_flags & kGmmWeights) { // Update weights.
BaseFloat auxf_impr, count;
UpdateEbwWeightsAmDiagGmm(num_stats, den_stats, ebw_weight_opts, &am_gmm,
&auxf_impr, &count);
KALDI_LOG << "Num count " << num_stats.TotCount() << ", den count "
<< den_stats.TotCount();
KALDI_LOG << "Overall auxf impr/frame from weight update is "
<< (auxf_impr / count) << " over " << count << " frames.";
} else {
KALDI_LOG << "Doing nothing because flags do not specify to update the "
"weights.";
}
{
Output ko(model_out_filename, binary_write);
trans_model.Write(ko.Stream(), binary_write);
am_gmm.Write(ko.Stream(), binary_write);
}
KALDI_LOG << "Written model to " << model_out_filename;
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2bin/nnet-get-egs-discriminative.cc<|end_filename|>
// nnet2bin/nnet-get-egs-discriminative.cc
// Copyright 2012-2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/nnet-example-functions.h"
#include "nnet2/am-nnet.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
using namespace kaldi::nnet2;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
const char *usage =
"Get examples of data for discriminative neural network training;\n"
"each one corresponds to part of a file, of variable (and "
"configurable\n"
"length.\n"
"\n"
"Usage: nnet-get-egs-discriminative [options] "
"<model|transition-model> "
"<features-rspecifier> <ali-rspecifier> <den-lat-rspecifier> "
"<training-examples-out>\n"
"\n"
"An example [where $feats expands to the actual features]:\n"
"nnet-get-egs-discriminative --acoustic-scale=0.1 \\\n"
" 1.mdl '$feats' 'ark,s,cs:gunzip -c ali.1.gz|' 'ark,s,cs:gunzip -c "
"lat.1.gz|' ark:1.degs\n";
SplitDiscriminativeExampleConfig split_config;
ParseOptions po(usage);
split_config.Register(&po);
po.Read(argc, argv);
if (po.NumArgs() != 5) {
po.PrintUsage();
exit(1);
}
std::string nnet_rxfilename = po.GetArg(1),
feature_rspecifier = po.GetArg(2),
ali_rspecifier = po.GetArg(3), clat_rspecifier = po.GetArg(4),
examples_wspecifier = po.GetArg(5);
TransitionModel trans_model;
AmNnet am_nnet;
{
bool binary;
Input ki(nnet_rxfilename, &binary);
trans_model.Read(ki.Stream(), binary);
am_nnet.Read(ki.Stream(), binary);
}
int32 left_context = am_nnet.GetNnet().LeftContext(),
right_context = am_nnet.GetNnet().RightContext();
// Read in all the training files.
SequentialBaseFloatMatrixReader feat_reader(feature_rspecifier);
RandomAccessInt32VectorReader ali_reader(ali_rspecifier);
RandomAccessCompactLatticeReader clat_reader(clat_rspecifier);
DiscriminativeNnetExampleWriter example_writer(examples_wspecifier);
int32 num_done = 0, num_err = 0;
int64 examples_count = 0; // used in generating id's.
SplitExampleStats stats; // diagnostic.
for (; !feat_reader.Done(); feat_reader.Next()) {
std::string key = feat_reader.Key();
const Matrix<BaseFloat> &feats = feat_reader.Value();
if (!ali_reader.HasKey(key)) {
KALDI_WARN << "No pdf-level posterior for key " << key;
num_err++;
continue;
}
const std::vector<int32> &alignment = ali_reader.Value(key);
if (!clat_reader.HasKey(key)) {
KALDI_WARN << "No denominator lattice for key " << key;
num_err++;
continue;
}
CompactLattice clat = clat_reader.Value(key);
CreateSuperFinal(
&clat); // make sure only one state has a final-prob (of One()).
if (clat.Properties(fst::kTopSorted, true) == 0) {
TopSort(&clat);
}
BaseFloat weight = 1.0;
DiscriminativeNnetExample eg;
if (!LatticeToDiscriminativeExample(alignment, feats, clat, weight,
left_context, right_context, &eg)) {
KALDI_WARN << "Error converting lattice to example.";
num_err++;
continue;
}
std::vector<DiscriminativeNnetExample> egs;
SplitDiscriminativeExample(split_config, trans_model, eg, &egs, &stats);
KALDI_VLOG(2) << "Split lattice " << key << " into " << egs.size()
<< " pieces.";
for (size_t i = 0; i < egs.size(); i++) {
// Note: excised_egs will be of size 0 or 1.
std::vector<DiscriminativeNnetExample> excised_egs;
ExciseDiscriminativeExample(split_config, trans_model, egs[i],
&excised_egs, &stats);
for (size_t j = 0; j < excised_egs.size(); j++) {
std::ostringstream os;
os << (examples_count++);
std::string example_key = os.str();
example_writer.Write(example_key, excised_egs[j]);
}
}
num_done++;
}
if (num_done > 0) stats.Print();
KALDI_LOG << "Finished generating examples, "
<< "successfully processed " << num_done << " feature files, "
<< num_err << " had errors.";
return (num_done == 0 ? 1 : 0);
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/ivectorbin/ivector-extractor-sum-accs.cc<|end_filename|>
// ivectorbin/ivector-extractor-sum-accs.cc
// Copyright 2013 <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "util/common-utils.h"
#include "ivector/ivector-extractor.h"
int main(int argc, char *argv[]) {
try {
typedef kaldi::int32 int32;
using namespace kaldi;
const char *usage =
"Sum accumulators for training of iVector extractor\n"
"Usage: ivector-extractor-sum-accs [options] <stats-in1> "
"<stats-in2> ... <stats-inN> <stats-out>\n";
bool binary = true;
bool parallel = false;
kaldi::ParseOptions po(usage);
po.Register("binary", &binary, "Write output in binary mode");
po.Register("parallel", ¶llel,
"If true, the program makes sure to "
"open all filehandles before reading for any (useful when "
"summing accs from long processes)");
po.Read(argc, argv);
if (po.NumArgs() < 2) {
po.PrintUsage();
exit(1);
}
std::string stats_wxfilename = po.GetArg(po.NumArgs());
IvectorExtractorStats stats;
if (parallel) {
std::vector<kaldi::Input *> inputs(po.NumArgs() - 1);
for (int i = 1; i < po.NumArgs(); i++) {
std::string stats_in_filename = po.GetArg(i);
inputs[i - 1] = new kaldi::Input(stats_in_filename); // Don't try
// to work out binary status yet; this would cause us to wait
// for the output of that process. We delay it till later.
}
for (size_t i = 1; i < po.NumArgs(); i++) {
bool b;
kaldi::InitKaldiInputStream(inputs[i - 1]->Stream(), &b);
bool add = true;
stats.Read(inputs[i - 1]->Stream(), b, add);
delete inputs[i - 1];
}
} else {
for (int32 i = 1; i < po.NumArgs(); i++) {
std::string stats_rxfilename = po.GetArg(i);
KALDI_LOG << "Reading stats from " << stats_rxfilename;
bool binary_in;
Input ki(stats_rxfilename, &binary_in);
bool add = true;
stats.Read(ki.Stream(), binary_in, add);
}
}
WriteKaldiObject(stats, stats_wxfilename, binary);
KALDI_LOG << "Wrote summed stats to " << stats_wxfilename;
return 0;
} catch (const std::exception &e) {
std::cerr << e.what() << '\n';
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/nnet2/mixup-nnet.h<|end_filename|>
// nnet2/mixup-nnet.h
// Copyright 2012 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_NNET2_MIXUP_NNET_H_
#define KALDI_NNET2_MIXUP_NNET_H_
#include "nnet2/nnet-update.h"
#include "nnet2/nnet-compute.h"
#include "itf/options-itf.h"
namespace kaldi {
namespace nnet2 {
struct NnetMixupConfig {
BaseFloat power;
BaseFloat min_count;
int32 num_mixtures;
BaseFloat perturb_stddev;
NnetMixupConfig()
: power(0.25),
min_count(1000.0),
num_mixtures(-1),
perturb_stddev(0.01) {}
void Register(OptionsItf *po) {
po->Register("power", &power,
"Scaling factor used in determining the "
"number of mixture components to use for each HMM state "
"(or group of HMM states)");
po->Register("min-count", &min_count,
"Minimum count for a quasi-Gaussian, "
"enforced while allocating mixtures (obscure parameter).");
po->Register("num-mixtures", &num_mixtures,
"If specified, total number of "
"mixture components to mix up to (should be at least the "
"#leaves in the system");
po->Register("perturb-stddev", &perturb_stddev,
"Standard deviation used "
"when perturbing parameters during mixing up");
}
};
/**
This function does something similar to Gaussian mixture splitting for
GMMs, except applied to the output layer of the neural network.
We create additional outputs, which will be summed over using a
SumGroupComponent.
*/
void MixupNnet(const NnetMixupConfig &mixup_config, Nnet *nnet);
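// Illustrative usage sketch (editor's addition, not part of the original
// header); the am_nnet object and its GetNnet() accessor are assumptions
// based on how nnet2 models are typically wrapped:
//
//   NnetMixupConfig mixup_config;
//   mixup_config.num_mixtures = 8000;  // e.g. at least the number of leaves
//   MixupNnet(mixup_config, &(am_nnet.GetNnet()));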
} // namespace nnet2
} // namespace kaldi
#endif
<|start_filename|>tonic-suite/asr/src/cudamatrix/cu-sp-matrix.cc<|end_filename|>
#if HAVE_CUDA == 1
#include <cuda_runtime_api.h>
#include <cublas.h>
#endif
#include "base/timer.h"
#include "cudamatrix/cu-common.h"
#include "cudamatrix/cu-vector.h"
#include "cudamatrix/cu-device.h"
#include "cudamatrix/cu-kernels.h"
#include "cudamatrix/cu-math.h"
#include "cudamatrix/cu-sp-matrix.h"
#include "cudamatrix/cu-matrix.h"
#include "cudamatrix/cublas-wrappers.h"
namespace kaldi {
template <typename Real>
void CuSpMatrix<Real>::CopyFromMat(const CuMatrixBase<Real> &M,
SpCopyType copy_type) {
KALDI_ASSERT(this->num_rows_ == M.NumRows() &&
this->num_rows_ == M.NumCols());
if (this->num_rows_ == 0) return;
#if HAVE_CUDA == 1
if (CuDevice::Instantiate().Enabled()) {
Timer tim;
MatrixIndexT D = this->NumRows();
if (D == 0) return;
switch (copy_type) {
case kTakeMeanAndCheck:
KALDI_ERR << "kTakeMeanAndCheck not supported!";
// The grid/block dimensions have been very roughly tuned for the
// individual cases.
case kTakeMean: {
dim3 dimBlock(CU2DBLOCK, CU2DBLOCK);
dim3 dimGrid(n_blocks(D, CU2DBLOCK), n_blocks(D, CU2DBLOCK));
cuda_take_mean(dimGrid, dimBlock, M.Data(), this->data_, M.Dim());
CU_SAFE_CALL(cudaGetLastError());
} break;
case kTakeLower: {
int32 block_size = std::min(CU1DBLOCK, this->num_rows_);
dim3 dimBlock(1, block_size);
dim3 dimGrid(D, n_blocks(D, block_size));
cuda_take_lower(dimGrid, dimBlock, M.Data(), this->data_, M.Dim());
CU_SAFE_CALL(cudaGetLastError());
} break;
case kTakeUpper: {
dim3 dimBlock(CU2DBLOCK, CU2DBLOCK);
dim3 dimGrid(n_blocks(D, CU2DBLOCK), n_blocks(D, CU2DBLOCK));
cuda_take_upper(dimGrid, dimBlock, M.Data(), this->data_, M.Dim());
CU_SAFE_CALL(cudaGetLastError());
} break;
default:
KALDI_ASSERT("Invalid argument to CuSpMatrix::CopyFromMat");
}
CuDevice::Instantiate().AccuProfile(
"CuSpMatrix::CopyFromMat(from CuMatrixBase)", tim.Elapsed());
} else
#endif
{
Mat().CopyFromMat(M.Mat(), copy_type);
}
}
template <typename Real>
void CuSpMatrix<Real>::Invert() {
#if HAVE_CUDA == 1
if (CuDevice::Instantiate().Enabled()) {
CuMatrix<Real> mat(this->num_rows_, this->num_rows_);
mat.CopyFromSp(*this);
mat.SymInvertPosDef();
this->CopyFromMat(mat);
} else
#endif
{ // Use inversion of CPU-based SpMatrix.
Mat().Invert();
}
}
template <typename Real>
void CuSpMatrix<Real>::AddVec2(const Real alpha, const CuVectorBase<Real> &v) {
KALDI_ASSERT(v.Dim() == this->NumRows());
#if HAVE_CUDA == 1
if (CuDevice::Instantiate().Enabled()) {
Timer tim;
size_t nr = this->num_rows_;
dim3 dimBlock(CU2DBLOCK, CU2DBLOCK);
dim3 dimGrid(n_blocks(nr, CU2DBLOCK), n_blocks(nr, CU2DBLOCK));
cublas_spr('U', this->num_rows_, alpha, v.Data(), 1, this->Data());
CU_SAFE_CALL(cudaGetLastError());
CuDevice::Instantiate().AccuProfile("CuSpMatrix::AddVec2", tim.Elapsed());
} else
#endif
{
Mat().AddVec2(alpha, v.Vec());
}
}
template <typename Real>
void CuSpMatrix<Real>::AddMat2(const Real alpha, const CuMatrixBase<Real> &M,
MatrixTransposeType transM, const Real beta) {
KALDI_ASSERT((transM == kNoTrans && this->NumRows() == M.NumRows()) ||
(transM == kTrans && this->NumRows() == M.NumCols()));
#if HAVE_CUDA == 1
if (CuDevice::Instantiate().Enabled()) {
Timer tim;
MatrixIndexT this_dim = this->NumRows(),
m_other_dim = (transM == kNoTrans ? M.NumCols() : M.NumRows());
if (this_dim == 0) return;
if (alpha == 0.0) {
if (beta != 1.0) this->Scale(beta);
return;
}
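// Note (editor's addition): the transpose flag below looks inverted
// ('N' when transM == kTrans) because CUBLAS routines are column-major while
// Kaldi matrices are row-major; passing row-major data to a column-major syrk
// implicitly transposes it, so the flag is flipped to compensate.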
char trans = (transM == kTrans ? 'N' : 'T');
CuMatrix<Real> tmp_mat(*this);
cublas_syrk('U', trans, this_dim, m_other_dim, alpha, M.Data(), M.Stride(),
beta, tmp_mat.Data(), tmp_mat.Stride());
this->CopyFromMat(tmp_mat, kTakeLower);
CuDevice::Instantiate().AccuProfile("CuSpMatrix::AddMat2", tim.Elapsed());
} else
#endif
{
Mat().AddMat2(alpha, M.Mat(), transM, beta);
}
}
/**
 * Returns tr(A B) for two symmetric packed (Sp) matrices; templated over the
 * (possibly different) floating-point types of A and B.
 */
template <typename Real, typename OtherReal>
Real TraceSpSp(const CuSpMatrix<Real> &A, const CuSpMatrix<OtherReal> &B) {
KALDI_ASSERT(A.NumRows() == B.NumRows());
#if HAVE_CUDA == 1
if (CuDevice::Instantiate().Enabled()) {
MatrixIndexT nr = A.NumRows(), size = nr * (nr + 1) / 2;
CuVector<Real> Adiag(nr, kUndefined);
CuVector<OtherReal> Bdiag(nr, kUndefined);
Adiag.CopyDiagFromPacked(A);
Bdiag.CopyDiagFromPacked(B);
CuSubVector<Real> Aall(A.Data(), size);
CuSubVector<OtherReal> Ball(B.Data(), size);
// Below, we subtract VecVec(Adiag, Bdiag) to remove double-counting
// on the diagonal.
return 2.0 * VecVec(Aall, Ball) - VecVec(Adiag, Bdiag);
} else
#endif
{
return TraceSpSp(A.Mat(), B.Mat());
}
}
template float TraceSpSp(const CuSpMatrix<float> &A,
const CuSpMatrix<float> &B);
template float TraceSpSp(const CuSpMatrix<float> &A,
const CuSpMatrix<double> &B);
template double TraceSpSp(const CuSpMatrix<double> &A,
const CuSpMatrix<float> &B);
template double TraceSpSp(const CuSpMatrix<double> &A,
const CuSpMatrix<double> &B);
template <typename Real>
bool CuSpMatrix<Real>::ApproxEqual(const CuSpMatrix<Real> &B, Real tol) const {
KALDI_ASSERT(this->NumRows() == B.NumRows());
CuSpMatrix<Real> diff(*this);
diff.AddSp(-1.0, B);
Real a = this->FrobeniusNorm(), b = B.FrobeniusNorm(),
d = diff.FrobeniusNorm();
return (d <= tol * std::max(a, b));
}
template <typename Real>
bool CuSpMatrix<Real>::IsUnit(Real tol) const {
// want to return:
// FrobeniusNorm(*this - I) <= tol * NumRows(), i.e.:
// sqrt(trace((*this - I)(*this - I))) <= tol * NumRows()
// trace((*this - I)(*this - I)) <= tol * NumRows()
// trace(*this * *this) + trace(I) - 2 * trace(*this) <= tol * NumRows()
// trace(*this * *this) + dim - 2*this.Trace() <= tol * NumRows()
// Note: we could do this more efficiently still, by slightly changing the
// definition of IsUnit and getting rid of the extra stuff inside TraceSpSp
// that corrects for the diagonal being counted twice.
return (TraceSpSp(*this, *this) + this->NumRows() - 2.0 * this->Trace() <=
tol * this->NumRows());
}
template class CuSpMatrix<float>;
template class CuSpMatrix<double>;
} // namespace
<|start_filename|>tonic-suite/asr/src/makefiles/default_rules.mk<|end_filename|>
ifeq ($(KALDI_FLAVOR), dynamic)
ifeq ($(shell uname), Darwin)
XLDLIBS := $(LDLIBS)
ifdef LIBNAME
LIBFILE = lib$(LIBNAME).dylib
#LDLIBS += -l$(LIBNAME)
endif
LDFLAGS += -L$(KALDILIBDIR) -Wl,-rpath -Wl,$(KALDILIBDIR)
XDEPENDS = $(foreach dep,$(ADDLIBS), $(dir $(dep))/lib$(notdir $(basename $(dep))).dylib )
XLDLIBS += $(foreach dep,$(ADDLIBS), -l$(notdir $(basename $(dep))) )
else
ifeq ($(shell uname), Linux)
ifdef LIBNAME
LIBFILE = lib$(LIBNAME).so
#LDLIBS += -l$(LIBNAME)
endif
LDFLAGS += -Wl,-rpath=$(shell readlink -f $(KALDILIBDIR)) -L.
LDFLAGS += $(foreach dep,$(ADDLIBS), -L$(dir $(dep)) )
XDEPENDS = $(foreach dep,$(ADDLIBS), $(dir $(dep))/lib$(notdir $(basename $(dep))).so )
else # Platform not supported
$(error Dynamic libraries not supported on this platform. Run configure with --static flag. )
endif
endif
LDLIBS += $(foreach dep,$(ADDLIBS), -l$(notdir $(basename $(dep))) )
else
ifdef LIBNAME
LIBFILE = $(LIBNAME).a
endif
XDEPENDS = $(ADDLIBS)
endif
all: $(LIBFILE) $(BINFILES)
$(LIBFILE): $(OBJFILES)
$(AR) -cru $(LIBNAME).a $(OBJFILES)
$(RANLIB) $(LIBNAME).a
ifeq ($(KALDI_FLAVOR), dynamic)
ifeq ($(shell uname), Darwin)
$(CXX) -dynamiclib -o $@ -install_name @rpath/$@ -framework Accelerate $(LDFLAGS) $(XLDLIBS) $(OBJFILES) $(LDLIBS)
rm -f $(KALDILIBDIR)/$@; ln -s $(shell pwd)/$@ $(KALDILIBDIR)/$@
else
ifeq ($(shell uname), Linux)
# Building shared library from static (static was compiled with -fPIC)
$(CXX) -shared -o $@ $(EXTRAFILES) -Wl,--no-undefined -Wl,--as-needed -Wl,-soname=$@,--whole-archive $(LIBNAME).a -Wl,--no-whole-archive $(LDFLAGS) $(XDEPENDS) $(LDLIBS)
rm -f $(KALDILIBDIR)/$@; ln -s $(shell pwd)/$@ $(KALDILIBDIR)/$@
#cp $@ $(KALDILIBDIR)
else # Platform not supported
$(error Dynamic libraries not supported on this platform. Run configure with --static flag. )
endif
endif
endif
$(BINFILES): $(LIBFILE) $(XDEPENDS)
# Rule below would expand to, e.g.:
# ../base/kaldi-base.a:
# make -c ../base kaldi-base.a
# -c option to make is same as changing directory.
%.a:
$(MAKE) -C ${@D} ${@F}
%.so:
$(MAKE) -C ${@D} ${@F}
clean:
-rm -f *.o *.so $(TESTFILES) $(BINFILES) $(TESTOUTPUTS) tmp* *.tmp
$(TESTFILES): $(LIBFILE) $(XDEPENDS)
test_compile: $(TESTFILES)
test: test_compile
@result=0; for x in $(TESTFILES); do printf "Running $$x ..."; ./$$x >/dev/null 2>&1; if [ $$? -ne 0 ]; then echo "... FAIL $$x"; result=1; else echo "... SUCCESS"; fi; done; exit $$result
.valgrind: $(BINFILES) $(TESTFILES)
depend:
-$(CXX) -M $(CXXFLAGS) *.cc > .depend.mk
# removing automatic making of "depend" as it's quite slow.
#.depend.mk: depend
-include .depend.mk
<|start_filename|>tonic-suite/nlp/src/SENNA_VBS.cpp<|end_filename|>
#include <sys/time.h>
#include "SENNA_VBS.h"
#include "SENNA_utils.h"
#include "SENNA_nn.h"
#include "socket.h"
int *SENNA_VBS_forward(SENNA_VBS *vbs, const int *sentence_words,
const int *sentence_caps, const int *sentence_posl,
int sentence_size, int socketfd) {
int idx;
struct timeval tv1, tv2;
gettimeofday(&tv1, NULL);
vbs->input_state = SENNA_realloc(
vbs->input_state, sizeof(float),
(sentence_size + vbs->window_size - 1) *
(vbs->ll_word_size + vbs->ll_caps_size + vbs->ll_posl_size));
vbs->output_state = SENNA_realloc(vbs->output_state, sizeof(float),
sentence_size * vbs->output_state_size);
SENNA_nn_lookup(vbs->input_state,
vbs->ll_word_size + vbs->ll_caps_size + vbs->ll_posl_size,
vbs->ll_word_weight, vbs->ll_word_size, vbs->ll_word_max_idx,
sentence_words, sentence_size, vbs->ll_word_padding_idx,
(vbs->window_size - 1) / 2);
SENNA_nn_lookup(vbs->input_state + vbs->ll_word_size,
vbs->ll_word_size + vbs->ll_caps_size + vbs->ll_posl_size,
vbs->ll_caps_weight, vbs->ll_caps_size, vbs->ll_caps_max_idx,
sentence_caps, sentence_size, vbs->ll_caps_padding_idx,
(vbs->window_size - 1) / 2);
SENNA_nn_lookup(vbs->input_state + vbs->ll_word_size + vbs->ll_caps_size,
vbs->ll_word_size + vbs->ll_caps_size + vbs->ll_posl_size,
vbs->ll_posl_weight, vbs->ll_posl_size, vbs->ll_posl_max_idx,
sentence_posl, sentence_size, vbs->ll_posl_padding_idx,
(vbs->window_size - 1) / 2);
gettimeofday(&tv2, NULL);
vbs->apptime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
gettimeofday(&tv1, NULL);
int input_size = vbs->ll_word_size + vbs->ll_caps_size + vbs->ll_posl_size;
char *input_data = (char *)malloc(
sentence_size * (vbs->window_size * input_size) * sizeof(float));
for (idx = 0; idx < sentence_size; idx++) {
memcpy((char *)(input_data +
idx * (vbs->window_size) * (input_size) * sizeof(float)),
(char *)(vbs->input_state + idx * input_size),
vbs->window_size * input_size * sizeof(float));
}
if (vbs->service) {
SOCKET_send(socketfd, input_data,
sentence_size * (vbs->window_size * input_size * sizeof(float)),
vbs->debug);
SOCKET_receive(socketfd, (char *)(vbs->output_state),
vbs->output_state_size * sizeof(float) * sentence_size,
vbs->debug);
}
free(input_data); // the staging buffer is only needed for the socket round-trip
/*
for(idx = 0; idx < sentence_size; idx++)
{
if(vbs->service) {
SOCKET_send(socketfd,
(char*)(vbs->input_state+idx*(vbs->ll_word_size+vbs->ll_caps_size+vbs->ll_posl_size)),
vbs->window_size*(vbs->ll_word_size+vbs->ll_caps_size+vbs->ll_posl_size)*sizeof(float),
vbs->debug
);
SOCKET_receive(socketfd,
(char*)(vbs->output_state+idx*vbs->output_state_size),
vbs->output_state_size*sizeof(float),
vbs->debug
);
}
else{
SENNA_nn_linear(vbs->hidden_state,
vbs->hidden_state_size,
vbs->l1_weight,
vbs->l1_bias,
vbs->input_state+idx*(vbs->ll_word_size+vbs->ll_caps_size+vbs->ll_posl_size),
vbs->window_size*(vbs->ll_word_size+vbs->ll_caps_size+vbs->ll_posl_size));
SENNA_nn_hardtanh(vbs->hidden_state,
vbs->hidden_state,
vbs->hidden_state_size);
SENNA_nn_linear(vbs->output_state+idx*vbs->output_state_size,
vbs->output_state_size,
vbs->l2_weight,
vbs->l2_bias,
vbs->hidden_state,
vbs->hidden_state_size);
}
vbs->calls++;
}
*/
gettimeofday(&tv2, NULL);
vbs->dnntime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
gettimeofday(&tv1, NULL);
vbs->labels = SENNA_realloc(vbs->labels, sizeof(int), sentence_size);
for (idx = 0; idx < sentence_size; idx++)
SENNA_nn_max(NULL, &vbs->labels[idx],
vbs->output_state + idx * vbs->output_state_size,
vbs->output_state_size);
gettimeofday(&tv2, NULL);
vbs->apptime +=
(tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec);
return vbs->labels;
}
SENNA_VBS *SENNA_VBS_new(const char *path, const char *subpath) {
SENNA_VBS *vbs = SENNA_malloc(sizeof(SENNA_VBS), 1);
FILE *f;
float dummy;
f = SENNA_fopen(path, subpath, "rb");
SENNA_fread(&vbs->window_size, sizeof(int), 1, f);
SENNA_fread_tensor_2d(&vbs->ll_word_weight, &vbs->ll_word_size,
&vbs->ll_word_max_idx, f);
SENNA_fread_tensor_2d(&vbs->ll_caps_weight, &vbs->ll_caps_size,
&vbs->ll_caps_max_idx, f);
SENNA_fread_tensor_2d(&vbs->ll_posl_weight, &vbs->ll_posl_size,
&vbs->ll_posl_max_idx, f);
SENNA_fread_tensor_2d(&vbs->l1_weight, &vbs->input_state_size,
&vbs->hidden_state_size, f);
SENNA_fread_tensor_1d(&vbs->l1_bias, &vbs->hidden_state_size, f);
SENNA_fread_tensor_2d(&vbs->l2_weight, &vbs->hidden_state_size,
&vbs->output_state_size, f);
SENNA_fread_tensor_1d(&vbs->l2_bias, &vbs->output_state_size, f);
SENNA_fread(&vbs->ll_word_padding_idx, sizeof(int), 1, f);
SENNA_fread(&vbs->ll_caps_padding_idx, sizeof(int), 1, f);
SENNA_fread(&vbs->ll_posl_padding_idx, sizeof(int), 1, f);
SENNA_fread(&dummy, sizeof(float), 1, f);
SENNA_fclose(f);
if ((int)dummy != 777)
SENNA_error("vbs: data corrupted (or not IEEE floating computer)");
vbs->input_state = NULL;
vbs->hidden_state = SENNA_malloc(sizeof(float), vbs->hidden_state_size);
vbs->output_state = NULL;
vbs->labels = NULL;
/* some info if you want verbose */
SENNA_message("vbs: window size: %d", vbs->window_size);
SENNA_message("vbs: vector size in word lookup table: %d", vbs->ll_word_size);
SENNA_message("vbs: word lookup table size: %d", vbs->ll_word_max_idx);
SENNA_message("vbs: vector size in caps lookup table: %d", vbs->ll_caps_size);
SENNA_message("vbs: caps lookup table size: %d", vbs->ll_caps_max_idx);
SENNA_message("vbs: vector size in pos lookup table: %d", vbs->ll_posl_size);
SENNA_message("vbs: pos lookup table size: %d", vbs->ll_posl_max_idx);
SENNA_message("vbs: number of hidden units: %d", vbs->hidden_state_size);
SENNA_message("vbs: number of classes: %d", vbs->output_state_size);
vbs->service = false;
vbs->debug = false;
vbs->socketfd = -1;
vbs->calls = 0;
vbs->dnntime = 0;
vbs->apptime = 0;
return vbs;
}
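/* Illustrative usage sketch (editor's addition): the typical lifecycle of a
   SENNA_VBS object, assuming the word/caps/posl index arrays have already
   been produced by the tokenizer and lookup hashes (not shown here), and
   that opt_path/subpath point at the model file for this tagger:

     SENNA_VBS *vbs = SENNA_VBS_new(opt_path, subpath);
     int *labels = SENNA_VBS_forward(vbs, words, caps, posl,
                                     sentence_size, socketfd);
     ... use labels[0 .. sentence_size-1] ...
     SENNA_VBS_free(vbs);
*/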
void SENNA_VBS_free(SENNA_VBS *vbs) {
SENNA_free(vbs->ll_word_weight);
SENNA_free(vbs->ll_caps_weight);
SENNA_free(vbs->ll_posl_weight);
SENNA_free(vbs->l1_weight);
SENNA_free(vbs->l1_bias);
SENNA_free(vbs->l2_weight);
SENNA_free(vbs->l2_bias);
SENNA_free(vbs->input_state);
SENNA_free(vbs->hidden_state);
SENNA_free(vbs->output_state);
SENNA_free(vbs->labels);
SENNA_free(vbs);
}
<|start_filename|>tonic-suite/asr/src/bin/get-silence-probs.cc<|end_filename|>
// bin/get-silence-probs.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "hmm/hmm-utils.h"
int main(int argc, char *argv[]) {
using namespace kaldi;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
try {
const char *usage =
"This program takes two archives of Vector<BaseFloat>, representing\n"
"per-frame log-likelihoods for silence and non-silence models "
"respectively.\n"
"It outputs per-frame silence probabilities in the same format.\n"
"To get non-silence probs instead, use --write-nonsil-probs "
"Usage: get-silence-probs [options] <silence-loglikes-rspecifier> "
" <nonsilence-loglikes-rspecifier> <silence-probs-wspecifier>\n"
"e.g.: get-silence-probs --silence-prior=0.9 --quantize=0.25 "
"ark:sil.likes "
"ark:nonsil.likes ark:sil.probs\n";
ParseOptions po(usage);
BaseFloat sil_prior = 0.5;
BaseFloat quantize = 0.0;
bool write_nonsil_probs = false;
po.Register(
"sil-prior", &sil_prior,
"Prior probability of silence, must be strictly between 0 and 1.");
po.Register("quantize", &quantize,
"If nonzero, quantize probs to this level (to improve "
"compressibility).");
po.Register("write-nonsil-probs", &write_nonsil_probs,
"If true, write non-silence probs instead of silence probs");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
KALDI_ASSERT(sil_prior > 0.0 && sil_prior < 1.0);
KALDI_ASSERT(quantize >= 0.0 && quantize <= 1.0);
double sil_log_bias = log(sil_prior / (1.0 - sil_prior));
std::string silence_likes_rspecifier = po.GetArg(1),
nonsilence_likes_rspecifier = po.GetArg(2),
silence_probs_wspecifier = po.GetArg(3);
SequentialBaseFloatVectorReader silence_likes_reader(
silence_likes_rspecifier);
RandomAccessBaseFloatVectorReader nonsilence_likes_reader(
nonsilence_likes_rspecifier);
BaseFloatVectorWriter silence_probs_writer(silence_probs_wspecifier);
int num_done = 0, num_err = 0;
double tot_frames = 0.0, tot_sil_prob = 0.0;
for (; !silence_likes_reader.Done(); silence_likes_reader.Next()) {
std::string key = silence_likes_reader.Key();
if (!nonsilence_likes_reader.HasKey(key)) {
KALDI_WARN << "No non-silence likes available for utterance " << key;
num_err++;
continue;
}
const Vector<BaseFloat> &sil_likes = silence_likes_reader.Value();
const Vector<BaseFloat> &nonsil_likes =
nonsilence_likes_reader.Value(key);
if (sil_likes.Dim() != nonsil_likes.Dim()) {
KALDI_WARN << "Dimension mismatch between sil and non-sil likes";
num_err++;
continue;
}
int32 num_frames = sil_likes.Dim();
Vector<BaseFloat> sil_probs(num_frames);
for (int32 f = 0; f < num_frames; f++) {
// We're basically just applying Bayes' rule here to get the
// posterior prob of silence.
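// In formula form (editor's addition): with prior p = sil_prior,
//   P(sil | x) = p * P(x|sil) / (p * P(x|sil) + (1-p) * P(x|nonsil))
//              = sigmoid(log P(x|sil) - log P(x|nonsil) + log(p/(1-p))),
// which is exactly what is computed below via sil_loglike and sil_log_bias.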
BaseFloat sil_loglike = sil_likes(f), nonsil_loglike = nonsil_likes(f);
sil_loglike -= nonsil_loglike;
nonsil_loglike = 0; // improve floating-point range.
sil_loglike += sil_log_bias; // relates to prior. Zero if prior==0.5.
if (sil_loglike > 10) {
sil_probs(f) = 1.0; // because the exp below might fail.
} else {
BaseFloat e_sil_loglike = exp(sil_loglike);
BaseFloat sil_prob = e_sil_loglike / (1.0 + e_sil_loglike);
if (!(sil_prob >= 0.0 && sil_prob <= 1.0)) {
KALDI_WARN << "Bad silence prob (NaNs found?), setting to 0.5";
sil_prob = 0.5;
}
sil_probs(f) = sil_prob;
}
if (quantize != 0.0) {
int64 i = static_cast<int64>(0.5 + (sil_probs(f) / quantize));
sil_probs(f) = quantize * i;
}
}
tot_frames += num_frames;
tot_sil_prob += sil_probs.Sum();
if (write_nonsil_probs) { // sil_prob <-- 1.0 - sil_prob
sil_probs.Scale(-1.0);
sil_probs.Add(1.0);
}
silence_probs_writer.Write(key, sil_probs);
num_done++;
}
KALDI_LOG << "Done " << num_done << " utterances, " << num_err
<< " with errors.";
KALDI_LOG << "Average silence prob is " << (tot_sil_prob / tot_frames)
<< " over " << tot_frames << " frames.";
return (num_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/gmmbin/gmm-compute-likes.cc<|end_filename|>
// gmmbin/gmm-compute-likes.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "gmm/am-diag-gmm.h"
#include "hmm/transition-model.h"
#include "fstext/fstext-lib.h"
#include "base/timer.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
using fst::SymbolTable;
using fst::VectorFst;
using fst::StdArc;
const char *usage =
"Compute log-likelihoods from GMM-based model\n"
"(outputs matrices of log-likelihoods indexed by (frame, pdf)\n"
"Usage: gmm-compute-likes [options] model-in features-rspecifier "
"likes-wspecifier\n";
ParseOptions po(usage);
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string model_in_filename = po.GetArg(1),
feature_rspecifier = po.GetArg(2),
loglikes_wspecifier = po.GetArg(3);
AmDiagGmm am_gmm;
{
bool binary;
TransitionModel trans_model; // not needed.
Input ki(model_in_filename, &binary);
trans_model.Read(ki.Stream(), binary);
am_gmm.Read(ki.Stream(), binary);
}
BaseFloatMatrixWriter loglikes_writer(loglikes_wspecifier);
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
int32 num_done = 0;
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string key = feature_reader.Key();
const Matrix<BaseFloat> &features(feature_reader.Value());
Matrix<BaseFloat> loglikes(features.NumRows(), am_gmm.NumPdfs());
for (int32 i = 0; i < features.NumRows(); i++) {
for (int32 j = 0; j < am_gmm.NumPdfs(); j++) {
SubVector<BaseFloat> feat_row(features, i);
loglikes(i, j) = am_gmm.LogLikelihood(j, feat_row);
}
}
loglikes_writer.Write(key, loglikes);
num_done++;
}
KALDI_LOG << "gmm-compute-likes: computed likelihoods for " << num_done
<< " utterances.";
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/latbin/lattice-lmrescore.cc<|end_filename|>
// latbin/lattice-lmrescore.cc
// Copyright 2009-2011 Microsoft Corporation
// 2014 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "fstext/fstext-lib.h"
#include "lat/kaldi-lattice.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
using fst::SymbolTable;
using fst::VectorFst;
using fst::StdArc;
using fst::ReadFstKaldi;
const char *usage =
"Add lm_scale * [cost of best path through LM FST] to graph-cost of\n"
"paths through lattice. Does this by composing with LM FST, then\n"
"lattice-determinizing (it has to negate weights first if lm_scale<0)\n"
"Usage: lattice-lmrescore [options] lattice-rspecifier lm-fst-in "
"lattice-wspecifier\n"
" e.g.: lattice-lmrescore --lm-scale=-1.0 ark:in.lats 'fstproject "
"--project_output=true data/lang/G.fst|' ark:out.lats\n";
ParseOptions po(usage);
BaseFloat lm_scale = 1.0;
int32 num_states_cache = 50000;
po.Register(
"lm-scale", &lm_scale,
"Scaling factor for language model costs; frequently 1.0 or -1.0");
po.Register(
"num-states-cache", &num_states_cache,
"Number of states we cache when mapping LM FST to lattice type. "
"More -> more memory but faster.");
po.Read(argc, argv);
if (po.NumArgs() != 3) {
po.PrintUsage();
exit(1);
}
std::string lats_rspecifier = po.GetArg(1), fst_rxfilename = po.GetArg(2),
lats_wspecifier = po.GetArg(3);
VectorFst<StdArc> *std_lm_fst = ReadFstKaldi(fst_rxfilename);
if (std_lm_fst->Properties(fst::kILabelSorted, true) == 0) {
// Make sure LM is sorted on ilabel.
fst::ILabelCompare<StdArc> ilabel_comp;
fst::ArcSort(std_lm_fst, ilabel_comp);
}
// mapped_fst is the LM fst interpreted using the LatticeWeight semiring,
// with all the cost on the first member of the pair (since it's a graph
// weight).
fst::CacheOptions cache_opts(true, num_states_cache);
fst::StdToLatticeMapper<BaseFloat> mapper;
fst::MapFst<StdArc, LatticeArc, fst::StdToLatticeMapper<BaseFloat> > lm_fst(
*std_lm_fst, mapper, cache_opts);
delete std_lm_fst;
// The next fifteen or so lines are a kind of optimization and
// can be ignored if you just want to understand what is going on.
// Change the options for TableCompose to match the input
// (because it's the arcs of the LM FST we want to do lookup
// on).
fst::TableComposeOptions compose_opts(fst::TableMatcherOptions(), true,
fst::SEQUENCE_FILTER,
fst::MATCH_INPUT);
// The following is an optimization for the TableCompose
// composition: it stores certain tables that enable fast
// lookup of arcs during composition.
fst::TableComposeCache<fst::Fst<LatticeArc> > lm_compose_cache(
compose_opts);
// Read as regular lattice-- this is the form we need it in for efficient
// composition and determinization.
SequentialLatticeReader lattice_reader(lats_rspecifier);
// Write as compact lattice.
CompactLatticeWriter compact_lattice_writer(lats_wspecifier);
int32 n_done = 0, n_fail = 0;
for (; !lattice_reader.Done(); lattice_reader.Next()) {
std::string key = lattice_reader.Key();
Lattice lat = lattice_reader.Value();
lattice_reader.FreeCurrent();
if (lm_scale != 0.0) {
// Only need to modify it if LM scale nonzero.
// Before composing with the LM FST, we scale the lattice weights
// by the inverse of "lm_scale". We'll later scale by "lm_scale".
// We do it this way so we can determinize and it will give the
// right effect (taking the "best path" through the LM) regardless
// of the sign of lm_scale.
fst::ScaleLattice(fst::GraphLatticeScale(1.0 / lm_scale), &lat);
ArcSort(&lat, fst::OLabelCompare<LatticeArc>());
Lattice composed_lat;
// Could just do, more simply: Compose(lat, lm_fst, &composed_lat);
// and not have lm_compose_cache at all.
// The command below is faster, though; it's constant not
// logarithmic in vocab size.
TableCompose(lat, lm_fst, &composed_lat, &lm_compose_cache);
Invert(&composed_lat); // make it so word labels are on the input.
CompactLattice determinized_lat;
DeterminizeLattice(composed_lat, &determinized_lat);
fst::ScaleLattice(fst::GraphLatticeScale(lm_scale), &determinized_lat);
if (determinized_lat.Start() == fst::kNoStateId) {
KALDI_WARN << "Empty lattice for utterance " << key
<< " (incompatible LM?)";
n_fail++;
} else {
compact_lattice_writer.Write(key, determinized_lat);
n_done++;
}
} else {
// zero scale so nothing to do.
n_done++;
CompactLattice compact_lat;
ConvertLattice(lat, &compact_lat);
compact_lattice_writer.Write(key, compact_lat);
}
}
KALDI_LOG << "Done " << n_done << " lattices, failed for " << n_fail;
return (n_done != 0 ? 0 : 1);
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/cudamatrix/cu-kernels-ansi.h<|end_filename|>
// cudamatrix/cu-kernels-ansi.h
// Copyright 2009-2012 <NAME>
// 2013 Johns Hopkins University (author: <NAME>)
// 2013 <NAME>
// 2013 <NAME>
// 2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_CUDAMATRIX_CU_KERNELS_ANSI_H_
#define KALDI_CUDAMATRIX_CU_KERNELS_ANSI_H_
#include "cudamatrix/cu-matrixdim.h"
#if HAVE_CUDA == 1
extern "C" {
/*********************************************************
* int32 CUDA kernel calls (no template wrapper)
*/
void cudaI32_set_const(dim3 Gr, dim3 Bl, int32_cuda *mat, int32_cuda value,
MatrixDim d);
/*********************************************************
* float CUDA kernel calls
*/
/*
* CuMatrix
*/
void cudaF_copy_upp_low(dim3 Gr, dim3 Bl, float *A, MatrixDim dimA);
void cudaF_copy_low_upp(dim3 Gr, dim3 Bl, float *A, MatrixDim dimA);
void cudaF_add_diag_vec_mat(dim3 Gr, dim3 Bl, float alpha, float *mat,
MatrixDim mat_dim, const float *vec,
const float *mat2, int mat2_row_stride,
int mat2_col_stride, float beta);
void cudaF_copy_from_tp_trans(dim3 Gr, dim3 Bl, float *A, const float *B,
MatrixDim dmat);
void cudaFD_copy_from_tp_trans(dim3 Gr, dim3 Bl, float *A, const double *B,
MatrixDim dmat);
void cudaF_copy_from_tp(dim3 Gr, dim3 Bl, float *A, const float *B,
MatrixDim dmat);
void cudaFD_copy_from_tp(dim3 Gr, dim3 Bl, float *A, const double *B,
MatrixDim dmat);
void cudaF_copy_col_from_vec(int Gr, int Bl, float *mat, const float *v,
int col, MatrixDim d);
void cudaF_apply_exp(dim3 Gr, dim3 Bl, float *mat, MatrixDim d);
void cudaF_apply_pow(dim3 Gr, dim3 Bl, float *mat, float power, MatrixDim d);
void cudaF_apply_pow_abs(dim3 Gr, dim3 Bl, float *mat, float power,
bool include_sign, MatrixDim d);
void cudaF_apply_heaviside(dim3 Gr, dim3 Bl, float *mat, MatrixDim d);
void cudaF_apply_floor(dim3 Gr, dim3 Bl, float *mat, float floor_val,
MatrixDim d);
void cudaF_copy_cols(dim3 Gr, dim3 Bl, float *dst, const float *src,
const MatrixIndexT_cuda *reorder, MatrixDim dst_dim,
int src_stride);
void cudaF_copy_rows(dim3 Gr, dim3 Bl, float *dst, const float *src,
const MatrixIndexT_cuda *reorder, MatrixDim dst_dim,
int src_stride);
void cudaF_apply_ceiling(dim3 Gr, dim3 Bl, float *mat, float ceiling_val,
MatrixDim d);
void cudaF_set_diag(int Gr, int Bl, float *mat, float value, MatrixDim d);
void cudaF_set_diag_packed(int Gr, int Bl, float *mat, float value, int dim);
void cudaF_add_diag_packed(int Gr, int Bl, float *mat, float value, int dim);
void cudaF_set_const(dim3 Gr, dim3 Bl, float *mat, float value, MatrixDim d);
void cudaF_set_zero_above_diag(dim3 Gr, dim3 Bl, float *mat, MatrixDim d);
void cudaF_add(dim3 Gr, dim3 Bl, float *mat, float value, MatrixDim d);
void cudaF_add_vec2(dim3 Gr, dim3 Bl, float *mat, const float *vec,
const float alpha, int dim);
void cudaF_scale_diag(int Gr, int Bl, float *mat, float value, int dim);
void cudaF_scale(dim3 Gr, dim3 Bl, float *mat, float value, MatrixDim d);
void cudaF_apply_log(dim3 Gr, dim3 Bl, float *mat, MatrixDim d);
void cudaF_mul_elements(dim3 Gr, dim3 Bl, float *mat, const float *A,
MatrixDim dst_d, int src_stride);
void cudaF_max(dim3 Gr, dim3 Bl, float *mat, const float *A, MatrixDim dst_d,
int src_stride);
void cudaF_mul_cols_vec(dim3 Gr, dim3 Bl, float *mat, const float *scale,
MatrixDim d);
void cudaF_mul_rows_vec(dim3 Gr, dim3 Bl, float *mat, const float *scale,
MatrixDim d);
void cudaF_mul_rows_group_mat(dim3 Gr, dim3 Bl, float *y, const float *x,
MatrixDim d, int src_stride, int group_size);
void cudaF_calc_pnorm_deriv(dim3 Gr, dim3 Bl, float *y, const float *x1,
const float *x2, MatrixDim d, int src_stride,
int group_size, float power);
void cudaF_div_rows_vec(dim3 Gr, dim3 Bl, float *mat, const float *vec_div,
MatrixDim d);
void cudaF_add_mat(dim3 Gr, dim3 Bl, float alpha, const float *src, float *dst,
MatrixDim d, int src_stride, int A_trans);
void cudaF_add_mat_mat_div_mat(dim3 Gr, dim3 Bl, const float *A, const float *B,
const float *C, float *dst, MatrixDim d);
void cudaF_add_vec_to_cols(dim3 Gr, dim3 Bl, float alpha, const float *col,
float beta, float *dst, MatrixDim d);
void cudaF_add_vec_to_rows(dim3 Gr, dim3 Bl, float alpha, const float *row,
float beta, float *dst, MatrixDim d);
/*
* CuVector
*/
void cudaF_replace_value(int Gr, int Bl, float *v, int dim, float orig,
float changed);
void cudaF_set_bias_params(int Gr, int Bl, float *v, const float *a,
float param_1, float param_2, float param_3,
int *flag, int dim);
void cudaF_copy_from_vec_df(int Gr, int Bl, double *v_out, const float *v_in,
int dim);
void cudaF_copy_from_vec_fd(int Gr, int Bl, float *v_out, const float *v_in,
int dim);
void cudaF_vec_mul_elements(int Gr, int Bl, float *v, const float *a, int dim);
void cudaF_vec_soft_max(int Gr, int Bl, float *v, int dim);
void cudaF_vec_min(const float *v, float *value, int dim);
void cudaF_vec_max(const float *v, float *value, int dim);
void cudaF_trace_mat_mat_trans(const float *A, const float *B, MatrixDim dA,
int B_stride, float *value);
void cudaF_trace_mat_mat(const float *A, const float *B, MatrixDim dA,
int B_stride, float *value);
void cudaF_add_diag_mat_mat(int Gr, int Bl, float alpha, float *v, int v_dim,
const float *M, int M_cols, int M_row_stride,
int M_col_stride, const float *N, int N_row_stride,
int N_col_stride, int threads_per_element,
float beta);
void cudaF_add_vec_vec(int Gr, int Bl, float alpha, float *v, const float *x,
const float *y, float beta, int dim);
void cudaF_copy_col_from_mat(int Gr, int Bl, float *v, int col,
const float *mat, MatrixDim dmat, int dim);
void cudaF_copy_col_from_mat_df(int Gr, int Bl, double *v, int col,
const float *mat, MatrixDim dmat, int dim);
void cudaF_copy_col_from_mat_fd(int Gr, int Bl, float *v, int col,
const float *mat, MatrixDim dmat, int dim);
void cudaF_vec_sum(int Gr, int Bl, float *v, float *value, int dim, int inc);
void cudaF_pvec_sum(int Gr, int Bl, float *vec, float *pvec_sum, int dim,
int size);
void cudaF_vec_copy_diag_from_packed(int Gr, int Bl, float *dst,
const float *src, int dim);
void cudaF_vec_apply_floor(int Gr, int Bl, float *v, float floor_val,
float *num, int dim);
void cudaF_vec_apply_exp(int Gr, int Bl, float *v, int dim);
void cudaF_vec_apply_log(int Gr, int Bl, float *v, float *flag, int dim);
void cudaF_trace(int Gr, int Bl, float *mat, float *value, int dim);
void cudaF_add_row_sum_mat(dim3 Gr, dim3 Bl, const float *mat, float *vec_sum,
MatrixDim d);
void cudaF_add_col_sum_mat(dim3 Gr, dim3 Bl, const float *mat, float *vec_sum,
MatrixDim d);
void cudaF_invert_elements(dim3 Gr, dim3 Bl, float *data, MatrixDim d);
// Note: B_trans is nonzero if B is transposed.
void cudaF_add_mat_blockmat(dim3 Gr, dim3 Bl, float *data, MatrixDim d,
const float *Adata, int A_num_rows, int A_num_cols,
int A_row_stride, int A_col_stride,
const CuBlockMatrixData *B_cu_data,
int B_num_blocks, float alpha, float beta,
int B_trans);
void cudaF_block_add_mat_mat(dim3 Gr, dim3 Bl, CuBlockMatrixData *B_cu_data,
int num_blocks, const float *C_data,
int C_num_cols, int C_row_stride, int C_col_stride,
const float *D_data, int D_row_stride,
int D_col_stride, float alpha, float beta);
/*
* cu::
*/
void cudaF_softmax(size_t Gr, size_t Bl, float *y, const float *x, MatrixDim d);
void cudaF_softmax_reduce(size_t Gr, size_t Bl, float *y, const float *x,
MatrixDim d, int src_stride);
void cudaF_softmax_part(dim3 Gr, dim3 Bl, const float *X,
const int32_cuda *vec_ids, float *Y, MatrixDim d);
void cudaF_soft_hinge(dim3 Gr, dim3 Bl, float *y, const float *x, MatrixDim d,
int src_stride);
void cudaF_group_pnorm(dim3 Gr, dim3 Bl, float *y, const float *x, MatrixDim d,
int src_stride, int group_size, float power);
void cudaF_sigmoid(dim3 Gr, dim3 Bl, float *y, const float *x, MatrixDim d,
int src_stride);
void cudaF_diff_sigmoid(dim3 Gr, dim3 Bl, float *eout, const float *e,
const float *y, MatrixDim d, int src_stride);
void cudaF_tanh(dim3 Gr, dim3 Bl, float *y, const float *x, MatrixDim d,
int src_stride);
void cudaF_diff_tanh(dim3 Gr, dim3 Bl, float *eout, const float *e,
const float *y, MatrixDim d);
void cudaF_regularize_l1(dim3 Gr, dim3 Bl, float *wei, float *grad, float l1,
float lr, MatrixDim d);
void cudaF_find_row_max_id(dim3 Gr, dim3 Bl, const float *mat, float *vec_val,
int32_cuda *vec_id, int32_cuda voff, MatrixDim d);
void cudaF_diff_xent(dim3 Gr, dim3 Bl, const int32_cuda *vec_tgt,
float *mat_net_out, float *vec_log_post, MatrixDim d);
void cudaF_copy_rows_from_vec(dim3 Gr, dim3 Bl, float *mat_out, MatrixDim d_out,
const float *v_in);
void cudaF_randomize(dim3 Gr, dim3 Bl, float *y, const float *x,
const int32_cuda *copy_from, MatrixDim d_out,
MatrixDim d_in);
void cudaF_splice(dim3 Gr, dim3 Bl, float *y, const float *x,
const int32_cuda *off, MatrixDim d_out, MatrixDim d_in);
void cudaF_one(int Gr, int Bl, float *x, int dim);
void cudaF_copy(dim3 Gr, dim3 Bl, float *y, const float *x,
const int32_cuda *copy_from, MatrixDim d_out, MatrixDim d_in);
void cudaF_copy_from_sp(dim3 Gr, dim3 Bl, const float *x, float *y,
MatrixDim d_out);
void cudaF_take_lower(dim3 Gr, dim3 Bl, const float *x, float *y,
MatrixDim d_in);
void cudaF_take_upper(dim3 Gr, dim3 Bl, const float *x, float *y,
MatrixDim d_in);
void cudaF_take_mean(dim3 Gr, dim3 Bl, const float *x, float *y,
MatrixDim d_in);
void cudaF_matrix_add_elements(dim3 Gr, dim3 Bl, float *data, MatrixDim dim,
float alpha, MatrixElement<float> *x, int s);
void cudaF_comp_obj_deriv(dim3 Gr, dim3 Bl, MatrixElement<float> *x, int s,
const float *z, MatrixDim d, float *z2, MatrixDim d2,
float *t);
void cudaF_transpose_matrix(dim3 Gr, dim3 Bl, float *mat, MatrixDim d);
void cudaF_sy_add_tr2(dim3 Gr, dim3 Bl, float alpha, float beta, const float *T,
MatrixDim tdim, float *S, MatrixDim sdim);
void cudaF_sum_column_ranges(dim3 Gr, dim3 Bl, float *data, MatrixDim dim,
const float *src_data, MatrixDim src_dim,
const Int32Pair *indices);
void cudaF_matrix_lookup(dim3 Gr, dim3 Bl, const float *data, MatrixDim dim,
const Int32Pair *indices, int indices_size,
float *output);
void cudaF_equal_element_mask(dim3 Gr, dim3 Bl, const float *mat1,
const float *mat2, float *mask,
MatrixDim mat1_dim, int mat2_stride,
int mask_stride);
/*********************************************************
* double CUDA kernel calls
*/
/*
* CuMatrix
*/
void cudaD_copy_upp_low(dim3 Gr, dim3 Bl, double *A, MatrixDim dimA);
void cudaD_copy_low_upp(dim3 Gr, dim3 Bl, double *A, MatrixDim dimA);
void cudaD_add_diag_vec_mat(dim3 Gr, dim3 Bl, double alpha, double *mat,
MatrixDim mat_dim, const double *vec,
const double *mat2, int mat2_row_stride,
int mat2_col_stride, double beta);
void cudaD_copy_from_tp_trans(dim3 Gr, dim3 Bl, double *A, const double *B,
MatrixDim dmat);
void cudaDF_copy_from_tp_trans(dim3 Gr, dim3 Bl, double *A, const float *B,
MatrixDim dmat);
void cudaD_copy_from_tp(dim3 Gr, dim3 Bl, double *A, const double *B,
MatrixDim dmat);
void cudaDF_copy_from_tp(dim3 Gr, dim3 Bl, double *A, const float *B,
MatrixDim dmat);
void cudaD_copy_col_from_vec(int Gr, int Bl, double *mat, const double *v,
int col, MatrixDim d);
void cudaD_apply_exp(dim3 Gr, dim3 Bl, double *mat, MatrixDim d);
void cudaD_apply_pow(dim3 Gr, dim3 Bl, double *mat, double power, MatrixDim d);
void cudaD_apply_pow_abs(dim3 Gr, dim3 Bl, double *mat, double power,
bool include_sign, MatrixDim d);
void cudaD_apply_heaviside(dim3 Gr, dim3 Bl, double *mat, MatrixDim d);
void cudaD_apply_floor(dim3 Gr, dim3 Bl, double *mat, double floor_val,
MatrixDim d);
void cudaD_copy_cols(dim3 Gr, dim3 Bl, double *dst, const double *src,
const MatrixIndexT_cuda *reorder, MatrixDim dst_dim,
int src_stride);
void cudaD_copy_rows(dim3 Gr, dim3 Bl, double *dst, const double *src,
const MatrixIndexT_cuda *reorder, MatrixDim dst_dim,
int src_stride);
void cudaD_apply_ceiling(dim3 Gr, dim3 Bl, double *mat, double ceiling_val,
MatrixDim d);
void cudaD_set_diag(int Gr, int Bl, double *mat, double value, MatrixDim d);
void cudaD_set_diag_packed(int Gr, int Bl, double *mat, double value, int dim);
void cudaD_add_diag_packed(int Gr, int Bl, double *mat, double value, int dim);
void cudaD_set_const(dim3 Gr, dim3 Bl, double *mat, double value, MatrixDim d);
void cudaD_set_zero_above_diag(dim3 Gr, dim3 Bl, double *mat, MatrixDim d);
void cudaD_add(dim3 Gr, dim3 Bl, double *mat, double value, MatrixDim d);
void cudaD_add_vec2(dim3 Gr, dim3 Bl, double *mat, const double *vec,
const double alpha, int dim);
void cudaD_scale_diag(int Gr, int Bl, double *mat, double value, int dim);
void cudaD_scale(dim3 Gr, dim3 Bl, double *mat, double value, MatrixDim d);
void cudaD_apply_log(dim3 Gr, dim3 Bl, double *mat, MatrixDim d);
void cudaD_mul_elements(dim3 Gr, dim3 Bl, double *mat, const double *A,
MatrixDim dst_d, int src_stride);
void cudaD_max(dim3 Gr, dim3 Bl, double *mat, const double *A, MatrixDim dst_d,
int src_stride);
void cudaD_mul_cols_vec(dim3 Gr, dim3 Bl, double *mat, const double *scale,
MatrixDim d);
void cudaD_mul_rows_vec(dim3 Gr, dim3 Bl, double *mat, const double *scale,
MatrixDim d);
void cudaD_mul_rows_group_mat(dim3 Gr, dim3 Bl, double *y, const double *x,
MatrixDim d, int src_stride, int group_size);
void cudaD_calc_pnorm_deriv(dim3 Gr, dim3 Bl, double *y, const double *x1,
const double *x2, MatrixDim d, int src_stride,
int group_size, double power);
void cudaD_div_rows_vec(dim3 Gr, dim3 Bl, double *mat, const double *vec_div,
MatrixDim d);
void cudaD_add_mat(dim3 Gr, dim3 Bl, double alpha, const double *src,
double *dst, MatrixDim d, int src_stride, int A_trans);
void cudaD_add_mat_mat_div_mat(dim3 Gr, dim3 Bl, const double *A,
const double *B, const double *C, double *dst,
MatrixDim d);
void cudaD_add_vec_to_cols(dim3 Gr, dim3 Bl, double alpha, const double *col,
double beta, double *dst, MatrixDim d);
void cudaD_add_vec_to_rows(dim3 Gr, dim3 Bl, double alpha, const double *row,
double beta, double *dst, MatrixDim d);
/*
* CuVector
*/
void cudaD_replace_value(int Gr, int Bl, double *v, int dim, double orig,
double changed);
void cudaD_set_bias_params(int Gr, int Bl, double *v, const double *a,
double param_1, double param_2, double param_3,
int *flag, int dim);
void cudaD_copy_from_vec_df(int Gr, int Bl, double *v_out, const double *v_in,
int dim);
void cudaD_copy_from_vec_fd(int Gr, int Bl, float *v_out, const double *v_in,
int dim);
void cudaD_vec_mul_elements(int Gr, int Bl, double *v, const double *a,
int dim);
void cudaD_vec_soft_max(int Gr, int Bl, double *v, int dim);
void cudaD_vec_min(const double *v, double *value, int dim);
void cudaD_vec_max(const double *v, double *value, int dim);
void cudaD_trace_mat_mat_trans(const double *A, const double *B, MatrixDim dA,
int B_stride, double *value);
void cudaD_trace_mat_mat(const double *A, const double *B, MatrixDim dA,
int B_stride, double *value);
void cudaD_add_diag_mat_mat(int Gr, int Bl, double alpha, double *v, int v_dim,
const double *M, int M_cols, int M_row_stride,
int M_col_stride, const double *N, int N_row_stride,
int N_col_stride, int threads_per_element,
double beta);
void cudaD_add_vec_vec(int Gr, int Bl, double alpha, double *v, const double *x,
const double *y, double beta, int dim);
void cudaD_copy_col_from_mat(int Gr, int Bl, double *v, int col,
const double *mat, MatrixDim dmat, int dim);
void cudaD_copy_col_from_mat_df(int Gr, int Bl, double *v, int col,
const double *mat, MatrixDim dmat, int dim);
void cudaD_copy_col_from_mat_fd(int Gr, int Bl, float *v, int col,
const double *mat, MatrixDim dmat, int dim);
void cudaD_vec_sum(int Gr, int Bl, double *v, double *value, int dim, int inc);
void cudaD_pvec_sum(int Gr, int Bl, double *vec, double *pvec_sum, int dim,
int size);
void cudaD_vec_copy_diag_from_packed(int Gr, int Bl, double *dst,
const double *src, int dim);
void cudaD_vec_apply_floor(int Gr, int Bl, double *v, double floor_val,
float *num, int dim);
void cudaD_vec_apply_exp(int Gr, int Bl, double *v, int dim);
void cudaD_vec_apply_log(int Gr, int Bl, double *v, double *flag, int dim);
void cudaD_trace(int Gr, int Bl, double *mat, double *value, int dim);
void cudaD_add_row_sum_mat(dim3 Gr, dim3 Bl, const double *mat, double *vec_sum,
MatrixDim d);
void cudaD_add_col_sum_mat(dim3 Gr, dim3 Bl, const double *mat, double *vec_sum,
MatrixDim d);
void cudaD_invert_elements(dim3 Gr, dim3 Bl, double *data, MatrixDim d);
// Note: B_trans is nonzero if B is transposed.
void cudaD_add_mat_blockmat(dim3 Gr, dim3 Bl, double *data, MatrixDim d,
const double *Adata, int A_num_rows, int A_num_cols,
int A_row_stride, int A_col_stride,
const CuBlockMatrixData *B_cu_data,
int B_num_blocks, double alpha, double beta,
int B_trans);
void cudaD_block_add_mat_mat(dim3 Gr, dim3 Bl, CuBlockMatrixData *B_cu_data,
int num_blocks, const double *C_data,
int C_num_cols, int C_row_stride, int C_col_stride,
const double *D_data, int D_row_stride,
int D_col_stride, double alpha, double beta);
/*
* cu::
*/
void cudaD_softmax(size_t Gr, size_t Bl, double *y, const double *x,
MatrixDim d);
void cudaD_softmax_reduce(size_t Gr, size_t Bl, double *y, const double *x,
MatrixDim d, int src_stride);
void cudaD_softmax_part(dim3 Gr, dim3 Bl, const double *X,
const int32_cuda *vec_ids, double *Y, MatrixDim d);
void cudaD_soft_hinge(dim3 Gr, dim3 Bl, double *y, const double *x, MatrixDim d,
int src_stride);
void cudaD_group_pnorm(dim3 Gr, dim3 Bl, double *y, const double *x,
MatrixDim d, int src_stride, int group_size,
double power);
void cudaD_sigmoid(dim3 Gr, dim3 Bl, double *y, const double *x, MatrixDim d,
int src_stride);
void cudaD_diff_sigmoid(dim3 Gr, dim3 Bl, double *eout, const double *e,
const double *y, MatrixDim d, int src_stride);
void cudaD_tanh(dim3 Gr, dim3 Bl, double *y, const double *x, MatrixDim d,
int src_stride);
void cudaD_diff_tanh(dim3 Gr, dim3 Bl, double *eout, const double *e,
const double *y, MatrixDim d);
void cudaD_regularize_l1(dim3 Gr, dim3 Bl, double *wei, double *grad, double l1,
double lr, MatrixDim d);
void cudaD_find_row_max_id(dim3 Gr, dim3 Bl, const double *mat, double *vec_val,
int32_cuda *vec_id, int32_cuda voff, MatrixDim d);
void cudaD_diff_xent(dim3 Gr, dim3 Bl, const int32_cuda *vec_tgt,
double *mat_net_out, double *vec_log_post, MatrixDim d);
void cudaD_copy_rows_from_vec(dim3 Gr, dim3 Bl, double *mat_out,
MatrixDim d_out, const double *v_in);
void cudaD_randomize(dim3 Gr, dim3 Bl, double *y, const double *x,
const int32_cuda *copy_from, MatrixDim d_out,
MatrixDim d_in);
void cudaD_splice(dim3 Gr, dim3 Bl, double *y, const double *x,
const int32_cuda *off, MatrixDim d_out, MatrixDim d_in);
void cudaD_one(int Gr, int Bl, double *x, int dim);
void cudaD_copy(dim3 Gr, dim3 Bl, double *y, const double *x,
const int32_cuda *copy_from, MatrixDim d_out, MatrixDim d_in);
void cudaD_copy_from_sp(dim3 Gr, dim3 Bl, const double *x, double *y,
MatrixDim d_out);
void cudaD_take_lower(dim3 Gr, dim3 Bl, const double *x, double *y,
MatrixDim d_in);
void cudaD_take_upper(dim3 Gr, dim3 Bl, const double *x, double *y,
MatrixDim d_in);
void cudaD_take_mean(dim3 Gr, dim3 Bl, const double *x, double *y,
MatrixDim d_in);
// some mostly mixed-type kernels.
void cuda_copy_from_mat_df(dim3 Gr, dim3 Bl, double *mat_out,
const float *mat_in, MatrixDim d_out,
MatrixDim d_in);
void cuda_copy_from_mat_ff(dim3 Gr, dim3 Bl, float *mat_out,
const float *mat_in, MatrixDim d_out,
MatrixDim d_in);
void cuda_copy_from_mat_fd(dim3 Gr, dim3 Bl, float *mat_out,
const double *mat_in, MatrixDim d_out,
MatrixDim d_in);
void cuda_copy_from_mat_dd(dim3 Gr, dim3 Bl, double *mat_out,
const double *mat_in, MatrixDim d_out,
MatrixDim d_in);
void cuda_copy_from_mat_df_trans(dim3 Gr, dim3 Bl, double *mat_out,
const float *mat_in, MatrixDim d_out,
MatrixDim d_in);
void cuda_copy_from_mat_ff_trans(dim3 Gr, dim3 Bl, float *mat_out,
const float *mat_in, MatrixDim d_out,
MatrixDim d_in);
void cuda_copy_from_mat_fd_trans(dim3 Gr, dim3 Bl, float *mat_out,
const double *mat_in, MatrixDim d_out,
MatrixDim d_in);
void cuda_copy_from_mat_dd_trans(dim3 Gr, dim3 Bl, double *mat_out,
const double *mat_in, MatrixDim d_out,
MatrixDim d_in);
void cudaD_matrix_add_elements(dim3 Gr, dim3 Bl, double *data, MatrixDim dim,
double alpha, MatrixElement<double> *x, int s);
void cudaD_comp_obj_deriv(dim3 Gr, dim3 Bl, MatrixElement<double> *x, int s,
const double *z, MatrixDim d, double *z2,
MatrixDim d2, double *t);
void cudaD_transpose_matrix(dim3 Gr, dim3 Bl, double *mat, MatrixDim d);
void cudaD_sy_add_tr2(dim3 Gr, dim3 Bl, double alpha, double beta,
const double *T, MatrixDim tdim, double *S,
MatrixDim sdim);
void cudaD_sum_column_ranges(dim3 Gr, dim3 Bl, double *data, MatrixDim dim,
const double *src_data, MatrixDim src_dim,
const Int32Pair *indices);
void cudaD_matrix_lookup(dim3 Gr, dim3 Bl, const double *data, MatrixDim dim,
const Int32Pair *indices, int indices_size,
double *output);
void cudaD_equal_element_mask(dim3 Gr, dim3 Bl, const double *mat1,
const double *mat2, double *mask,
MatrixDim mat1_dim, int mat2_stride,
int mask_stride);
} // extern "C"
#endif // HAVE_CUDA
#endif
<|start_filename|>tonic-suite/asr/src/gmm/diag-gmm-normal.h<|end_filename|>
// gmm/diag-gmm-normal.h
// Copyright 2009-2011 Saarland University <NAME> <NAME>
//
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#ifndef KALDI_GMM_DIAG_GMM_NORMAL_H_
#define KALDI_GMM_DIAG_GMM_NORMAL_H_ 1
#include <vector>
#include "base/kaldi-common.h"
#include "gmm/model-common.h"
#include "gmm/diag-gmm.h"
#include "matrix/matrix-lib.h"
namespace kaldi {
class DiagGmm;
/** \class DiagGmmNormal
* Definition for Gaussian Mixture Model with diagonal covariances in normal
 * mode, where the parameters are stored as means and variances (instead of
 * the exponential form in which the DiagGmm class stores them). This class will
* be used in the update (since the update formulas are for the standard
* parameterization) and then copied to the exponential form of the DiagGmm
* class. The DiagGmmNormal class will not be used anywhere else, and should
* not have any extra methods that are not needed.
*/
class DiagGmmNormal {
public:
/// Empty constructor.
DiagGmmNormal() {}
explicit DiagGmmNormal(const DiagGmm &gmm) { CopyFromDiagGmm(gmm); }
/// Resizes arrays to this dim. Does not initialize data.
void Resize(int32 nMix, int32 dim);
/// Copies from given DiagGmm
void CopyFromDiagGmm(const DiagGmm &diaggmm);
/// Copies to DiagGmm the requested parameters
void CopyToDiagGmm(DiagGmm *diaggmm, GmmFlagsType flags = kGmmAll) const;
int32 NumGauss() { return weights_.Dim(); }
int32 Dim() { return means_.NumCols(); }
Vector<double> weights_; ///< weights (not log).
Matrix<double> means_; ///< Means
Matrix<double> vars_; ///< diagonal variance
KALDI_DISALLOW_COPY_AND_ASSIGN(DiagGmmNormal);
};
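
// Illustrative usage sketch (an editorial addition, not part of the original
// Kaldi header), assuming an already-initialized DiagGmm 'gmm' and a
// hypothetical Matrix<double> 'new_means' with NumGauss() rows and Dim() columns:
//
//   DiagGmmNormal ngmm(gmm);               // exponential form -> means/variances
//   ngmm.means_.CopyFromMat(new_means);    // update in the standard parameterization
//   ngmm.CopyToDiagGmm(&gmm, kGmmMeans);   // write the updated means back to 'gmm'
//
// The flags argument to CopyToDiagGmm() selects which parameter groups
// (weights, means, variances) are copied back.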
} // End namespace kaldi
#endif // KALDI_GMM_DIAG_GMM_NORMAL_H_
<|start_filename|>tonic-suite/asr/src/fstbin/fstrand.cc<|end_filename|>
// fstbin/fstrand.cc
// Copyright 2009-2011 Microsoft Corporation
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/kaldi-io.h"
#include "util/parse-options.h"
#include "util/text-utils.h"
#include "fstext/rand-fst.h"
#include "time.h"
#include "fstext/fstext-utils.h"
int main(int argc, char *argv[]) {
try {
using namespace fst;
using kaldi::int32;
const char *usage =
"Generate random FST\n"
"\n"
"Usage: fstrand [out.fst]\n";
srand(time(NULL));
RandFstOptions opts;
kaldi::ParseOptions po(usage);
po.Register("allow-empty", &opts.allow_empty,
"If true, we may generate an empty FST.");
if (po.NumArgs() > 1) {
po.PrintUsage();
exit(1);
}
std::string fst_out_filename = po.GetOptArg(1);
VectorFst<StdArc> *rand_fst = RandFst<StdArc>(opts);
WriteFstKaldi(*rand_fst, fst_out_filename);
delete rand_fst;
return 0;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/online/online-decodable.cc<|end_filename|>
// online/online-decodable.cc
// Copyright 2012 Cisco Systems (author: <NAME>)
// Modifications to the original contribution by Cisco Systems made by:
// <NAME>
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "online/online-decodable.h"
namespace kaldi {
OnlineDecodableDiagGmmScaled::OnlineDecodableDiagGmmScaled(
const AmDiagGmm &am, const TransitionModel &trans_model,
const BaseFloat scale, OnlineFeatureMatrix *input_feats)
: features_(input_feats),
ac_model_(am),
ac_scale_(scale),
trans_model_(trans_model),
feat_dim_(input_feats->Dim()),
cur_frame_(-1) {
if (!input_feats->IsValidFrame(0)) {
// It's not safe to throw from a constructor, so please check
// this condition yourself before reaching this point in the code.
KALDI_ERR << "Attempt to initialize decodable object with empty "
<< "input: please check this before the initializer!";
}
int32 num_pdfs = trans_model_.NumPdfs();
cache_.resize(num_pdfs, std::pair<int32, BaseFloat>(-1, 0.0));
}
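
// Illustrative note (an editorial addition, not part of the original source):
// because the constructor above cannot safely report an empty input, callers are
// expected to verify the first frame themselves before constructing the object,
// e.g. (assuming 'feature_matrix', 'am_gmm', 'trans_model' and 'acoustic_scale'
// already exist in the caller's scope):
//
//   if (feature_matrix->IsValidFrame(0)) {
//     OnlineDecodableDiagGmmScaled decodable(am_gmm, trans_model, acoustic_scale,
//                                            feature_matrix);
//     // ... hand 'decodable' to the online decoder ...
//   }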
void OnlineDecodableDiagGmmScaled::CacheFrame(int32 frame) {
KALDI_ASSERT(frame >= 0);
cur_feats_.Resize(feat_dim_);
if (!features_->IsValidFrame(frame))
KALDI_ERR
<< "Request for invalid frame (you need to check IsLastFrame, or, "
<< "for frame zero, check that the input is valid.";
cur_feats_.CopyFromVec(features_->GetFrame(frame));
cur_frame_ = frame;
}
BaseFloat OnlineDecodableDiagGmmScaled::LogLikelihood(int32 frame,
int32 index) {
if (frame != cur_frame_) CacheFrame(frame);
int32 pdf_id = trans_model_.TransitionIdToPdf(index);
if (cache_[pdf_id].first == frame) return cache_[pdf_id].second;
BaseFloat ans = ac_model_.LogLikelihood(pdf_id, cur_feats_) * ac_scale_;
cache_[pdf_id].first = frame;
cache_[pdf_id].second = ans;
return ans;
}
bool OnlineDecodableDiagGmmScaled::IsLastFrame(int32 frame) const {
return !features_->IsValidFrame(frame + 1);
}
} // namespace kaldi
<|start_filename|>tonic-suite/nlp/src/SENNA_NER.h<|end_filename|>
#ifndef SENNA_NER_H
#define SENNA_NER_H
#include "tonic.h"
typedef struct SENNA_NER_ {
/* sizes */
int window_size;
int ll_word_size;
int ll_word_max_idx;
int ll_caps_size;
int ll_caps_max_idx;
int ll_gazl_size;
int ll_gazl_max_idx;
int ll_gazm_size;
int ll_gazm_max_idx;
int ll_gazo_size;
int ll_gazo_max_idx;
int ll_gazp_size;
int ll_gazp_max_idx;
int input_state_size;
int hidden_state_size;
int output_state_size;
/* weights */
float *ll_word_weight;
float *ll_caps_weight;
float *ll_gazl_weight;
float *ll_gazm_weight;
float *ll_gazo_weight;
float *ll_gazp_weight;
float *l1_weight;
float *l1_bias;
float *l2_weight;
float *l2_bias;
float *viterbi_score_init;
float *viterbi_score_trans;
/* states */
float *input_state;
float *hidden_state;
float *output_state;
int *labels;
/* padding indices */
int ll_word_padding_idx;
int ll_caps_padding_idx;
int ll_gazt_padding_idx;
/* service flag */
bool service;
bool debug;
/* profiling */
int calls;
unsigned int apptime;
unsigned int dnntime;
} SENNA_NER;
SENNA_NER *SENNA_NER_new(const char *path, const char *subpath);
int *SENNA_NER_forward(SENNA_NER *ner, const int *sentence_words,
const int *sentence_caps, const int *sentence_gazl,
const int *sentence_gazm, const int *sentence_gazo,
const int *sentence_gazp, TonicSuiteApp app);
void SENNA_NER_free(SENNA_NER *ner);
#endif
<|start_filename|>tonic-suite/asr/src/bin/acc-tree-stats.cc<|end_filename|>
// bin/acc-tree-stats.cc
// Copyright 2009-2011 Microsoft Corporation, GoVivace Inc.
// 2013 Johns Hopkins University (author: <NAME>)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "tree/context-dep.h"
#include "tree/build-tree-utils.h"
#include "hmm/transition-model.h"
#include "hmm/tree-accu.h"
/** @brief Accumulate tree statistics for decision tree training. The
    program reads in a feature archive and the corresponding alignments,
    and generates the sufficient statistics for decision tree
    creation. Context width and central phone position are used to
    identify the contexts. The transition model is used as an input to
    identify the PDFs and the phones. */
int main(int argc, char *argv[]) {
using namespace kaldi;
typedef kaldi::int32 int32;
try {
const char *usage =
"Accumulate statistics for phonetic-context tree building.\n"
"Usage: acc-tree-stats [options] model-in features-rspecifier "
"alignments-rspecifier [tree-accs-out]\n"
"e.g.: \n"
" acc-tree-stats 1.mdl scp:train.scp ark:1.ali 1.tacc\n";
ParseOptions po(usage);
bool binary = true;
float var_floor = 0.01;
    std::string ci_phones_str;
std::string phone_map_rxfilename;
int N = 3;
int P = 1;
po.Register("binary", &binary, "Write output in binary mode");
po.Register("var-floor", &var_floor, "Variance floor for tree clustering.");
po.Register("ci-phones", &ci_phones_str,
"Colon-separated list of integer "
"indices of context-independent phones (after mapping, if "
"--phone-map option is used).");
po.Register("context-width", &N, "Context window size.");
po.Register("central-position", &P,
"Central context-window position "
"(zero-based)");
po.Register("phone-map", &phone_map_rxfilename,
"File name containing old->new phone mapping (each line is: "
"old-integer-id new-integer-id)");
po.Read(argc, argv);
if (po.NumArgs() < 3 || po.NumArgs() > 4) {
po.PrintUsage();
exit(1);
}
std::string model_filename = po.GetArg(1),
feature_rspecifier = po.GetArg(2),
alignment_rspecifier = po.GetArg(3),
accs_out_wxfilename = po.GetOptArg(4);
std::vector<int32> phone_map;
if (phone_map_rxfilename != "") { // read phone map.
ReadPhoneMap(phone_map_rxfilename, &phone_map);
}
std::vector<int32> ci_phones;
if (ci_phones_str != "") {
SplitStringToIntegers(ci_phones_str, ":", false, &ci_phones);
std::sort(ci_phones.begin(), ci_phones.end());
if (!IsSortedAndUniq(ci_phones) || ci_phones[0] == 0) {
KALDI_ERR << "Invalid set of ci_phones: " << ci_phones_str;
}
}
TransitionModel trans_model;
{
bool binary;
Input ki(model_filename, &binary);
trans_model.Read(ki.Stream(), binary);
// There is more in this file but we don't need it.
}
SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
RandomAccessInt32VectorReader alignment_reader(alignment_rspecifier);
std::map<EventType, GaussClusterable *> tree_stats;
int num_done = 0, num_no_alignment = 0, num_other_error = 0;
for (; !feature_reader.Done(); feature_reader.Next()) {
std::string key = feature_reader.Key();
if (!alignment_reader.HasKey(key)) {
num_no_alignment++;
} else {
const Matrix<BaseFloat> &mat = feature_reader.Value();
const std::vector<int32> &alignment = alignment_reader.Value(key);
if (alignment.size() != mat.NumRows()) {
KALDI_WARN << "Alignments has wrong size " << (alignment.size())
<< " vs. " << (mat.NumRows());
num_other_error++;
continue;
}
////// This is the important part of this program. ////////
AccumulateTreeStats(
trans_model, var_floor, N, P, ci_phones, alignment, mat,
(phone_map_rxfilename != "" ? &phone_map : NULL), &tree_stats);
num_done++;
if (num_done % 1000 == 0)
KALDI_LOG << "Processed " << num_done << " utterances.";
}
}
BuildTreeStatsType stats; // vectorized form.
for (std::map<EventType, GaussClusterable *>::const_iterator iter =
tree_stats.begin();
iter != tree_stats.end(); iter++) {
stats.push_back(std::make_pair(iter->first, iter->second));
}
tree_stats.clear();
{
Output ko(accs_out_wxfilename, binary);
WriteBuildTreeStats(ko.Stream(), binary, stats);
}
KALDI_LOG << "Accumulated stats for " << num_done << " files, "
<< num_no_alignment << " failed due to no alignment, "
<< num_other_error << " failed for other reasons.";
KALDI_LOG << "Number of separate stats (context-dependent states) is "
<< stats.size();
DeleteBuildTreeStats(&stats);
if (num_done != 0)
return 0;
else
return 1;
} catch (const std::exception &e) {
std::cerr << e.what();
return -1;
}
}
<|start_filename|>tonic-suite/asr/src/ivector/plda-test.cc<|end_filename|>
// ivector/plda-test.cc
// Copyright 2013 <NAME>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "ivector/plda.h"
namespace kaldi {
void UnitTestPldaEstimation(int32 dim) {
int32 num_classes = 4000 + Rand() % 10;
Matrix<double> between_proj(dim, dim);
between_proj.SetRandn();
Matrix<double> within_proj(dim, dim);
within_proj.SetRandn();
Vector<double> global_mean(dim);
global_mean.SetRandn();
global_mean.Scale(10.0);
PldaStats stats;
for (int32 n = 0; n < num_classes; n++) {
int32 num_egs = 1 + Rand() % 30;
Vector<double> rand_vec(dim);
rand_vec.SetRandn();
Vector<double> class_mean(global_mean);
class_mean.AddMatVec(1.0, between_proj, kNoTrans, rand_vec, 1.0);
Matrix<double> rand_mat(num_egs, dim);
rand_mat.SetRandn();
Matrix<double> offset_mat(num_egs, dim);
offset_mat.AddMatMat(1.0, rand_mat, kNoTrans, within_proj, kTrans, 0.0);
offset_mat.AddVecToRows(1.0, class_mean);
double weight = 1.0 + (0.1 * (Rand() % 30));
stats.AddSamples(weight, offset_mat);
}
SpMatrix<double> between_var(dim), within_var(dim);
between_var.AddMat2(1.0, between_proj, kNoTrans, 0.0);
within_var.AddMat2(1.0, within_proj, kNoTrans, 0.0);
stats.Sort();
PldaEstimator estimator(stats);
Plda plda;
PldaEstimationConfig config;
estimator.Estimate(config, &plda);
KALDI_LOG << "Trace of true within-var is " << within_var.Trace();
KALDI_LOG << "Trace of true between-var is " << between_var.Trace();
{
TpMatrix<double> C(dim);
C.Cholesky(within_var);
C.Invert();
SpMatrix<double> between_var_proj(dim);
between_var_proj.AddTp2Sp(1.0, C, kNoTrans, between_var, 0.0);
Vector<double> s(dim);
between_var_proj.Eig(&s);
s.Scale(-1.0);
std::sort(s.Data(), s.Data() + s.Dim());
s.Scale(-1.0);
KALDI_LOG << "Diagonal of between-class variance in normalized space "
<< "should be: " << s;
}
}
}
/*
This test is really just making sure that the PLDA estimation does not
crash. As for testing that it's working: I did this by eyeballing the
output where it says "Trace of true within-var is XX" or "Trace of true
between-var is XX" and comparing with the output from the estimation
that says, "Trace of within-class variance is XX" and "Trace of betweeen-class
variance is XX" (on the last iteration). I make sure they are similar.
I also checked that the objective function (where it says
"Objective function is XX" is non-decreasing, and seems to be converging.
*/
int main() {
using namespace kaldi;
SetVerboseLevel(3);
for (int i = 0; i < 5; i++) UnitTestPldaEstimation(i + 1);
// UnitTestPldaEstimation(400);
UnitTestPldaEstimation(80);
std::cout << "Test OK.\n";
return 0;
}
<|start_filename|>tonic-suite/nlp/src/SENNA_Hash.h<|end_filename|>
#ifndef SENNA_HASH_H
#define SENNA_HASH_H
typedef struct SENNA_Hash_ {
char *is_admissible_key;
char **keys;
int size;
char is_sorted;
} SENNA_Hash;
SENNA_Hash *SENNA_Hash_new(const char *path, const char *filename);
SENNA_Hash *SENNA_Hash_new_with_admissible_keys(
const char *path, const char *filename,
const char *admissible_keys_filename);
int SENNA_Hash_index(SENNA_Hash *hash, const char *key);
const char *SENNA_Hash_key(SENNA_Hash *hash, int idx);
void SENNA_Hash_convert_IOBES_to_brackets(SENNA_Hash *hash);
void SENNA_Hash_convert_IOBES_to_IOB(SENNA_Hash *hash);
int SENNA_Hash_size(SENNA_Hash *hash);
char SENNA_Hash_is_admissible_index(SENNA_Hash *hash, int idx);
void SENNA_Hash_free(SENNA_Hash *hash);
#endif
| csb1024/djinn_csb |
<|start_filename|>src/java/com/snowtide/pdf/lucene/LucenePDFConfiguration.java<|end_filename|>
package com.snowtide.pdf.lucene;
import java.util.HashMap;
import java.util.Map;
/**
* Instances of this class are used to control the creation of Lucene Documents from PDF content
* through the {@link LucenePDFDocumentFactory} class.
*
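 * <p>
 * A minimal configuration sketch (illustrative only; the field names used here are arbitrary):
 * </p>
 * <pre>{@code
 * LucenePDFConfiguration config = new LucenePDFConfiguration("contents");
 * config.setCopyAllPDFMetadata(false);
 * config.setMetadataFieldMapping(com.snowtide.pdf.Document.ATTR_AUTHOR, "author");
 * config.setBodyTextSettings(false, true, true); // index and tokenize the body text, but do not store it
 * }</pre>
 *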
* @see <a href="http://www.snowtide.com/help/indexing-pdf-documents-with-lucene-and-pdfxstream">Indexing PDF
* Documents with Lucene and PDFxStream</a> for usage details
*
* @version ©2004-2014 Snowtide, http://snowtide.com, licensed under MIT. See LICENSE in the top level of the
* <a href="https://github.com/snowtide/lucene-pdf">lucene-pdf</a> project directory.
*/
public class LucenePDFConfiguration {
/**
* The default name assigned to the Lucene Field containing the main body of text extracted from a PDF file:
* <code>"text"</code>.
*/
public static final String DEFAULT_MAIN_TEXT_FIELD_NAME = "text";
/**
* Mapping from PDF metadata keys to Lucene document field names.
*/
private final HashMap<String, String> metadataFieldMapping = new HashMap<String,String>();
private boolean copyAllPDFMetadata = true;
private boolean indexBodyText = true;
private boolean storeBodyText = false;
private boolean tokenizeBodyText = true;
private boolean indexMetadata = true;
private boolean storeMetadata = true;
private boolean tokenizeMetadata = true;
private String bodyTextFieldName = DEFAULT_MAIN_TEXT_FIELD_NAME;
/**
* Creates a new config object. The resulting object retains the default configuration
* except for the name assigned to the Lucene Field that contains the main PDF text content.
*
* @param mainTextFieldName - the name that should be assigned to Fields containing
* the main PDF text content.
*/
public LucenePDFConfiguration (String mainTextFieldName) {
setBodyTextFieldName(mainTextFieldName);
}
/**
* Creates a new config object. Fields containing the main text content of
* {@link com.snowtide.pdf.Document PDF documents} converted into
* Lucene Documents will be assigned a {@link LucenePDFConfiguration#DEFAULT_MAIN_TEXT_FIELD_NAME
* default name}. Other configuration defaults are as follows:
* <ul>
* <li>All PDF metadata attributes are copied to the resulting Lucene documents</li>
* <li>The main text content is tokenized and indexed, but not stored</li>
* <li>The PDF metadata attributes are tokenized, stored, and indexed.</li>
* </ul>
*/
public LucenePDFConfiguration () {
this(DEFAULT_MAIN_TEXT_FIELD_NAME);
}
/**
* Sets the name that will be assigned to Lucene Fields containing PDF body text content.
*/
public void setBodyTextFieldName (String bodyTextFieldName) {
this.bodyTextFieldName = bodyTextFieldName;
}
/**
* Returns the name that will be assigned to Lucene Fields containing PDF body text content.
*/
public String getBodyTextFieldName () {
return bodyTextFieldName;
}
/**
* Returns a copy of the mapping between PDF metadata attributes and the names given to Lucene fields created for
* them.
*/
public Map<String,String> getMetadataFieldMapping () {
return new HashMap<String,String>(metadataFieldMapping);
}
/**
* Returns the name that should be given to Lucene Fields created from the value of the named PDF metadata
* attribute.
*/
public String getMetadataFieldMapping (String pdfMetadataAttr) {
return metadataFieldMapping.get(pdfMetadataAttr);
}
/**
* Sets the name that will be assigned to Lucene Fields corresponding to the provided PDF metadata attribute
* name (e.g. {@link com.snowtide.pdf.Document#ATTR_AUTHOR}, etc).
*/
public void setMetadataFieldMapping (String pdfMetadataAttr, String fieldName) {
metadataFieldMapping.put(pdfMetadataAttr, fieldName);
}
/**
* Returns true if any PDF metadata attributes not explicitly {@link #getMetadataFieldMapping() mapped} will
* be added to generated Lucene Documents using their names as specified in the source PDFs.
*/
public boolean copyAllPDFMetadata() {
return copyAllPDFMetadata;
}
/**
* @see LucenePDFConfiguration#copyAllPDFMetadata()
*/
public void setCopyAllPDFMetadata(boolean b) {
copyAllPDFMetadata = b;
}
/**
* Sets Field attributes that will be used when creating the Field object for the main text content of
* a PDF document. These attributes correspond to the <code>store</code>,
* <code>index</code>, and <code>token</code> parameters of the {@link org.apache.lucene.document.Field}
* constructor before Lucene v4.x and the same-named attributes of {@link org.apache.lucene.document.FieldType}
* afterwards.
*/
public void setBodyTextSettings (boolean store, boolean index, boolean token) {
indexBodyText = index;
storeBodyText = store;
tokenizeBodyText = token;
}
/**
* Sets Field attributes that will be used when creating Field objects for the document attributes found in
* a PDF document. These attributes correspond to the <code>store</code>,
* <code>index</code>, and <code>token</code> parameters of the {@link org.apache.lucene.document.Field}
* constructor before Lucene v4.x and the same-named attributes of {@link org.apache.lucene.document.FieldType}
* afterwards.
*/
public void setMetadataSettings (boolean store, boolean index, boolean token) {
indexMetadata = index;
storeMetadata = store;
tokenizeMetadata = token;
}
/**
* Returns true if the main body text of PDFs added to Lucene Documents created through
* {@link LucenePDFDocumentFactory} using this config object will be indexed.
*/
public boolean indexBodyText () {
return indexBodyText;
}
/**
* Returns true if the main body text of PDFs added to Lucene Documents created through
* {@link LucenePDFDocumentFactory} using this config object will be stored.
*/
public boolean storeBodyText () {
return storeBodyText;
}
/**
* Returns true if the main body text of PDFs added to Lucene Documents created through
* {@link LucenePDFDocumentFactory} using this config object will be tokenized.
*/
public boolean tokenizeBodyText () {
return tokenizeBodyText;
}
/**
 * Returns true if the PDF metadata attributes added to Lucene Documents created through
* {@link LucenePDFDocumentFactory} using this config object will be indexed.
*/
public boolean indexMetadata () {
return indexMetadata;
}
/**
 * Returns true if the PDF metadata attributes added to Lucene Documents created through
* {@link LucenePDFDocumentFactory} using this config object will be stored.
*/
public boolean storeMetadata () {
return storeMetadata;
}
/**
 * Returns true if the PDF metadata attributes added to Lucene Documents created through
* {@link LucenePDFDocumentFactory} using this config object will be tokenized.
*/
public boolean tokenizeMetadata () {
return tokenizeMetadata;
}
}
<|start_filename|>src/java/com/snowtide/pdf/lucene/package.html<|end_filename|>
<body>
The <code>com.snowtide.pdf.lucene</code> package provides an easy way to index PDF documents with
<a href="http://lucene.apache.org">Apache Lucene</a> using <a href="http://snowtide.com">PDFxStream</a>.
See {@link com.snowtide.pdf.lucene.LucenePDFDocumentFactory}, <a href="http://www.snowtide.com/help/indexing-pdf-documents-with-lucene-and-pdfxstream">Indexing PDF Documents with Lucene and PDFxStream</a>, or the
<a href="http://github.com/snowtide/lucene-pdf">lucene-pdf</a> project readme for usage information.
</body>
<|start_filename|>src/lucene-1/com/snowtide/pdf/lucene/LuceneInterface1.java<|end_filename|>
package com.snowtide.pdf.lucene;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
/**
* Implementation of {@link LucenePDFDocumentFactory.LuceneInterface} corresponding to Lucene v1.x.
*
* @version ©2004-2014 Snowtide, http://snowtide.com, licensed under MIT. See LICENSE in the top level of the
* <a href="https://github.com/snowtide/lucene-pdf">lucene-pdf</a> project directory.
*/
public class LuceneInterface1 extends LucenePDFDocumentFactory.LuceneInterface {
public void addField (Document doc, String name, String value, boolean store, boolean index, boolean tokenize) {
doc.add(new Field(name, value, store, index, tokenize));
}
public int version () {
return 1;
}
}
<|start_filename|>src/java/com/snowtide/pdf/lucene/LucenePDFDocumentFactory.java<|end_filename|>
package com.snowtide.pdf.lucene;
import com.snowtide.pdf.OutputTarget;
import com.snowtide.pdf.PDFDateParser;
import com.snowtide.util.logging.Log;
import com.snowtide.util.logging.LoggingRegistry;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DateTools;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Map;
/**
* <p>
* This class enables easy Lucene indexing of PDF text and metadata via integration with PDFxStream. A supported
* lucene-core library jar must be on any application's classpath that uses this class.
* </p>
* <p>
* Typical usage would be to create a new {@link LucenePDFConfiguration} object, configure it as desired, and pass that object into
* {@link #buildPDFDocument(com.snowtide.pdf.Document, LucenePDFConfiguration)}
* along with an open {@link com.snowtide.pdf.Document}. A Lucene {@link org.apache.lucene.document.Document} will be
* returned containing {@link org.apache.lucene.document.Field}s corresponding to the source PDF document's text and
* metadata, as dictated by the provided configuration object.
* </p>
* <p>
* {@link #buildPDFDocument(com.snowtide.pdf.Document)} is also provided; this does not require a configuration
* object, but results in Lucene {@link org.apache.lucene.document.Document}s that contain a direct dump of the PDF's
* text content and metadata attributes according to a {@link
* LucenePDFConfiguration#LucenePDFConfiguration() default configuration}.
* This makes little sense in most environments, where the default names of PDF
* metadata attributes are unlikely to match the names of the corresponding Lucene Fields for those
* metadata attributes. See {@link LucenePDFConfiguration} for details of
* the default configuration of instances of that class.
* </p>
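 * <p>
 * A minimal sketch of that typical usage (illustrative only; the file name and field names are arbitrary):
 * </p>
 * <pre>{@code
 * LucenePDFConfiguration config = new LucenePDFConfiguration("text");
 * config.setMetadataFieldMapping(com.snowtide.pdf.Document.ATTR_TITLE, "title");
 * com.snowtide.pdf.Document pdf = com.snowtide.PDF.open("sample.pdf");
 * try {
 *     org.apache.lucene.document.Document luceneDoc =
 *         LucenePDFDocumentFactory.buildPDFDocument(pdf, config);
 *     // ... add 'luceneDoc' to an org.apache.lucene.index.IndexWriter ...
 * } finally {
 *     pdf.close();
 * }
 * }</pre>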
*
* @see <a href="http://www.snowtide.com/help/indexing-pdf-documents-with-lucene-and-pdfxstream">Indexing PDF
* Documents with Lucene and PDFxStream</a> for usage details
* @version ©2004-2014 Snowtide, http://snowtide.com, licensed under MIT. See LICENSE in the top level of the
* <a href="https://github.com/snowtide/lucene-pdf">lucene-pdf</a> project directory.
*/
public class LucenePDFDocumentFactory {
private static final Log log = LoggingRegistry.getLog(LucenePDFDocumentFactory.class);
private static final boolean LOG_DEBUG = log.isDebugEnabled();
private static final LucenePDFConfiguration DEFAULT_CONFIG = new LucenePDFConfiguration();
static LuceneInterface LUCENE_INTERFACE;
static {
try {
Class c = Class.forName("org.apache.lucene.document.FieldType");
LUCENE_INTERFACE = (LuceneInterface) Class.forName("com.snowtide.pdf.lucene.LuceneInterface4").newInstance();
log.info("Recognized Lucene v4.0.0 or greater.");
} catch (Throwable t3) {
try {
Class c = Class.forName("org.apache.lucene.document.Field$Index");
if (c.getField("ANALYZED") != null) {
LUCENE_INTERFACE = (LuceneInterface) Class.forName("com.snowtide.pdf.lucene.LuceneInterface3").newInstance();
log.info("Recognized Lucene v2.4 or greater.");
} else {
throw new IllegalStateException();
}
} catch (Throwable t1) {
try {
Class.forName("org.apache.lucene.document.Fieldable");
LUCENE_INTERFACE = (LuceneInterface) Class.forName("com.snowtide.pdf.lucene.LuceneInterface2").newInstance();
log.info("Recognized Lucene v2.1 or greater.");
} catch (Throwable t) {
try {
Class.forName("org.apache.lucene.document.Field$Store");
LUCENE_INTERFACE = (LuceneInterface) Class.forName("com.snowtide.pdf.lucene.LuceneInterface1").newInstance();
log.info("Recognized Lucene v1.9 or greater.");
} catch (Throwable t2) {
log.error("Could not recognize Lucene library version, PDFxStream Lucene integration will fail.");
}
}
}
}
}
/**
     * Creates a new Lucene Document instance from the PDF text and metadata provided by the PDFxStream
     * Document, using a default {@link LucenePDFConfiguration#LucenePDFConfiguration() configuration} to control
     * Lucene field names, etc.
*/
public static Document buildPDFDocument (com.snowtide.pdf.Document pdf) throws IOException {
return buildPDFDocument(pdf, DEFAULT_CONFIG);
}
/**
     * Creates a new Lucene Document instance from the PDF text and metadata provided by the PDFxStream
     * Document, using the provided {@link LucenePDFConfiguration} to control Lucene field
     * names, etc.
*/
public static Document buildPDFDocument (com.snowtide.pdf.Document pdf, LucenePDFConfiguration config) throws
IOException {
StringWriter sb = new StringWriter();
pdf.pipe(new OutputTarget(sb));
Document doc = new Document();
LUCENE_INTERFACE.addField(doc, config.getBodyTextFieldName(), sb.toString(),
config.storeBodyText(), config.indexBodyText(), config.tokenizeBodyText());
for (Map.Entry<String, Object> metadataEntry : pdf.getAttributeMap().entrySet()) {
String docPropName = metadataEntry.getKey();
String fieldName = config.getMetadataFieldMapping(docPropName);
if (fieldName == null) {
if (config.copyAllPDFMetadata()) {
fieldName = docPropName;
} else {
continue;
}
}
Object value = metadataEntry.getValue();
String valueStr;
if (value == null) {
if (LOG_DEBUG) log.debug("Null document property value found for name ["+docPropName+"] ("+pdf.getName()+')');
continue;
} else if (value instanceof String) {
if (docPropName.equals(com.snowtide.pdf.Document.ATTR_MOD_DATE) ||
docPropName.equals(com.snowtide.pdf.Document.ATTR_CREATION_DATE)) {
try {
valueStr = DateTools.dateToString(PDFDateParser.parseDateString((String)value),
DateTools.Resolution.MILLISECOND);
} catch (Exception e) {
log.warn("PDF date string could not be parsed into a java.util.Date instance ["+value+"] ("+pdf.getName()+')', e);
valueStr = (String)value;
}
} else {
valueStr = (String)value;
}
} else if (value instanceof Number) {
valueStr = value.toString();
} else {
if (LOG_DEBUG) log.debug("Unexpected document property value type: "+value.getClass().getName()+
", for name ("+docPropName+") ("+pdf.getName()+')');
continue;
}
LUCENE_INTERFACE.addField(doc, fieldName, valueStr,
config.storeMetadata(), config.indexMetadata(), config.tokenizeMetadata());
}
return doc;
}
/**
* Very thin interface implemented by shim classes to allow
* {@link LucenePDFDocumentFactory} to be used with
* any version of Lucene without separate per-version implementation dependencies.
*/
static abstract class LuceneInterface {
public abstract void addField (Document doc, String name, String value, boolean store, boolean index, boolean tokenize);
public abstract int version ();
}
}
<|start_filename|>project.clj<|end_filename|>
(defproject com.snowtide/lucene-pdf "3.0.1-SNAPSHOT"
:description "A library enabling easy Lucene indexing of PDF text and metadata"
:url "http://github.com/snowtide/lucene-pdf"
:license {:name "MIT"
:url "http://opensource.org/licenses/MIT"}
:min-lein-version "2.4.2"
:dependencies [[com.snowtide/pdfxstream "3.1.1"]
[org.apache.lucene/lucene-core "1.9.1"]]
:auto-clean false
:omit-source true
:java-source-paths ["src/java"]
:test-paths ["test"]
:plugins [[s3-wagon-private "1.1.2"]]
:repositories {"snowtide-releases" {:url "http://maven.snowtide.com/releases"}}
:profiles {:lucene-1 [:dev :base {:dependencies []
:java-source-paths ["src/lucene-1"]}]
:lucene-2 [:dev :base {:dependencies [[org.apache.lucene/lucene-core "2.9.4"]]
:java-source-paths ["src/lucene-2"]}]
:lucene-3 [:dev :base {:dependencies [[org.apache.lucene/lucene-core "3.6.2"]]
:java-source-paths ["src/lucene-3"]}]
:lucene-4 [:dev :base {:dependencies [[org.apache.lucene/lucene-core "4.10.2"]
[org.apache.lucene/lucene-analyzers-common "4.10.2"]]
:java-source-paths ["src/lucene-4"]}]
:dev {:dependencies [[org.clojure/clojure "1.6.0"]]}}
:classifiers {:sources {:resource-paths ["src/java" "src/lucene-1" "src/lucene-2" "src/lucene-3" "src/lucene-4"]
:java-source-paths ^:replace []}
; lein-javadoc plugin (via its dependencies) ends up adding a tools.jar into the generated project jar
; TODO update when https://github.com/davidsantiago/lein-javadoc/issues/1 is fixed
:javadoc {:plugins [[lein-javadoc "0.1.1"]]
:dependencies [[org.clojure/clojure "1.6.0"]]
:resource-paths ^:replace ["target/javadoc/javadoc"]
:javadoc-opts {:package-names "com.snowtide.pdf.lucene"
:output-dir "target/javadoc/javadoc"
:additional-args ["-Xdoclint:-missing" "-version" "-charset" "UTF-8"
"-docencoding" "UTF-8" "-encoding" "UTF-8"]}
:javac-options ["-target" "1.5" "-source" "1.5"]
:prep-tasks ["javadoc"]}}
:aliases {"compile+" ["with-profile" "lucene-1:lucene-2:lucene-3:lucene-4" "do" "javac," "test"]
"release" ["do" "clean," "compile+," "release"]}
:deploy-repositories {"releases" {:url "https://oss.sonatype.org/service/local/staging/deploy/maven2/" :creds :gpg}
"snapshots" {:url "https://oss.sonatype.org/content/repositories/snapshots/" :creds :gpg}}
;;maven central requirements
:scm {:url "<EMAIL>:snowtide/lucene-pdf.git"}
:pom-addition [:developers [:developer
[:name "<NAME>"]
[:url "http://snowtide.com"]
[:email "<EMAIL>"]
[:timezone "-5"]]] )
<|start_filename|>test/test_lucene_integration.clj<|end_filename|>
(ns test-lucene-integration
(:import com.snowtide.PDF
            (com.snowtide.pdf PDFDateParser Document)
(com.snowtide.pdf.lucene LucenePDFDocumentFactory LucenePDFConfiguration)
(org.apache.lucene.document Field DateTools)
(org.apache.lucene.index IndexWriter IndexReader Term)
(org.apache.lucene.store Directory FSDirectory)
(org.apache.lucene.search IndexSearcher TermQuery PhraseQuery)
org.apache.lucene.analysis.standard.StandardAnalyzer
java.io.File
java.util.Date)
(:use clojure.test))
(def filename-field-name "pdf__filename")
(def ^:dynamic ^:private *idx-reader*)
(def ^:private lucene-interface
(.get (doto (.getDeclaredField LucenePDFDocumentFactory "LUCENE_INTERFACE")
(.setAccessible true))
nil))
(defn- build-lucene-document [pdf-file config]
(with-open [pdf (PDF/open pdf-file)]
(let [lucene-doc (LucenePDFDocumentFactory/buildPDFDocument pdf config)]
(.addField lucene-interface lucene-doc
filename-field-name (.getName pdf)
true false false)
lucene-doc)))
(defn- populate-index [^IndexWriter w]
(let [config (doto (LucenePDFConfiguration.)
(.setCopyAllPDFMetadata true))]
(.addDocument w (build-lucene-document (File. "test/key_steps.pdf") config))
(doto config
(.setBodyTextFieldName "alt_text_field")
(.setCopyAllPDFMetadata false)
(.setMetadataFieldMapping "Author" "author_attr_field_name"))
(.addDocument w (build-lucene-document (File. "test/DocumentSerialization.pdf") config))
w))
(def fsdirectory
(try
(eval '#(org.apache.lucene.store.SimpleFSDirectory. %))
(catch Throwable t
(eval '#(FSDirectory/getDirectory % true)))))
(def index-writer
(case (.version lucene-interface)
(1 2) (eval '#(IndexWriter. % (org.apache.lucene.analysis.standard.StandardAnalyzer.) true))
3 (eval '#(IndexWriter. %
(org.apache.lucene.analysis.standard.StandardAnalyzer.
org.apache.lucene.util.Version/LUCENE_CURRENT)
org.apache.lucene.index.IndexWriter$MaxFieldLength/UNLIMITED))
4 (eval '#(IndexWriter. %
(org.apache.lucene.index.IndexWriterConfig.
org.apache.lucene.util.Version/LUCENE_CURRENT
(org.apache.lucene.analysis.standard.StandardAnalyzer.
org.apache.lucene.util.Version/LUCENE_CURRENT))))))
(defn- setup-index [f]
(let [index-dir (fsdirectory (File. (str "target/test-index" (.version lucene-interface))))]
(-> (index-writer index-dir)
populate-index
.close)
(with-open [reader (IndexReader/open index-dir)]
(binding [*idx-reader* reader]
(f)))))
(use-fixtures :once setup-index)
(deftest key-steps-queries
(let [searcher (IndexSearcher. *idx-reader*)]
(let [results (.search searcher
(TermQuery. (Term. LucenePDFConfiguration/DEFAULT_MAIN_TEXT_FIELD_NAME "macromedia"))
nil 1000)]
(is (= 1 (.-totalHits results)))
(let [doc (.doc searcher (-> results .-scoreDocs first .doc))]
(is (= "key_steps.pdf" (.stringValue (.getField doc filename-field-name))))
(is (instance? Date (DateTools/stringToDate (-> doc (.getField "CreationDate") .stringValue))))
(is (= "Adobe InDesign 2.0.1" (-> doc (.getField "Creator") .stringValue)))))))
(deftest document-serialization-queries
(let [searcher (IndexSearcher. *idx-reader*)]
(let [results (.search searcher
(TermQuery. (Term. "alt_text_field" "jxta"))
nil 1000)]
(is (= 1 (.-totalHits results)))
(let [doc (.doc searcher (-> results .-scoreDocs first .doc))]
(is (= "DocumentSerialization.pdf" (.stringValue (.getField doc filename-field-name))))
(is (= "gseidman" (-> doc (.getField "author_attr_field_name") .stringValue)))))
(let [results (.search searcher
(doto (PhraseQuery.)
(.add (Term. "alt_text_field" "tight"))
(.add (Term. "alt_text_field" "loops")))
nil 1000)]
(is (= 1 (.-totalHits results)))
(let [doc (.doc searcher (-> results .-scoreDocs first .doc))]
(is (= "DocumentSerialization.pdf" (.stringValue (.getField doc filename-field-name))))))))
<|start_filename|>src/lucene-2/com/snowtide/pdf/lucene/LuceneInterface2.java<|end_filename|>
package com.snowtide.pdf.lucene;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
/**
* Implementation of {@link LucenePDFDocumentFactory.LuceneInterface} corresponding to Lucene v2.x. (Basically: {@link
* com.snowtide.pdf.lucene.LuceneInterface3} will be selected over this for Lucene versions >= 2.4.0 due to the
* introduction of Field.Index.ANALYZED, etc.)
*
* @version ©2004-2014 Snowtide, http://snowtide.com, licensed under MIT. See LICENSE in the top level of the
* <a href="https://github.com/snowtide/lucene-pdf">lucene-pdf</a> project directory.
*/
public class LuceneInterface2 extends LucenePDFDocumentFactory.LuceneInterface {
public void addField (Document doc, String name, String value, boolean store, boolean index, boolean tokenize) {
doc.add(new Field(name, value, store ? Field.Store.YES : Field.Store.NO,
index ? (tokenize ? Field.Index.TOKENIZED : Field.Index.UN_TOKENIZED) : Field.Index.NO));
}
public int version () {
return 2;
}
}
<|start_filename|>test/test_documents.clj<|end_filename|>
(ns test-documents
(:import com.snowtide.PDF
(com.snowtide.pdf PDFDateParser Document)
(com.snowtide.pdf.lucene LucenePDFDocumentFactory LucenePDFConfiguration)
(org.apache.lucene.document Field DateTools)
java.io.File
java.util.Date)
(:use clojure.test))
(def test-pdf "test/DocumentSerialization.pdf")
(try
(Class/forName "org.apache.lucene.document.FieldType")
(defn assert-field-params [^Field f store index tokenize]
(let [ft (.fieldType f)]
(is (= store (.stored ft)))
(is (= index (.indexed ft)))
(is (= tokenize (.tokenized ft)))))
(catch Throwable t
(defn assert-field-params [^Field f store index tokenize]
(is (= store (.isStored f)))
(is (= index (.isIndexed f)))
(is (= tokenize (.isTokenized f))))))
(def attr-names [Document/ATTR_AUTHOR Document/ATTR_PRODUCER Document/ATTR_TITLE
Document/ATTR_CREATION_DATE Document/ATTR_CREATOR])
(defn verify-lucene-fields
[pdf lucene-doc text-field text-field-params metadata-fields metadata-field-params]
(apply assert-field-params (.getField lucene-doc text-field) text-field-params)
(is (> (-> (.getField lucene-doc text-field) .stringValue count) 9500)
"PDF text content did not get into lucene document")
(doseq [[pdf-attr lucene-field-name] metadata-fields
:let [v (.getAttribute pdf pdf-attr)
field (.getField lucene-doc lucene-field-name)]]
(apply assert-field-params field metadata-field-params)
(if (= pdf-attr Document/ATTR_CREATION_DATE)
(is (= (PDFDateParser/parseDateString v) (DateTools/stringToDate (.stringValue field))))
(is (= v (.stringValue field))))))
(deftest default-lucene-document-creation
(with-open [pdf (PDF/open test-pdf)]
(let [lucene-doc (LucenePDFDocumentFactory/buildPDFDocument pdf)]
(verify-lucene-fields pdf lucene-doc
LucenePDFConfiguration/DEFAULT_MAIN_TEXT_FIELD_NAME [false true true]
(zipmap attr-names attr-names) [true true true]))))
(deftest custom-lucene-document-creation
(with-open [pdf (PDF/open test-pdf)]
(let [text-field-name "l_text"
attr-names (remove #{Document/ATTR_PRODUCER} attr-names)
metadata-field-params (zipmap attr-names (map #(str "l_" %) attr-names))
config (doto (LucenePDFConfiguration. text-field-name)
(.setCopyAllPDFMetadata false)
(.setMetadataSettings false true false)
(.setBodyTextSettings true true false))
_ (doseq [[pdf-attr lucene-field-name] metadata-field-params]
(.setMetadataFieldMapping config pdf-attr lucene-field-name))
lucene-doc (LucenePDFDocumentFactory/buildPDFDocument pdf config)]
(verify-lucene-fields pdf lucene-doc
text-field-name [true true false]
metadata-field-params [false true false])
;; ensure producer attr wasn't copied -- it wasn't mapped above, and 'copyAllPDFMetadata' is false in config
(is (nil? (.getField lucene-doc Document/ATTR_PRODUCER)))
(is (nil? (.getField lucene-doc (str "l_" Document/ATTR_PRODUCER)))))))
<|start_filename|>src/lucene-4/com/snowtide/pdf/lucene/LuceneInterface4.java<|end_filename|>
package com.snowtide.pdf.lucene;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
/**
* Implementation of {@link LucenePDFDocumentFactory.LuceneInterface} corresponding to Lucene v4.x.
*
* @version ©2004-2014 Snowtide, http://snowtide.com, licensed under MIT. See LICENSE in the top level of the
* <a href="https://github.com/snowtide/lucene-pdf">lucene-pdf</a> project directory.
*/
public class LuceneInterface4 extends LucenePDFDocumentFactory.LuceneInterface {
public void addField (Document doc, String name, String value, boolean store, boolean index, boolean tokenize) {
doc.add(new Field(name, value, store ? Field.Store.YES : Field.Store.NO,
index ? (tokenize ? Field.Index.ANALYZED : Field.Index.NOT_ANALYZED) : Field.Index.NO));
}
public int version () {
return 4;
}
}
<|start_filename|>src/lucene-3/com/snowtide/pdf/lucene/LuceneInterface3.java<|end_filename|>
package com.snowtide.pdf.lucene;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
/**
* Implementation of {@link LucenePDFDocumentFactory.LuceneInterface} corresponding to Lucene v3.x. (Basically:
* this will be selected over {@link
* com.snowtide.pdf.lucene.LuceneInterface2} for Lucene versions >= 2.4.0 due to the
* introduction of Field.Index.ANALYZED, etc.)
*
* @version ©2004-2014 Snowtide, http://snowtide.com, licensed under MIT. See LICENSE in the top level of the
* <a href="https://github.com/snowtide/lucene-pdf">lucene-pdf</a> project directory.
*/
public class LuceneInterface3 extends LucenePDFDocumentFactory.LuceneInterface {
public void addField (Document doc, String name, String value, boolean store, boolean index, boolean tokenize) {
doc.add(new Field(name, value, store ? Field.Store.YES : Field.Store.NO,
index ? (tokenize ? Field.Index.ANALYZED : Field.Index.NOT_ANALYZED) : Field.Index.NO));
}
public int version () {
return 3;
}
}
| snowtide/lucene-pdf |
<|start_filename|>src/BirdMessenger.Test/TusBuildUnitTest.cs<|end_filename|>
using System;
using System.IO;
using Xunit;
using BirdMessenger;
using System.Security.Cryptography;
using System.Text;
using System.Collections.Generic;
using System.Threading.Tasks;
using BirdMessenger.Collections;
namespace BirdMessenger.Test
{
public class TusBuildUnitTest
{
public Uri tusHost = new Uri("http://localhost:5000/files");
[Fact]
public async Task TestCreateTusClientAsync()
{
var tusClient = TusBuild.DefaultTusClientBuild(tusHost)
.Build();
var fileInfo = new FileInfo(@"TestFile/test.mp4");
MetadataCollection dir = new MetadataCollection();
dir["filename"] = fileInfo.FullName;
var result = await tusClient.Create(fileInfo, dir);
}
[Fact]
public async Task TestUploadFilesAsync()
{
var tusClient = TusBuild.DefaultTusClientBuild(tusHost)
.Build();
var fileInfo = new FileInfo(@"TestFile/test.mp4");
MetadataCollection dir = new MetadataCollection();
dir["filename"] = fileInfo.FullName;
List<Uri> fileUrls = new List<Uri>();
for (int i = 0; i < 30; i++)
{
var fileUrl = await tusClient.Create(fileInfo, dir);
fileUrls.Add(fileUrl);
}
foreach (var item in fileUrls)
{
var uploadResult = await tusClient.Upload(item, fileInfo, null);
Assert.True(uploadResult);
}
}
[Fact]
public async Task TestConfigTusAsync()
{
var tusClient = TusBuild.DefaultTusClientBuild(tusHost)
.Configure((option, httpClientBuilder) =>
{
option.GetChunkUploadSize = (s, u) => 10 * 1024 * 1024;
})
.Build();
var fileInfo = new FileInfo(@"TestFile/test.mp4");
MetadataCollection dir = new MetadataCollection();
dir["filename"] = fileInfo.FullName;
var result = await tusClient.Create(fileInfo, dir);
}
public static string GetHash(HashAlgorithm hashAlgorithm, byte[] data)
{
byte[] hashData = hashAlgorithm.ComputeHash(data);
var sBuilder = new StringBuilder();
for (int i = 0; i < hashData.Length; i++)
{
sBuilder.Append(hashData[i].ToString("x2"));
}
return sBuilder.ToString();
}
public static bool VerifyHash(HashAlgorithm hashAlgorithm, byte[] data, string hash)
{
string hashOfData = GetHash(hashAlgorithm, data);
StringComparer comparer = StringComparer.OrdinalIgnoreCase;
return comparer.Compare(hash, hashOfData) == 0;
}
}
}
<|start_filename|>samples/demo3/Program.cs<|end_filename|>
using System;
using System.Text;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Reflection;
using System.Threading.Tasks;
using BirdMessenger;
using BirdMessenger.Collections;
using BirdMessenger.Infrastructure;
namespace demo3
{
class Program
{
static async Task Main(string[] args)
{
var stream = new MemoryStream(1024 * 1024 * 32);
for(var i = 0; i < 1024 * 1024 * 32; i++) {
stream.Write(Encoding.UTF8.GetBytes(BitConverter.ToString(new byte[] { (byte)i }), 0, 2));
}
//reset position
stream.Position = 0;
// remote tus service
var hostUri = new Uri(@"http://localhost:5000/files");
// build a standalone tus client instance
var tusClient = TusBuild.DefaultTusClientBuild(hostUri)
.Build();
//hook up events
tusClient.UploadProgress += printUploadProcess;
tusClient.UploadFinish += uploadFinish;
//define additional file metadata
MetadataCollection metadata = new MetadataCollection();
//create upload url
var uploadUrl = await tusClient.Create(stream.Length, metadata);
//upload file
var uploadResult = await tusClient.Upload(uploadUrl, stream, null);
}
public static void printUploadProcess(ITusClient src, ITusUploadContext context)
{
Console.WriteLine($"finished:fileUri:{context.UploadUrl}-{context.UploadedSize},total:{context.TotalSize} ");
}
public static void uploadFinish(ITusClient src, ITusUploadContext context)
{
Console.WriteLine($"uploadfinish :{context.UploadUrl.ToString()}");
}
}
}
<|start_filename|>src/BirdMessenger/ITusClientOptions.cs<|end_filename|>
using BirdMessenger.Delegates;
using System;
namespace BirdMessenger
{
public interface ITusClientOptions
{
/// <summary>
/// tus server host
/// </summary>
public Uri TusHost { get;}
/// <summary>
/// method to compute the chunk size for upload
/// </summary>
public TusChunkUploadSizeDelegate GetChunkUploadSize { get; }
/// <summary>
/// temporarily changes the chunk upload size delegate; the returned IDisposable is used to restore the original value
/// </summary>
public IDisposable ChangeChunkUploadSize(TusChunkUploadSizeDelegate tusChunkUploadSizeDelegate);
/// <summary>
/// metadata key for uploaded file name
/// </summary>
public string FileNameMetadataName { get; }
}
}
<|start_filename|>src/BirdMessenger/Core/TusGeneric.cs<|end_filename|>
using System.Net.Http;
using BirdMessenger.Abstractions;
namespace BirdMessenger.Core
{
/// <summary>
/// Tus implementation class
/// </summary>
public class Tus<TService> : Tus, ITusCore<TService>, ITusExtension<TService>
{
public Tus(HttpClient httpClient) : base(httpClient)
{
}
}
}
<|start_filename|>src/BirdMessenger.BenchMark/Benchmarks.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using BenchmarkDotNet;
using BenchmarkDotNet.Attributes;
using BirdMessenger.Collections;
namespace BirdMessenger.BenchMark
{
[MemoryDiagnoser]
public class Benchmarks
{
//[Benchmark]
public async Task Scenario1()
{
var fileInfo = new FileInfo(@"TestFile/testf");
MetadataCollection dir = new MetadataCollection();
dir["filename"] = fileInfo.FullName;
var fileUrl = await Program.tusClient.Create(fileInfo, dir);
var uploadResult = await Program.tusClient.Upload(fileUrl, fileInfo, null);
}
[Benchmark]
public async Task Scenario2()
{
var fileInfo = new FileInfo(@"TestFile/bigFile");
MetadataCollection dir = new MetadataCollection();
dir["filename"] = fileInfo.FullName;
var fileUrl = await Program.tusClient.Create(fileInfo, dir);
var uploadResult = await Program.tusClient.Upload(fileUrl, fileInfo, null);
}
}
}
<|start_filename|>src/BirdMessenger.BenchMark/Program.cs<|end_filename|>
using System;
using BenchmarkDotNet.Running;
namespace BirdMessenger.BenchMark
{
public class Program
{
public static Uri host = new Uri("http://localhost:5000/files");
public static ITusClient tusClient=TusBuild.DefaultTusClientBuild(host)
.Build();
public static void Main(string[] args)
{
var summary = BenchmarkRunner.Run<Benchmarks>();
}
}
}
<|start_filename|>src/BirdMessenger/Infrastructure/TusUploadContext.cs<|end_filename|>
using System;
using System.IO;
namespace BirdMessenger.Infrastructure
{
internal class TusUploadContext : ITusUploadContext
{
public TusUploadContext(long totalSize, long uploadedSize, Uri uploadUrl, object state)
{
TotalSize = totalSize;
UploadedSize = uploadedSize;
UploadUrl = uploadUrl;
State = state;
}
public long TotalSize { get; }
public long UploadedSize { get; set; }
public Uri UploadUrl { get; }
public object State { get; }
public double UploadPercentage { get { return (double)UploadedSize / TotalSize; } }
}
}
<|start_filename|>src/BirdMessenger/ServiceCollectionExtensions.cs<|end_filename|>
using BirdMessenger.Abstractions;
using BirdMessenger.Builder;
using BirdMessenger.Core;
using Microsoft.Extensions.DependencyInjection;
using System;
using System.Net.Http;
namespace BirdMessenger
{
public static class ServiceCollectionExtensions
{
private static void DefaultHttpClientConfigure(HttpClient c)
{
c.DefaultRequestHeaders.Add("Tus-Resumable", "1.0.0");
}
public static TusHttpClientConfiguration AddTusClient(this IServiceCollection services, Uri tusHost)
{
return services.AddTusClient((opts) => { opts.TusHost = tusHost; });
}
public static TusHttpClientConfiguration AddTusClient(this IServiceCollection services, Action<TusClientOptions> configure)
{
var options = new TusClientOptions();
configure(options);
var coreHttpClientBuilder = services.AddHttpClient<ITusCore, Tus>(httpClient =>
{
DefaultHttpClientConfigure(httpClient);
});
var extensionHttpClientBuilder = services.AddHttpClient<ITusExtension, Tus>(httpClient =>
{
DefaultHttpClientConfigure(httpClient);
});
services.AddTransient<ITusClient>((services) =>
{
var tusCore = services.GetService<ITusCore>();
var tusExtension = services.GetService<ITusExtension>();
var opts = options;
return new TusClient(tusCore, tusExtension, opts);
});
return new TusHttpClientConfiguration(options, coreHttpClientBuilder, extensionHttpClientBuilder);
}
public static TusHttpClientConfiguration AddTusClient<TService>(this IServiceCollection services, Uri tusHost)
{
return services.AddTusClient<TService>((opts) => { opts.TusHost = tusHost; });
}
public static TusHttpClientConfiguration AddTusClient<TService>(this IServiceCollection services, Action<TusClientOptions> configure)
{
var options = new TusClientOptions();
configure(options);
var coreHttpClientBuilder = services.AddHttpClient<ITusCore<TService>, Tus<TService>>(httpClient =>
{
DefaultHttpClientConfigure(httpClient);
});
var extensionHttpClientBuilder = services.AddHttpClient<ITusExtension<TService>, Tus<TService>>(httpClient =>
{
DefaultHttpClientConfigure(httpClient);
});
services.AddTransient<ITusClient<TService>>((services) =>
{
var tusCore = services.GetService<ITusCore<TService>>();
var tusExtension = services.GetService<ITusExtension<TService>>();
var opts = options;
return new TusClient<TService>(tusCore, tusExtension, opts);
});
return new TusHttpClientConfiguration(options, coreHttpClientBuilder, extensionHttpClientBuilder);
}
}
}
<|start_filename|>samples/testDotNetSite/Services/BackgroundServiceBase.cs<|end_filename|>
using Microsoft.Extensions.Hosting;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace testDotNetSite.Services
{
/// <summary>
/// BackgroundServiceBase
/// </summary>
/// <seealso cref="Microsoft.Extensions.Hosting.IHostedService" />
/// <seealso cref="System.IDisposable" />
public abstract class BackgroundServiceBase : IHostedService, IDisposable
{
private Task _executingTask;
private readonly CancellationTokenSource _stoppingCts = new CancellationTokenSource();
/// <summary>
///
/// </summary>
/// <param name="stoppingToken">Triggered when <see cref="IHostedService.StopAsync(CancellationToken)" /> is called.</param>
/// <returns>
/// A <see cref="Task" /> that represents the long running operations.
/// </returns>
protected abstract Task ExecuteAsync(CancellationToken stoppingToken);
/// <summary>
/// Triggered when the application host is ready to start the service.
/// </summary>
/// <param name="cancellationToken">Indicates that the start process has been aborted.</param>
/// <returns></returns>
public virtual Task StartAsync(CancellationToken cancellationToken)
{
_executingTask = ExecuteAsync(_stoppingCts.Token);
if (_executingTask.IsCompleted)
{
return _executingTask;
}
return Task.CompletedTask;
}
/// <summary>
/// Triggered when the application host is performing a graceful shutdown.
/// </summary>
/// <param name="cancellationToken">Indicates that the shutdown process should no longer be graceful.</param>
/// <returns></returns>
public virtual async Task StopAsync(CancellationToken cancellationToken)
{
if (_executingTask == null)
{
return;
}
try
{
_stoppingCts.Cancel();
}
finally
{
await Task.WhenAny(_executingTask, Task.Delay(Timeout.Infinite, cancellationToken));
}
}
public virtual void Dispose()
{
_stoppingCts.Cancel();
}
}
}
<|start_filename|>src/BirdMessenger/TusClientOptions.cs<|end_filename|>
using BirdMessenger.Delegates;
using BirdMessenger.Infrastructure;
using System;
namespace BirdMessenger
{
public class TusClientOptions : ITusClientOptions
{
public TusClientOptions()
{
FileNameMetadataName = "fileName";
GetChunkUploadSize = (src, ctx) => 1 * 1024 * 1024;
}
/// <summary>
/// tus server host
/// </summary>
public Uri TusHost { get; set; }
/// <summary>
/// method to compute the chunk size for upload
/// </summary>
public TusChunkUploadSizeDelegate GetChunkUploadSize { get; set; }
/// <summary>
/// metadata key for uploaded file name
/// </summary>
public string FileNameMetadataName { get; set; }
public IDisposable ChangeChunkUploadSize(TusChunkUploadSizeDelegate tusChunkUploadSizeDelegate)
{
var original = GetChunkUploadSize;
GetChunkUploadSize = tusChunkUploadSizeDelegate;
return new TemporaryOptionChange(() =>
{
GetChunkUploadSize = original;
});
}
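// Usage sketch (illustrative only; assumes an existing TusClientOptions instance named `options`):
//   using (options.ChangeChunkUploadSize((src, ctx) => 5 * 1024 * 1024))
//   {
//       // uploads started here use the temporary 5 MB chunk-size delegate
//   }
// Disposing the returned value runs the restore callback above, which sets GetChunkUploadSize back to the original delegate.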
}
}
<|start_filename|>src/BirdMessenger/Infrastructure/ITusUploadContext.cs<|end_filename|>
using System;
using System.IO;
namespace BirdMessenger.Infrastructure
{
public interface ITusUploadContext
{
long TotalSize { get; }
long UploadedSize { get; }
Uri UploadUrl { get; }
object State { get; }
double UploadPercentage { get; }
}
}
| alexandru-bagu/BirdMessenger |
<|start_filename|>src/cmd/new_pull_request.go<|end_filename|>
package cmd
import (
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
"github.com/git-town/git-town/src/prompt"
"github.com/git-town/git-town/src/steps"
"github.com/spf13/cobra"
)
type newPullRequestConfig struct {
InitialBranch string
BranchesToSync []string
}
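// Example (hypothetical values) of the configuration described in the help text below,
// for a self-hosted GitLab instance reachable via a custom SSH host name:
//   git config git-town.code-hosting-driver gitlab
//   git config git-town.code-hosting-origin-hostname gitlab.example.com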
var newPullRequestCommand = &cobra.Command{
Use: "new-pull-request",
Short: "Creates a new pull request",
Long: `Creates a new pull request
Syncs the current branch
and opens a browser window to the new pull request page of your repository.
The form is pre-populated for the current branch
so that the pull request only shows the changes made
against the immediate parent branch.
Supported only for repositories hosted on GitHub, GitLab, Gitea and Bitbucket.
When using self-hosted versions this command needs to be configured with
"git config git-town.code-hosting-driver <driver>"
where driver is "github", "gitlab", "gitea", or "bitbucket".
When using SSH identities, this command needs to be configured with
"git config git-town.code-hosting-origin-hostname <hostname>"
where hostname matches what is in your ssh config file.`,
Run: func(cmd *cobra.Command, args []string) {
config, err := getNewPullRequestConfig(prodRepo)
if err != nil {
cli.Exit(err)
}
driver := drivers.Load(prodRepo.Config, &prodRepo.Silent, cli.PrintDriverAction)
if driver == nil {
cli.Exit(drivers.UnsupportedHostingError())
}
stepList, err := getNewPullRequestStepList(config, prodRepo)
if err != nil {
cli.Exit(err)
}
runState := steps.NewRunState("new-pull-request", stepList)
err = steps.Run(runState, prodRepo, driver)
if err != nil {
cli.Exit(err)
}
},
Args: cobra.NoArgs,
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
if err := validateIsConfigured(prodRepo); err != nil {
return err
}
if err := prodRepo.Config.ValidateIsOnline(); err != nil {
return err
}
return nil
},
}
func getNewPullRequestConfig(repo *git.ProdRepo) (result newPullRequestConfig, err error) {
hasOrigin, err := repo.Silent.HasRemote("origin")
if err != nil {
return result, err
}
if hasOrigin {
err := repo.Logging.Fetch()
if err != nil {
return result, err
}
}
result.InitialBranch, err = repo.Silent.CurrentBranch()
if err != nil {
return result, err
}
err = prompt.EnsureKnowsParentBranches([]string{result.InitialBranch}, repo)
if err != nil {
return result, err
}
result.BranchesToSync = append(repo.Config.GetAncestorBranches(result.InitialBranch), result.InitialBranch)
return
}
func getNewPullRequestStepList(config newPullRequestConfig, repo *git.ProdRepo) (result steps.StepList, err error) {
for _, branchName := range config.BranchesToSync {
steps, err := steps.GetSyncBranchSteps(branchName, true, repo)
if err != nil {
return result, err
}
result.AppendList(steps)
}
err = result.Wrap(steps.WrapOptions{RunInGitRoot: true, StashOpenChanges: true}, repo)
if err != nil {
return result, err
}
result.Append(&steps.CreatePullRequestStep{BranchName: config.InitialBranch})
return result, nil
}
func init() {
RootCmd.AddCommand(newPullRequestCommand)
}
<|start_filename|>src/drivers/helpers/url-hostname.go<|end_filename|>
package helpers
import "regexp"
// GetURLHostname returns the hostname contained within the given Git URL.
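// For example (a sketch of the expected behavior, not taken from the original docs):
//   GetURLHostname("https://github.com/git-town/git-town.git") // "github.com"
//   GetURLHostname("git@github.com:git-town/git-town.git")     // "github.com"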
func GetURLHostname(url string) string {
hostnameRegex := regexp.MustCompile("(^[^:]*://([^@]*@)?|git@)([^/:]+).*")
matches := hostnameRegex.FindStringSubmatch(url)
if matches == nil {
return ""
}
return matches[3]
}
<|start_filename|>test/mocking_shell_test.go<|end_filename|>
// nolint: testpackage
package test
import (
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/git-town/git-town/src/run"
"github.com/stretchr/testify/assert"
)
func TestMockingShell_MockCommand(t *testing.T) {
workDir := CreateTempDir(t)
devDir := filepath.Join(workDir, "dev")
err := os.Mkdir(devDir, 0744)
assert.NoError(t, err)
shell := NewMockingShell(devDir, workDir, filepath.Join(workDir, "bin"))
err = shell.MockCommand("foo")
assert.NoError(t, err)
// run a program that calls the mocked command
res, err := shell.Run("bash", "-c", "foo bar")
assert.NoError(t, err)
// verify that it called our overridden "foo" command
assert.Equal(t, "foo called with: bar", res.OutputSanitized())
}
func TestShellRunner_Run(t *testing.T) {
runner := NewMockingShell(CreateTempDir(t), CreateTempDir(t), "")
res, err := runner.Run("echo", "hello", "world")
assert.NoError(t, err)
assert.Equal(t, "hello world", res.OutputSanitized())
}
func TestShellRunner_RunMany(t *testing.T) {
workDir := CreateTempDir(t)
runner := NewMockingShell(workDir, CreateTempDir(t), "")
err := runner.RunMany([][]string{
{"touch", "first"},
{"touch", "second"},
})
assert.NoError(t, err)
infos, err := ioutil.ReadDir(workDir)
assert.NoError(t, err)
assert.Len(t, infos, 2)
assert.Equal(t, "first", infos[0].Name())
assert.Equal(t, "second", infos[1].Name())
}
func TestShellRunner_RunString(t *testing.T) {
workDir := CreateTempDir(t)
runner := NewMockingShell(workDir, CreateTempDir(t), "")
_, err := runner.RunString("touch first")
assert.NoError(t, err)
_, err = os.Stat(filepath.Join(workDir, "first"))
assert.False(t, os.IsNotExist(err))
}
func TestShellRunner_RunStringWith_Dir(t *testing.T) {
dir1 := CreateTempDir(t)
dir2 := filepath.Join(dir1, "subdir")
err := os.Mkdir(dir2, 0744)
assert.NoError(t, err)
runner := NewMockingShell(dir1, CreateTempDir(t), "")
toolPath := filepath.Join(dir2, "list-dir")
err = CreateLsTool(toolPath)
assert.NoError(t, err)
res, err := runner.RunWith(run.Options{Dir: "subdir"}, toolPath)
assert.NoError(t, err)
assert.Equal(t, ScriptName("list-dir"), res.OutputSanitized())
}
func TestShellRunner_RunStringWith_Input(t *testing.T) {
dir1 := CreateTempDir(t)
dir2 := filepath.Join(dir1, "subdir")
err := os.Mkdir(dir2, 0744)
assert.NoError(t, err)
runner := NewMockingShell(dir1, CreateTempDir(t), "")
toolPath := filepath.Join(dir2, "list-dir")
err = CreateInputTool(toolPath)
assert.NoError(t, err)
cmd, args := CallScriptArgs(toolPath)
res, err := runner.RunWith(run.Options{Input: []string{"one\n", "two\n"}}, cmd, args...)
assert.NoError(t, err)
assert.Contains(t, res.OutputSanitized(), "You entered one and two")
}
<|start_filename|>src/cmd/prune_branches.go<|end_filename|>
package cmd
import (
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/git"
"github.com/git-town/git-town/src/steps"
"github.com/spf13/cobra"
)
type pruneBranchesConfig struct {
initialBranchName string
mainBranch string
localBranchesWithDeletedTrackingBranches []string
}
var pruneBranchesCommand = &cobra.Command{
Use: "prune-branches",
Short: "Deletes local branches whose tracking branch no longer exists",
Long: `Deletes local branches whose tracking branch no longer exists
Deletes branches whose tracking branch no longer exists from the local repository.
This usually means the branch was shipped or killed on another machine.`,
Run: func(cmd *cobra.Command, args []string) {
config, err := getPruneBranchesConfig(prodRepo)
if err != nil {
cli.Exit(err)
}
stepList, err := getPruneBranchesStepList(config, prodRepo)
if err != nil {
cli.Exit(err)
}
runState := steps.NewRunState("prune-branches", stepList)
err = steps.Run(runState, prodRepo, nil)
if err != nil {
cli.Exit(err)
}
},
Args: cobra.NoArgs,
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
if err := validateIsConfigured(prodRepo); err != nil {
return err
}
return prodRepo.Config.ValidateIsOnline()
},
}
func getPruneBranchesConfig(repo *git.ProdRepo) (result pruneBranchesConfig, err error) {
hasOrigin, err := repo.Silent.HasRemote("origin")
if err != nil {
return result, err
}
if hasOrigin {
err = repo.Logging.Fetch()
if err != nil {
return result, err
}
}
result.mainBranch = repo.Config.GetMainBranch()
result.initialBranchName, err = repo.Silent.CurrentBranch()
if err != nil {
return result, err
}
result.localBranchesWithDeletedTrackingBranches, err = repo.Silent.LocalBranchesWithDeletedTrackingBranches()
return result, err
}
func getPruneBranchesStepList(config pruneBranchesConfig, repo *git.ProdRepo) (result steps.StepList, err error) {
initialBranchName := config.initialBranchName
for _, branchName := range config.localBranchesWithDeletedTrackingBranches {
if initialBranchName == branchName {
result.Append(&steps.CheckoutBranchStep{BranchName: config.mainBranch})
}
parent := repo.Config.GetParentBranch(branchName)
if parent != "" {
for _, child := range repo.Config.GetChildBranches(branchName) {
result.Append(&steps.SetParentBranchStep{BranchName: child, ParentBranchName: parent})
}
result.Append(&steps.DeleteParentBranchStep{BranchName: branchName})
}
if repo.Config.IsPerennialBranch(branchName) {
result.Append(&steps.RemoveFromPerennialBranches{BranchName: branchName})
}
result.Append(&steps.DeleteLocalBranchStep{BranchName: branchName})
}
err = result.Wrap(steps.WrapOptions{RunInGitRoot: false, StashOpenChanges: false}, repo)
return result, err
}
func init() {
RootCmd.AddCommand(pruneBranchesCommand)
}
<|start_filename|>src/steps/run_state_to_disk.go<|end_filename|>
package steps
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"github.com/git-town/git-town/src/git"
)
// LoadPreviousRunState loads the run state from disk if it exists or creates a new run state.
func LoadPreviousRunState(repo *git.ProdRepo) (result *RunState, err error) {
filename, err := getRunResultFilename(repo)
if err != nil {
return nil, err
}
_, err = os.Stat(filename)
if err != nil {
if os.IsNotExist(err) {
return nil, nil
}
return nil, fmt.Errorf("cannot check file %q: %w", filename, err)
}
var runState RunState
content, err := ioutil.ReadFile(filename)
if err != nil {
return result, fmt.Errorf("cannot read file %q: %w", filename, err)
}
err = json.Unmarshal(content, &runState)
if err != nil {
return result, fmt.Errorf("cannot parse content of file %q: %w", filename, err)
}
return &runState, nil
}
// DeletePreviousRunState deletes the previous run state from disk.
func DeletePreviousRunState(repo *git.ProdRepo) error {
filename, err := getRunResultFilename(repo)
if err != nil {
return err
}
_, err = os.Stat(filename)
if err != nil {
if os.IsNotExist(err) {
return nil
}
return fmt.Errorf("cannot check file %q: %w", filename, err)
}
err = os.Remove(filename)
if err != nil {
return fmt.Errorf("cannot delete file %q: %w", filename, err)
}
return nil
}
// SaveRunState saves the run state to disk.
func SaveRunState(runState *RunState, repo *git.ProdRepo) error {
content, err := json.MarshalIndent(runState, "", " ")
if err != nil {
return fmt.Errorf("cannot encode run-state: %w", err)
}
filename, err := getRunResultFilename(repo)
if err != nil {
return err
}
err = ioutil.WriteFile(filename, content, 0600)
if err != nil {
return fmt.Errorf("cannot write file %q: %w", filename, err)
}
return nil
}
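// getRunResultFilename provides the path of the file in which the run state for the given repo is persisted.
// For example (a sketch), a repo rooted at /home/user/my-repo maps to $TMPDIR/-home-user-my-repo,
// since every non-alphanumeric character of the root directory is replaced with "-".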
func getRunResultFilename(repo *git.ProdRepo) (string, error) {
replaceCharacterRegexp := regexp.MustCompile("[[:^alnum:]]")
rootDir, err := repo.Silent.RootDirectory()
if err != nil {
return "", err
}
directory := replaceCharacterRegexp.ReplaceAllString(rootDir, "-")
return filepath.Join(os.TempDir(), directory), nil
}
<|start_filename|>src/run/silent_shell.go<|end_filename|>
package run
import (
"fmt"
"github.com/kballard/go-shellquote"
)
// SilentShell is an implementation of the Shell interface that runs commands in the current working directory.
type SilentShell struct {
}
// WorkingDir provides the directory that this Shell operates in.
func (shell SilentShell) WorkingDir() string {
return "."
}
// Run runs the given command in the current working directory.
func (shell SilentShell) Run(cmd string, args ...string) (*Result, error) {
return Exec(cmd, args...)
}
// RunMany runs all given commands in the current working directory.
// Commands are provided as a list of argv-style strings.
// Failed commands abort immediately with the encountered error.
func (shell SilentShell) RunMany(commands [][]string) error {
for _, argv := range commands {
_, err := Exec(argv[0], argv[1:]...)
if err != nil {
return fmt.Errorf("error running command %q: %w", argv, err)
}
}
return nil
}
// RunString runs the given command (including possible arguments) in the current working directory.
func (shell SilentShell) RunString(fullCmd string) (*Result, error) {
parts, err := shellquote.Split(fullCmd)
if err != nil {
return nil, fmt.Errorf("cannot split command %q: %w", fullCmd, err)
}
cmd, args := parts[0], parts[1:]
return Exec(cmd, args...)
}
// RunStringWith runs the given command (including possible arguments) in the current working directory, using the given options.
func (shell SilentShell) RunStringWith(fullCmd string, options Options) (*Result, error) {
parts, err := shellquote.Split(fullCmd)
if err != nil {
return nil, fmt.Errorf("cannot split command %q: %w", fullCmd, err)
}
cmd, args := parts[0], parts[1:]
return WithOptions(options, cmd, args...)
}
<|start_filename|>src/steps/commit_open_changes_step.go<|end_filename|>
package steps
import (
"fmt"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// CommitOpenChangesStep commits all open changes as a new commit.
// It does not ask the user for a commit message, but chooses one automatically.
type CommitOpenChangesStep struct {
NoOpStep
previousSha string
}
// CreateUndoStep returns the undo step for this step.
func (step *CommitOpenChangesStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &ResetToShaStep{Sha: step.previousSha}, nil
}
// Run executes this step.
func (step *CommitOpenChangesStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) (err error) {
step.previousSha, err = repo.Silent.CurrentSha()
if err != nil {
return err
}
err = repo.Logging.StageFiles("-A")
if err != nil {
return err
}
currentBranch, err := repo.Silent.CurrentBranch()
if err != nil {
return err
}
return repo.Logging.CommitStagedChanges(fmt.Sprintf("WIP on %s", currentBranch))
}
<|start_filename|>src/config/config.go<|end_filename|>
package config
import (
"errors"
"fmt"
"os"
"os/exec"
"regexp"
"sort"
"strconv"
"strings"
"github.com/git-town/git-town/src/run"
"github.com/git-town/git-town/src/stringslice"
)
// Config manages the Git Town configuration
// stored in Git metadata in the given local repo and the global Git configuration.
// It determines which config values are stored in the local vs. the global settings.
type Config struct {
// localConfigCache is a cache of the Git configuration in the local Git repo.
localConfigCache map[string]string
// globalConfigCache is a cache of the global Git configuration.
globalConfigCache map[string]string
// for running shell commands
shell run.Shell
}
// NewConfiguration provides a Config instance reflecting the configuration values in the given directory.
func NewConfiguration(shell run.Shell) *Config {
return &Config{
shell: shell,
localConfigCache: loadGitConfig(shell, false),
globalConfigCache: loadGitConfig(shell, true),
}
}
// loadGitConfig provides the Git configuration from the given directory or the global one if the global flag is set.
func loadGitConfig(shell run.Shell, global bool) map[string]string {
result := map[string]string{}
cmdArgs := []string{"config", "-lz"}
if global {
cmdArgs = append(cmdArgs, "--global")
} else {
cmdArgs = append(cmdArgs, "--local")
}
res, err := shell.Run("git", cmdArgs...)
if err != nil {
return result
}
output := res.Output()
if output == "" {
return result
}
for _, line := range strings.Split(output, "\x00") {
if len(line) == 0 {
continue
}
parts := strings.SplitN(line, "\n", 2)
key, value := parts[0], parts[1]
result[key] = value
}
return result
}
// AddToPerennialBranches registers the given branch names as perennial branches.
// The branches must exist.
func (c *Config) AddToPerennialBranches(branchNames ...string) error {
return c.SetPerennialBranches(append(c.GetPerennialBranches(), branchNames...))
}
// AddGitAlias sets the given Git alias.
func (c *Config) AddGitAlias(command string) (*run.Result, error) {
return c.SetGlobalConfigValue("alias."+command, "town "+command)
}
// DeleteMainBranchConfiguration removes the configuration entry for the main branch name.
func (c *Config) DeleteMainBranchConfiguration() error {
return c.removeLocalConfigValue("git-town.main-branch-name")
}
// DeleteParentBranch removes the parent branch entry for the given branch
// from the Git configuration.
func (c *Config) DeleteParentBranch(branchName string) error {
return c.removeLocalConfigValue("git-town-branch." + branchName + ".parent")
}
// DeletePerennialBranchConfiguration removes the configuration entry for the perennial branches.
func (c *Config) DeletePerennialBranchConfiguration() error {
return c.removeLocalConfigValue("git-town.perennial-branch-names")
}
// GetAncestorBranches returns the names of all parent branches for the given branch,
// This information is read from the cache in the Git config,
// so might be out of date when the branch hierarchy has been modified.
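// For example (hypothetical hierarchy): with main <- feature-a <- feature-b,
// GetAncestorBranches("feature-b") returns ["main", "feature-a"], ordered from the root down to the immediate parent.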
func (c *Config) GetAncestorBranches(branchName string) (result []string) {
parentBranchMap := c.GetParentBranchMap()
current := branchName
for {
if c.IsMainBranch(current) || c.IsPerennialBranch(current) {
return
}
parent := parentBranchMap[current]
if parent == "" {
return
}
result = append([]string{parent}, result...)
current = parent
}
}
// GetBranchAncestryRoots provides the branches with children and no parents.
func (c *Config) GetBranchAncestryRoots() []string {
parentMap := c.GetParentBranchMap()
roots := []string{}
for _, parent := range parentMap {
if _, ok := parentMap[parent]; !ok && !stringslice.Contains(roots, parent) {
roots = append(roots, parent)
}
}
sort.Strings(roots)
return roots
}
// GetChildBranches returns the names of all branches for which the given branch
// is a parent.
func (c *Config) GetChildBranches(branchName string) (result []string) {
for _, key := range c.localConfigKeysMatching(`^git-town-branch\..*\.parent$`) {
parent := c.getLocalConfigValue(key)
if parent == branchName {
child := strings.TrimSuffix(strings.TrimPrefix(key, "git-town-branch."), ".parent")
result = append(result, child)
}
}
return
}
// GetCodeHostingDriverName provides the name of the code hosting driver to use.
func (c *Config) GetCodeHostingDriverName() string {
return c.getLocalOrGlobalConfigValue("git-town.code-hosting-driver")
}
// GetCodeHostingOriginHostname provides the host name of the code hosting server.
func (c *Config) GetCodeHostingOriginHostname() string {
return c.getLocalConfigValue("git-town.code-hosting-origin-hostname")
}
// getGlobalConfigValue provides the configuration value with the given key from the global Git configuration.
func (c *Config) getGlobalConfigValue(key string) string {
return c.globalConfigCache[key]
}
// getLocalConfigValue provides the configuration value with the given key from the local Git configuration.
func (c *Config) getLocalConfigValue(key string) string {
return c.localConfigCache[key]
}
// getLocalOrGlobalConfigValue provides the configuration value with the given key from the local and global Git configuration.
// Local configuration takes precedence.
func (c *Config) getLocalOrGlobalConfigValue(key string) string {
local := c.getLocalConfigValue(key)
if local != "" {
return local
}
return c.getGlobalConfigValue(key)
}
// GetParentBranchMap returns a map from branch name to its parent branch.
func (c *Config) GetParentBranchMap() map[string]string {
result := map[string]string{}
for _, key := range c.localConfigKeysMatching(`^git-town-branch\..*\.parent$`) {
child := strings.TrimSuffix(strings.TrimPrefix(key, "git-town-branch."), ".parent")
parent := c.getLocalConfigValue(key)
result[child] = parent
}
return result
}
// GetGitAlias provides the currently set alias for the given Git Town command.
func (c *Config) GetGitAlias(command string) string {
return c.getGlobalConfigValue("alias." + command)
}
// GetGitHubToken provides the content of the GitHub API token stored in the local or global Git Town configuration.
func (c *Config) GetGitHubToken() string {
return c.getLocalOrGlobalConfigValue("git-town.github-token")
}
// GetGiteaToken provides the content of the Gitea API token stored in the local or global Git Town configuration.
func (c *Config) GetGiteaToken() string {
return c.getLocalOrGlobalConfigValue("git-town.gitea-token")
}
// GetMainBranch returns the name of the main branch.
func (c *Config) GetMainBranch() string {
return c.getLocalOrGlobalConfigValue("git-town.main-branch-name")
}
// GetParentBranch returns the name of the parent branch of the given branch.
func (c *Config) GetParentBranch(branchName string) string {
return c.getLocalConfigValue("git-town-branch." + branchName + ".parent")
}
// GetPerennialBranches returns all branches that are marked as perennial.
func (c *Config) GetPerennialBranches() []string {
result := c.getLocalOrGlobalConfigValue("git-town.perennial-branch-names")
if result == "" {
return []string{}
}
return strings.Split(result, " ")
}
// GetPullBranchStrategy returns the currently configured pull branch strategy.
func (c *Config) GetPullBranchStrategy() string {
config := c.getLocalOrGlobalConfigValue("git-town.pull-branch-strategy")
if config != "" {
return config
}
return "rebase"
}
// GetRemoteOriginURL returns the URL for the "origin" remote.
// In tests this value can be stubbed.
func (c *Config) GetRemoteOriginURL() string {
remote := os.Getenv("GIT_TOWN_REMOTE")
if remote != "" {
return remote
}
res, _ := c.shell.Run("git", "remote", "get-url", "origin")
return res.OutputSanitized()
}
// HasBranchInformation indicates whether this configuration contains any branch hierarchy entries.
func (c *Config) HasBranchInformation() bool {
for key := range c.localConfigCache {
if strings.HasPrefix(key, "git-town-branch.") {
return true
}
}
return false
}
// HasParentBranch returns whether or not the given branch has a parent.
func (c *Config) HasParentBranch(branchName string) bool {
return c.GetParentBranch(branchName) != ""
}
// IsAncestorBranch indicates whether the given branch is an ancestor of the other given branch.
func (c *Config) IsAncestorBranch(branchName, ancestorBranchName string) bool {
ancestorBranches := c.GetAncestorBranches(branchName)
return stringslice.Contains(ancestorBranches, ancestorBranchName)
}
// IsFeatureBranch indicates whether the branch with the given name is
// a feature branch.
func (c *Config) IsFeatureBranch(branchName string) bool {
return !c.IsMainBranch(branchName) && !c.IsPerennialBranch(branchName)
}
// IsMainBranch indicates whether the branch with the given name
// is the main branch of the repository.
func (c *Config) IsMainBranch(branchName string) bool {
return branchName == c.GetMainBranch()
}
// IsOffline indicates whether Git Town is currently in offline mode.
func (c *Config) IsOffline() bool {
config := c.getGlobalConfigValue("git-town.offline")
if config == "" {
return false
}
result, err := strconv.ParseBool(config)
if err != nil {
fmt.Printf("Invalid value for git-town.offline: %q. Please provide either true or false. Considering false for now.", config)
fmt.Println()
return false
}
return result
}
// IsPerennialBranch indicates whether the branch with the given name is
// a perennial branch.
func (c *Config) IsPerennialBranch(branchName string) bool {
perennialBranches := c.GetPerennialBranches()
return stringslice.Contains(perennialBranches, branchName)
}
// localConfigKeysMatching provides the names of the Git Town configuration keys matching the given RegExp string.
func (c *Config) localConfigKeysMatching(toMatch string) (result []string) {
re := regexp.MustCompile(toMatch)
for key := range c.localConfigCache {
if re.MatchString(key) {
result = append(result, key)
}
}
return result
}
// Reload refreshes the cached configuration information.
func (c *Config) Reload() {
c.localConfigCache = loadGitConfig(c.shell, false)
c.globalConfigCache = loadGitConfig(c.shell, true)
}
// RemoveFromPerennialBranches removes the given branch as a perennial branch.
func (c *Config) RemoveFromPerennialBranches(branchName string) error {
return c.SetPerennialBranches(stringslice.Remove(c.GetPerennialBranches(), branchName))
}
// RemoveGitAlias removes the given Git alias.
func (c *Config) RemoveGitAlias(command string) (*run.Result, error) {
return c.removeGlobalConfigValue("alias." + command)
}
func (c *Config) removeGlobalConfigValue(key string) (*run.Result, error) {
delete(c.globalConfigCache, key)
return c.shell.Run("git", "config", "--global", "--unset", key)
}
// removeLocalConfigValue removes the configuration value with the given key from the local Git Town configuration.
func (c *Config) removeLocalConfigValue(key string) error {
delete(c.localConfigCache, key)
_, err := c.shell.Run("git", "config", "--unset", key)
return err
}
// RemoveLocalGitConfiguration removes all Git Town configuration.
func (c *Config) RemoveLocalGitConfiguration() error {
_, err := c.shell.Run("git", "config", "--remove-section", "git-town")
if err != nil {
var exitErr *exec.ExitError
if errors.As(err, &exitErr) && exitErr.ExitCode() == 128 {
// Git returns exit code 128 when trying to delete a non-existing config section.
// This is not an error condition in this workflow so we can ignore it here.
return nil
}
return fmt.Errorf("unexpected error while removing the 'git-town' section from the Git configuration: %v", err)
}
return nil
}
// SetCodeHostingDriver sets the "git-town.code-hosting-driver" setting.
func (c *Config) SetCodeHostingDriver(value string) error {
const key = "git-town.code-hosting-driver"
c.localConfigCache[key] = value
_, err := c.shell.Run("git", "config", key, value)
return err
}
// SetCodeHostingOriginHostname sets the "git-town.code-hosting-origin-hostname" setting.
func (c *Config) SetCodeHostingOriginHostname(value string) error {
const key = "git-town.code-hosting-origin-hostname"
c.localConfigCache[key] = value
_, err := c.shell.Run("git", "config", key, value)
return err
}
// SetColorUI configures whether Git output contains color codes.
func (c *Config) SetColorUI(value string) error {
_, err := c.shell.Run("git", "config", "color.ui", value)
return err
}
// SetGlobalConfigValue sets the given configuration setting in the global Git configuration.
func (c *Config) SetGlobalConfigValue(key, value string) (*run.Result, error) {
c.globalConfigCache[key] = value
return c.shell.Run("git", "config", "--global", key, value)
}
// SetLocalConfigValue sets the local configuration with the given key to the given value.
func (c *Config) SetLocalConfigValue(key, value string) (*run.Result, error) {
c.localConfigCache[key] = value
return c.shell.Run("git", "config", key, value)
}
// SetMainBranch marks the given branch as the main branch
// in the Git Town configuration.
func (c *Config) SetMainBranch(branchName string) error {
_, err := c.SetLocalConfigValue("git-town.main-branch-name", branchName)
return err
}
// SetNewBranchPush updates whether the current repository is configured to push
// freshly created branches up to the origin remote.
func (c *Config) SetNewBranchPush(value bool, global bool) error {
if global {
_, err := c.SetGlobalConfigValue("git-town.new-branch-push-flag", strconv.FormatBool(value))
return err
}
_, err := c.SetLocalConfigValue("git-town.new-branch-push-flag", strconv.FormatBool(value))
return err
}
// SetOffline updates whether Git Town is in offline mode.
func (c *Config) SetOffline(value bool) error {
_, err := c.SetGlobalConfigValue("git-town.offline", strconv.FormatBool(value))
return err
}
// SetTestOrigin sets the origin to be used for testing.
func (c *Config) SetTestOrigin(value string) error {
_, err := c.SetLocalConfigValue("git-town.testing.remote-url", value)
return err
}
// SetParentBranch marks the given branch as the direct parent of the other given branch
// in the Git Town configuration.
func (c *Config) SetParentBranch(branchName, parentBranchName string) error {
_, err := c.SetLocalConfigValue("git-town-branch."+branchName+".parent", parentBranchName)
return err
}
// SetPerennialBranches marks the given branches as perennial branches.
func (c *Config) SetPerennialBranches(branchNames []string) error {
_, err := c.SetLocalConfigValue("git-town.perennial-branch-names", strings.Join(branchNames, " "))
return err
}
// SetPullBranchStrategy updates the configured pull branch strategy.
func (c *Config) SetPullBranchStrategy(strategy string) error {
_, err := c.SetLocalConfigValue("git-town.pull-branch-strategy", strategy)
return err
}
// SetShouldShipDeleteRemoteBranch updates whether the remote branch should be deleted when shipping.
func (c *Config) SetShouldShipDeleteRemoteBranch(value bool) error {
_, err := c.SetLocalConfigValue("git-town.ship-delete-remote-branch", strconv.FormatBool(value))
return err
}
// SetShouldSyncUpstream updates whether the repository should sync with its upstream.
func (c *Config) SetShouldSyncUpstream(value bool) error {
_, err := c.SetLocalConfigValue("git-town.sync-upstream", strconv.FormatBool(value))
return err
}
// ShouldNewBranchPush indicates whether the current repository is configured to push
// freshly created branches up to the origin remote.
func (c *Config) ShouldNewBranchPush() bool {
config := c.getLocalOrGlobalConfigValue("git-town.new-branch-push-flag")
if config == "" {
return false
}
value, err := strconv.ParseBool(config)
if err != nil {
fmt.Printf("Invalid value for git-town.new-branch-push-flag: %q. Please provide either true or false. Considering false for now.\n", config)
return false
}
return value
}
// ShouldNewBranchPushGlobal indicates whether the global configuration requires pushing
// freshly created branches up to the origin remote.
func (c *Config) ShouldNewBranchPushGlobal() bool {
config := c.getGlobalConfigValue("git-town.new-branch-push-flag")
return config == "true"
}
// ShouldShipDeleteRemoteBranch indicates whether to delete the remote branch after shipping.
func (c *Config) ShouldShipDeleteRemoteBranch() bool {
setting := c.getLocalOrGlobalConfigValue("git-town.ship-delete-remote-branch")
if setting == "" {
return true
}
result, err := strconv.ParseBool(setting)
if err != nil {
fmt.Printf("Invalid value for git-town.ship-delete-remote-branch: %q. Please provide either true or false. Considering true for now.\n", setting)
return true
}
return result
}
// ShouldSyncUpstream indicates whether this repo should sync with its upstream.
func (c *Config) ShouldSyncUpstream() bool {
return c.getLocalOrGlobalConfigValue("git-town.sync-upstream") != "false"
}
// ValidateIsOnline asserts that Git Town is not in offline mode.
func (c *Config) ValidateIsOnline() error {
if c.IsOffline() {
return errors.New("this command requires an active internet connection")
}
return nil
}
<|start_filename|>src/steps/driver_merge_pull_request_step.go<|end_filename|>
package steps
import (
"fmt"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// DriverMergePullRequestStep squash merges the branch with the given name into the current branch.
type DriverMergePullRequestStep struct {
NoOpStep
BranchName string
PullRequestNumber int64
CommitMessage string
DefaultCommitMessage string
enteredEmptyCommitMessage bool
mergeError error
mergeSha string
}
// CreateAbortStep returns the abort step for this step.
func (step *DriverMergePullRequestStep) CreateAbortStep() Step {
if step.enteredEmptyCommitMessage {
return &DiscardOpenChangesStep{}
}
return nil
}
// CreateUndoStep returns the undo step for this step.
func (step *DriverMergePullRequestStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &RevertCommitStep{Sha: step.mergeSha}, nil
}
// GetAutomaticAbortError returns the error message to display when this step
// causes the command to automatically abort.
func (step *DriverMergePullRequestStep) GetAutomaticAbortError() error {
if step.enteredEmptyCommitMessage {
return fmt.Errorf("aborted because commit exited with error")
}
return step.mergeError
}
// Run executes this step.
func (step *DriverMergePullRequestStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
commitMessage := step.CommitMessage
// nolint:nestif
if commitMessage == "" {
// Allow the user to enter the commit message as if shipping without a driver
// then revert the commit since merging via the driver will perform the actual squash merge
step.enteredEmptyCommitMessage = true
err := repo.Logging.SquashMerge(step.BranchName)
if err != nil {
return err
}
err = repo.Silent.CommentOutSquashCommitMessage(step.DefaultCommitMessage + "\n\n")
if err != nil {
return fmt.Errorf("cannot comment out the squash commit message: %w", err)
}
err = repo.Logging.StartCommit()
if err != nil {
return err
}
commitMessage, err = repo.Silent.LastCommitMessage()
if err != nil {
return err
}
err = repo.Logging.DeleteLastCommit()
if err != nil {
return err
}
step.enteredEmptyCommitMessage = false
}
currentBranch, err := repo.Silent.CurrentBranch()
if err != nil {
return err
}
step.mergeSha, step.mergeError = driver.MergePullRequest(drivers.MergePullRequestOptions{
Branch: step.BranchName,
PullRequestNumber: step.PullRequestNumber,
CommitMessage: commitMessage,
LogRequests: true,
ParentBranch: currentBranch,
})
return step.mergeError
}
// ShouldAutomaticallyAbortOnError returns whether this step should cause the command to
// automatically abort if it errors.
func (step *DriverMergePullRequestStep) ShouldAutomaticallyAbortOnError() bool {
return true
}
<|start_filename|>src/drivers/bitbucket.go<|end_filename|>
package drivers
import (
"errors"
"fmt"
"net/url"
"strings"
"github.com/git-town/git-town/src/drivers/helpers"
)
// bitbucketCodeHostingDriver provides access to the API of Bitbucket installations.
type bitbucketCodeHostingDriver struct {
git gitRunner
hostname string
originURL string
repository string
}
// LoadBitbucket provides a Bitbucket driver instance if the given repo configuration is for a Bitbucket repo,
// otherwise nil.
func LoadBitbucket(config config, git gitRunner) CodeHostingDriver {
driverType := config.GetCodeHostingDriverName()
originURL := config.GetRemoteOriginURL()
hostname := helpers.GetURLHostname(originURL)
configuredHostName := config.GetCodeHostingOriginHostname()
if configuredHostName != "" {
hostname = configuredHostName
}
if driverType != "bitbucket" && hostname != "bitbucket.org" {
return nil
}
return &bitbucketCodeHostingDriver{
git: git,
hostname: hostname,
originURL: originURL,
repository: helpers.GetURLRepositoryName(originURL),
}
}
func (d *bitbucketCodeHostingDriver) LoadPullRequestInfo(branch, parentBranch string) (result PullRequestInfo, err error) {
return result, nil
}
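// NewPullRequestURL provides the URL for opening a new pull request from the given branch into its parent branch.
// The result has the shape (a sketch): https://<hostname>/<repository>/pull-request/new?source=...&dest=...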
func (d *bitbucketCodeHostingDriver) NewPullRequestURL(branch, parentBranch string) (string, error) {
query := url.Values{}
branchSha, err := d.git.ShaForBranch(branch)
if err != nil {
return "", fmt.Errorf("cannot determine pull request URL from %q to %q: %w", branch, parentBranch, err)
}
query.Add("source", strings.Join([]string{d.repository, branchSha[0:12], branch}, ":"))
query.Add("dest", strings.Join([]string{d.repository, "", parentBranch}, ":"))
return fmt.Sprintf("%s/pull-request/new?%s", d.RepositoryURL(), query.Encode()), nil
}
func (d *bitbucketCodeHostingDriver) RepositoryURL() string {
return fmt.Sprintf("https://%s/%s", d.hostname, d.repository)
}
func (d *bitbucketCodeHostingDriver) MergePullRequest(options MergePullRequestOptions) (mergeSha string, err error) {
return "", errors.New("shipping pull requests via the Bitbucket API is currently not supported. If you need this functionality, please vote for it by opening a ticket at https://github.com/git-town/git-town/issues")
}
func (d *bitbucketCodeHostingDriver) HostingServiceName() string {
return "Bitbucket"
}
<|start_filename|>src/steps/fetch_upstream_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// FetchUpstreamStep brings the Git history of the local repository
// up to speed with activities that happened in the upstream remote.
type FetchUpstreamStep struct {
NoOpStep
BranchName string
}
// Run executes this step.
func (step *FetchUpstreamStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.FetchUpstream(step.BranchName)
}
<|start_filename|>website/js/application.coffee<|end_filename|>
# application coffeescript goes here
<|start_filename|>main.go<|end_filename|>
// Git Town - a high-level CLI for Git
//
// Git Town makes Git more efficient, especially for large teams.
// More information at https://github.com/git-town/git-town.
package main
import "github.com/git-town/git-town/src/cmd"
func main() {
cmd.Execute()
}
<|start_filename|>src/steps/stash_open_changes_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// StashOpenChangesStep stores all uncommitted changes on the Git stash.
type StashOpenChangesStep struct {
NoOpStep
}
// CreateUndoStep returns the undo step for this step.
func (step *StashOpenChangesStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &RestoreOpenChangesStep{}, nil
}
// Run executes this step.
func (step *StashOpenChangesStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.Stash()
}
<|start_filename|>test/git_environment.go<|end_filename|>
package test
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/cucumber/messages-go/v10"
"github.com/git-town/git-town/src/git"
"github.com/git-town/git-town/test/helpers"
)
// GitEnvironment is the complete Git environment for a test scenario.
type GitEnvironment struct {
// Dir is the directory that this environment is in.
Dir string
// OriginRepo is the Git repository that simulates the remote repo (on GitHub).
// If this value is nil, the current test setup has no remote.
OriginRepo *Repo
// DevRepo is the Git repository that is locally checked out at the developer machine.
DevRepo Repo
// DevShell provides a reference to the MockingShell instance used in the DeveloperRepo.
DevShell *MockingShell
// CoworkerRepo is the optional Git repository that is locally checked out at the coworker machine.
CoworkerRepo *Repo
// UpstreamRepo is the optional Git repository that contains the upstream for this environment.
UpstreamRepo *Repo
}
// CloneGitEnvironment provides a GitEnvironment instance in the given directory,
// containing a copy of the given GitEnvironment.
func CloneGitEnvironment(original *GitEnvironment, dir string) (*GitEnvironment, error) {
err := CopyDirectory(original.Dir, dir)
if err != nil {
return nil, fmt.Errorf("cannot clone GitEnvironment %q to folder %q: %w", original.Dir, dir, err)
}
binDir := filepath.Join(dir, "bin")
originDir := filepath.Join(dir, "origin")
originRepo := NewRepo(originDir, dir, "")
developerDir := filepath.Join(dir, "developer")
developerShell := NewMockingShell(developerDir, dir, binDir)
result := GitEnvironment{
Dir: dir,
DevRepo: NewRepo(developerDir, dir, binDir),
DevShell: developerShell,
OriginRepo: &originRepo,
}
// Since we copied the files from the memoized directory,
// we have to set the "origin" remote to the copied origin repo here.
_, err = result.DevShell.Run("git", "remote", "remove", "origin")
if err != nil {
return nil, fmt.Errorf("cannot remove remote: %w", err)
}
err = result.DevRepo.AddRemote("origin", result.originRepoPath())
if err != nil {
return nil, fmt.Errorf("cannot set remote: %w", err)
}
err = result.DevRepo.Fetch()
if err != nil {
return nil, fmt.Errorf("cannot fetch: %w", err)
}
// and connect the main branches again
err = result.DevRepo.ConnectTrackingBranch("main")
if err != nil {
return nil, fmt.Errorf("cannot connect tracking branch: %w", err)
}
return &result, err
}
// NewStandardGitEnvironment provides a GitEnvironment in the given directory,
// fully populated as a standardized setup for scenarios.
//
// The origin repo has the master branch checked out.
// Git repos cannot receive pushes to the currently checked out branch
// because that would change files in the current workspace.
// The tests don't use the master branch.
func NewStandardGitEnvironment(dir string) (gitEnv *GitEnvironment, err error) {
// create the GitEnvironment
gitEnv = &GitEnvironment{Dir: dir}
// create the origin repo
err = os.MkdirAll(gitEnv.originRepoPath(), 0744)
if err != nil {
return nil, fmt.Errorf("cannot create directory %q: %w", gitEnv.originRepoPath(), err)
}
// initialize the repo in the folder
originRepo, err := InitRepo(gitEnv.originRepoPath(), gitEnv.Dir, gitEnv.binPath())
if err != nil {
return nil, err
}
err = originRepo.RunMany([][]string{
{"git", "commit", "--allow-empty", "-m", "Initial commit"},
{"git", "branch", "main", "master"},
})
if err != nil {
return gitEnv, fmt.Errorf("cannot initialize origin directory at %q: %w", gitEnv.originRepoPath(), err)
}
gitEnv.OriginRepo = &originRepo
// clone the "developer" repo
gitEnv.DevRepo, err = originRepo.Clone(gitEnv.developerRepoPath())
if err != nil {
return gitEnv, fmt.Errorf("cannot clone developer repo %q from origin %q: %w", gitEnv.originRepoPath(), gitEnv.developerRepoPath(), err)
}
err = gitEnv.initializeWorkspace(&gitEnv.DevRepo)
if err != nil {
return gitEnv, fmt.Errorf("cannot create new standard Git environment: %w", err)
}
err = gitEnv.DevRepo.RemoveUnnecessaryFiles()
if err != nil {
return gitEnv, err
}
err = gitEnv.OriginRepo.RemoveUnnecessaryFiles()
if err != nil {
return gitEnv, err
}
return gitEnv, nil
}
// AddUpstream adds an upstream repository.
func (env *GitEnvironment) AddUpstream() (err error) {
repo, err := env.DevRepo.Clone(filepath.Join(env.Dir, "upstream"))
if err != nil {
return fmt.Errorf("cannot clone upstream: %w", err)
}
env.UpstreamRepo = &repo
err = env.DevRepo.AddRemote("upstream", env.UpstreamRepo.WorkingDir())
if err != nil {
return fmt.Errorf("cannot set upstream remote: %w", err)
}
return nil
}
// AddCoworkerRepo adds a coworker repository.
func (env *GitEnvironment) AddCoworkerRepo() (err error) {
coworkerRepo, err := env.OriginRepo.Clone(env.coworkerRepoPath())
if err != nil {
return fmt.Errorf("cannot clone coworker: %w", err)
}
env.CoworkerRepo = &coworkerRepo
return env.initializeWorkspace(env.CoworkerRepo)
}
// binPath provides the full path of the folder containing the test tools for this GitEnvironment.
func (env *GitEnvironment) binPath() string {
return filepath.Join(env.Dir, "bin")
}
// Branches provides a tabular list of all branches in this GitEnvironment.
func (env *GitEnvironment) Branches() (result DataTable, err error) {
result.AddRow("REPOSITORY", "BRANCHES")
branches, err := env.DevRepo.LocalBranchesMainFirst()
if err != nil {
return result, fmt.Errorf("cannot determine the developer repo branches of the GitEnvironment: %w", err)
}
result.AddRow("local", strings.Join(branches, ", "))
if env.OriginRepo != nil {
branches, err = env.OriginRepo.LocalBranchesMainFirst()
if err != nil {
return result, fmt.Errorf("cannot determine the origin repo branches of the GitEnvironment: %w", err)
}
result.AddRow("remote", strings.Join(branches, ", "))
}
return result, nil
}
// CreateCommits creates the commits described by the given Gherkin table in this Git repository.
func (env *GitEnvironment) CreateCommits(commits []git.Commit) error {
for _, commit := range commits {
var err error
for _, location := range commit.Locations {
switch location {
case "coworker":
err = env.CoworkerRepo.CreateCommit(commit)
case "local":
err = env.DevRepo.CreateCommit(commit)
case "local, remote":
err = env.DevRepo.CreateCommit(commit)
if err != nil {
return fmt.Errorf("cannot create local commit: %w", err)
}
err = env.DevRepo.PushBranchSetUpstream(commit.Branch)
if err != nil {
return fmt.Errorf("cannot push branch %q after creating commit: %w", commit.Branch, err)
}
case "remote":
err = env.OriginRepo.CreateCommit(commit)
case "upstream":
err = env.UpstreamRepo.CreateCommit(commit)
default:
return fmt.Errorf("unknown commit location %q", commit.Locations)
}
}
if err != nil {
return err
}
}
// after setting up the commits, check out the "master" branch in the origin repo so that we can git-push to it.
if env.OriginRepo != nil {
err := env.OriginRepo.CheckoutBranch("master")
if err != nil {
return fmt.Errorf("cannot change origin repo back to master: %w", err)
}
}
return nil
}
// CreateRemoteBranch creates a branch with the given name in the remote (origin) repository only.
func (env GitEnvironment) CreateRemoteBranch(name, parent string) error {
err := env.OriginRepo.CreateBranch(name, parent)
if err != nil {
return fmt.Errorf("cannot create remote branch %q: %w", name, err)
}
return nil
}
// CreateTags creates tags from the given Gherkin table.
func (env GitEnvironment) CreateTags(table *messages.PickleStepArgument_PickleTable) error {
columnNames := helpers.TableFields(table)
if columnNames[0] != "NAME" && columnNames[1] != "LOCATION" {
return fmt.Errorf("tag table must have columns NAME and LOCATION")
}
for _, row := range table.Rows[1:] {
name := row.Cells[0].Value
location := row.Cells[1].Value
var err error
switch location {
case "local":
err = env.DevRepo.CreateTag(name)
case "remote":
err = env.OriginRepo.CreateTag(name)
default:
err = fmt.Errorf("tag table LOCATION must be 'local' or 'remote'")
}
if err != nil {
return err
}
}
return nil
}
// CommitTable provides a table for all commits in this Git environment containing only the given fields.
func (env GitEnvironment) CommitTable(fields []string) (result DataTable, err error) {
builder := NewCommitTableBuilder()
localCommits, err := env.DevRepo.Commits(fields)
if err != nil {
return result, fmt.Errorf("cannot determine commits in the developer repo: %w", err)
}
builder.AddMany(localCommits, "local")
if env.CoworkerRepo != nil {
coworkerCommits, err := env.CoworkerRepo.Commits(fields)
if err != nil {
return result, fmt.Errorf("cannot determine commits in the coworker repo: %w", err)
}
builder.AddMany(coworkerCommits, "coworker")
}
if env.OriginRepo != nil {
remoteCommits, err := env.OriginRepo.Commits(fields)
if err != nil {
return result, fmt.Errorf("cannot determine commits in the origin repo: %w", err)
}
builder.AddMany(remoteCommits, "remote")
}
if env.UpstreamRepo != nil {
upstreamCommits, err := env.UpstreamRepo.Commits(fields)
if err != nil {
return result, fmt.Errorf("cannot determine commits in the origin repo: %w", err)
}
builder.AddMany(upstreamCommits, "upstream")
}
return builder.Table(fields), nil
}
// TagTable provides a table for all tags in this Git environment.
func (env GitEnvironment) TagTable() (result DataTable, err error) {
builder := NewTagTableBuilder()
localTags, err := env.DevRepo.Tags()
if err != nil {
return result, err
}
builder.AddMany(localTags, "local")
if env.OriginRepo != nil {
remoteTags, err := env.OriginRepo.Tags()
if err != nil {
return result, err
}
builder.AddMany(remoteTags, "remote")
}
return builder.Table(), nil
}
func (env GitEnvironment) initializeWorkspace(repo *Repo) error {
return repo.RunMany([][]string{
{"git", "config", "git-town.main-branch-name", "main"},
{"git", "config", "git-town.perennial-branch-names", ""},
{"git", "checkout", "main"},
// NOTE: the developer repo receives the master branch from origin
// but we don't want it here because it isn't used in tests.
{"git", "branch", "-d", "master"},
})
}
// coworkerRepoPath provides the full path to the coworker Git repository.
func (env GitEnvironment) coworkerRepoPath() string {
return filepath.Join(env.Dir, "coworker")
}
// developerRepoPath provides the full path to the developer Git repository.
func (env GitEnvironment) developerRepoPath() string {
return filepath.Join(env.Dir, "developer")
}
// originRepoPath provides the full path to the origin Git repository.
func (env GitEnvironment) originRepoPath() string {
return filepath.Join(env.Dir, "origin")
}
// Remove deletes all files used by this GitEnvironment from disk.
func (env GitEnvironment) Remove() error {
return os.RemoveAll(env.Dir)
}
<|start_filename|>src/steps/run_state_test.go<|end_filename|>
package steps_test
import (
"encoding/json"
"testing"
"github.com/git-town/git-town/src/steps"
"github.com/stretchr/testify/assert"
)
func TestRunState_Marshal(t *testing.T) {
runState := &steps.RunState{
AbortStepList: steps.StepList{
List: []steps.Step{&steps.ResetToShaStep{Sha: "abc"}},
},
Command: "sync",
RunStepList: steps.StepList{
List: []steps.Step{&steps.ResetToShaStep{Sha: "abc"}},
},
UndoStepList: steps.StepList{
List: []steps.Step{&steps.ResetToShaStep{Sha: "abc"}},
},
}
data, err := json.Marshal(runState)
assert.NoError(t, err)
newRunState := &steps.RunState{}
err = json.Unmarshal(data, &newRunState)
assert.NoError(t, err)
assert.Equal(t, runState, newRunState)
}
<|start_filename|>src/drivers/helpers/url-repo-name.go<|end_filename|>
package helpers
import (
"regexp"
"strings"
)
// GetURLRepositoryName returns the repository name contained within the given Git URL.
func GetURLRepositoryName(url string) string {
hostname := GetURLHostname(url)
repositoryNameRegex := regexp.MustCompile(".*" + hostname + "[/:](.+)")
matches := repositoryNameRegex.FindStringSubmatch(url)
if matches == nil {
return ""
}
return strings.TrimSuffix(matches[1], ".git")
}
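// Illustrative examples, not part of the original source. Assuming GetURLHostname
// returns "github.com" for these URLs, GetURLRepositoryName extracts the same
// repository name from SSH and HTTPS origin URLs:
//
//	GetURLRepositoryName("git@github.com:git-town/git-town.git")     // "git-town/git-town"
//	GetURLRepositoryName("https://github.com/git-town/git-town.git") // "git-town/git-town"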
<|start_filename|>src/cmd/main_branch.go<|end_filename|>
package cmd
import (
"fmt"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/git"
"github.com/spf13/cobra"
)
var mainBranchCommand = &cobra.Command{
Use: "main-branch [<branch>]",
Short: "Displays or sets your main development branch",
Long: `Displays or sets your main development branch
The main branch is the Git branch from which new feature branches are cut.`,
Run: func(cmd *cobra.Command, args []string) {
if len(args) == 0 {
printMainBranch()
} else {
err := setMainBranch(args[0], prodRepo)
if err != nil {
cli.Exit(err)
}
}
},
Args: cobra.MaximumNArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
return ValidateIsRepository(prodRepo)
},
}
func printMainBranch() {
cli.Println(cli.PrintableMainBranch(prodRepo.Config.GetMainBranch()))
}
func setMainBranch(branchName string, repo *git.ProdRepo) error {
hasBranch, err := repo.Silent.HasLocalBranch(branchName)
if err != nil {
return err
}
if !hasBranch {
return fmt.Errorf("there is no branch named %q", branchName)
}
return repo.Config.SetMainBranch(branchName)
}
func init() {
RootCmd.AddCommand(mainBranchCommand)
}
<|start_filename|>src/steps/set_parent_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// SetParentBranchStep registers the branch with the given name as a parent
// of the branch with the other given name.
type SetParentBranchStep struct {
NoOpStep
BranchName string
ParentBranchName string
previousParent string
}
// CreateUndoStep returns the undo step for this step.
func (step *SetParentBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
if step.previousParent == "" {
return &DeleteParentBranchStep{BranchName: step.BranchName}, nil
}
return &SetParentBranchStep{BranchName: step.BranchName, ParentBranchName: step.previousParent}, nil
}
// Run executes this step.
func (step *SetParentBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
step.previousParent = repo.Config.GetParentBranch(step.BranchName)
return repo.Config.SetParentBranch(step.BranchName, step.ParentBranchName)
}
<|start_filename|>src/drivers/gitlab.go<|end_filename|>
package drivers
import (
"errors"
"fmt"
"net/url"
"github.com/git-town/git-town/src/drivers/helpers"
)
// gitlabCodeHostingDriver provides access to the API of GitLab installations.
type gitlabCodeHostingDriver struct {
originURL string
hostname string
repository string
}
// LoadGitlab provides a GitLab driver instance if the given repo configuration is for a GitLab repo,
// otherwise nil.
func LoadGitlab(config config) CodeHostingDriver {
driverType := config.GetCodeHostingDriverName()
originURL := config.GetRemoteOriginURL()
hostname := helpers.GetURLHostname(originURL)
configuredHostName := config.GetCodeHostingOriginHostname()
if configuredHostName != "" {
hostname = configuredHostName
}
if driverType != "gitlab" && hostname != "gitlab.com" {
return nil
}
return &gitlabCodeHostingDriver{
originURL: originURL,
hostname: hostname,
repository: helpers.GetURLRepositoryName(originURL),
}
}
func (d *gitlabCodeHostingDriver) LoadPullRequestInfo(branch, parentBranch string) (result PullRequestInfo, err error) {
return result, nil
}
func (d *gitlabCodeHostingDriver) NewPullRequestURL(branch, parentBranch string) (string, error) {
query := url.Values{}
query.Add("merge_request[source_branch]", branch)
query.Add("merge_request[target_branch]", parentBranch)
return fmt.Sprintf("%s/merge_requests/new?%s", d.RepositoryURL(), query.Encode()), nil
}
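// Illustrative example, not part of the original source: for a branch "feature"
// with parent branch "main" on a repository hosted at gitlab.com/git-town/git-town,
// NewPullRequestURL produces (before query.Encode() escapes the bracketed keys):
//
//	https://gitlab.com/git-town/git-town/merge_requests/new?merge_request[source_branch]=feature&merge_request[target_branch]=main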
func (d *gitlabCodeHostingDriver) RepositoryURL() string {
return fmt.Sprintf("https://%s/%s", d.hostname, d.repository)
}
func (d *gitlabCodeHostingDriver) MergePullRequest(options MergePullRequestOptions) (mergeSha string, err error) {
return "", errors.New("shipping pull requests via the GitLab API is currently not supported. If you need this functionality, please vote for it by opening a ticket at https://github.com/git-town/git-town/issues")
}
func (d *gitlabCodeHostingDriver) HostingServiceName() string {
return "GitLab"
}
<|start_filename|>src/cmd/root_test.go<|end_filename|>
package cmd_test
import (
"fmt"
"testing"
"github.com/git-town/git-town/src/cmd"
"github.com/stretchr/testify/assert"
)
func TestIsAcceptableGitVersion(t *testing.T) {
tests := []struct {
major int
minor int
want bool
}{
{2, 7, true},
{3, 0, true},
{2, 6, false},
{1, 8, false},
}
for _, test := range tests {
have := cmd.IsAcceptableGitVersion(test.major, test.minor)
assert.Equal(t, test.want, have, fmt.Sprintf("%d.%d --> %t", test.major, test.minor, test.want))
}
}
<|start_filename|>src/browsers/browsers.go<|end_filename|>
package browsers
import (
"fmt"
"runtime"
"github.com/git-town/git-town/src/run"
)
// OpenBrowserCommand returns the command to run on the console
// to open the default browser.
func OpenBrowserCommand() string {
if runtime.GOOS == "windows" {
// NOTE: the "explorer" command cannot handle special characters
// like "?" and "=".
// In particular, "?" can be escaped via "\", but "=" cannot.
// So we are using "start" here.
return "start"
}
openBrowserCommands := []string{
"wsl-open", // for Windows Subsystem for Linux, see https://github.com/git-town/git-town/issues/1344
"garcon-url-handler", // opens links in native browser on ChromeOS
"xdg-open",
"open",
"cygstart",
"x-www-browser",
"firefox",
"opera",
"mozilla",
"netscape",
}
for _, browserCommand := range openBrowserCommands {
res, err := run.Exec("which", browserCommand)
if err == nil && res.OutputSanitized() != "" {
return browserCommand
}
}
return ""
}
// Open opens the default browser with the given URL.
// If no browser is found, prints the URL.
func Open(url string, shell run.Shell) {
command := OpenBrowserCommand()
if command == "" {
fmt.Println("Please open in a browser: " + url)
return
}
_, err := shell.Run(command, url)
if err != nil {
fmt.Println("Please open in a browser: " + url)
}
}
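// Illustrative usage example, not part of the original source. It assumes that
// run.SilentShell implements the run.Shell interface, as the tests in this
// repository suggest. Open falls back to printing the URL when no known browser
// command is found on the PATH:
//
//	browsers.Open("https://github.com/git-town/git-town", run.SilentShell{})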
<|start_filename|>src/cmd/offline.go<|end_filename|>
package cmd
import (
"fmt"
"strconv"
"github.com/git-town/git-town/src/cli"
"github.com/spf13/cobra"
)
var offlineCommand = &cobra.Command{
Use: "offline [(true | false)]",
Short: "Displays or sets offline mode",
Long: `Displays or sets offline mode
Git Town avoids network operations in offline mode.`,
Run: func(cmd *cobra.Command, args []string) {
if len(args) == 0 {
cli.Println(cli.PrintableOfflineFlag(prodRepo.Config.IsOffline()))
} else {
value, err := strconv.ParseBool(args[0])
if err != nil {
cli.Exit(fmt.Errorf(`invalid argument: %q. Please provide either "true" or "false"`, args[0]))
}
err = prodRepo.Config.SetOffline(value)
if err != nil {
cli.Exit(err)
}
}
},
Args: cobra.MaximumNArgs(1),
}
func init() {
RootCmd.AddCommand(offlineCommand)
}
<|start_filename|>src/drivers/bitbucket_test.go<|end_filename|>
package drivers_test
import (
"testing"
"github.com/git-town/git-town/src/drivers"
"github.com/stretchr/testify/assert"
)
func TestLoadBitbucket(t *testing.T) {
driver := drivers.LoadBitbucket(mockConfig{
codeHostingDriverName: "bitbucket",
remoteOriginURL: "git@self-hosted-bitbucket.com:git-town/git-town.git",
}, nil)
assert.NotNil(t, driver)
assert.Equal(t, "Bitbucket", driver.HostingServiceName())
assert.Equal(t, "https://self-hosted-bitbucket.com/git-town/git-town", driver.RepositoryURL())
}
func TestLoadBitbucket_customHostName(t *testing.T) {
driver := drivers.LoadBitbucket(mockConfig{
remoteOriginURL: "git@my-ssh-identity.com:git-town/git-town.git",
configuredHostName: "bitbucket.org",
}, nil)
assert.NotNil(t, driver)
assert.Equal(t, "Bitbucket", driver.HostingServiceName())
assert.Equal(t, "https://bitbucket.org/git-town/git-town", driver.RepositoryURL())
}
<|start_filename|>src/steps/preserve_checkout_history_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// PreserveCheckoutHistoryStep ensures that the previously checked out branch (the branch that "git checkout -" switches to) is the one the user expects after a Git Town command ran.
type PreserveCheckoutHistoryStep struct {
NoOpStep
InitialBranch string
InitialPreviouslyCheckedOutBranch string
}
// Run executes this step.
func (step *PreserveCheckoutHistoryStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
expectedPreviouslyCheckedOutBranch, err := repo.Silent.ExpectedPreviouslyCheckedOutBranch(step.InitialPreviouslyCheckedOutBranch, step.InitialBranch)
if err != nil {
return err
}
// NOTE: errors are not a failure condition here --> ignoring them
previouslyCheckedOutBranch, _ := repo.Silent.PreviouslyCheckedOutBranch()
if expectedPreviouslyCheckedOutBranch == previouslyCheckedOutBranch {
return nil
}
currentBranch, err := repo.Silent.CurrentBranch()
if err != nil {
return err
}
err = repo.Silent.CheckoutBranch(expectedPreviouslyCheckedOutBranch)
if err != nil {
return err
}
return repo.Silent.CheckoutBranch(currentBranch)
}
<|start_filename|>src/steps/continue_rebase_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// ContinueRebaseBranchStep finishes an ongoing rebase operation
// assuming all conflicts have been resolved by the user.
type ContinueRebaseBranchStep struct {
NoOpStep
}
// CreateAbortStep returns the abort step for this step.
func (step *ContinueRebaseBranchStep) CreateAbortStep() Step {
return &AbortRebaseBranchStep{}
}
// CreateContinueStep returns the continue step for this step.
func (step *ContinueRebaseBranchStep) CreateContinueStep() Step {
return step
}
// Run executes this step.
func (step *ContinueRebaseBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
hasRebaseInProgress, err := repo.Silent.HasRebaseInProgress()
if err != nil {
return err
}
if hasRebaseInProgress {
return repo.Logging.ContinueRebase()
}
return nil
}
<|start_filename|>src/run/shell.go<|end_filename|>
package run
// Shell allows running commands in a subshell.
type Shell interface {
Run(string, ...string) (*Result, error)
RunMany([][]string) error
RunString(string) (*Result, error)
RunStringWith(string, Options) (*Result, error)
WorkingDir() string
}
<|start_filename|>src/steps/step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// Step represents a dedicated activity within a Git Town command.
// Git Town commands consist of a number of steps that need to be executed.
type Step interface {
CreateAbortStep() Step
CreateContinueStep() Step
CreateUndoStep(*git.ProdRepo) (Step, error)
GetAutomaticAbortError() error
Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error
ShouldAutomaticallyAbortOnError() bool
}
<|start_filename|>text-run/command-heading.js<|end_filename|>
const diff = require("assert-no-diff")
const getCommand = require("./helpers/get-command.js")
module.exports = async function (activity) {
diff.wordsWithSpace(getCommand(activity.file), getHeadingText(activity))
}
function getHeadingText(activity) {
return activity.nodes.text().replace(" command", "").toLowerCase()
}
<|start_filename|>src/drivers/github_test.go<|end_filename|>
package drivers_test
import (
"encoding/json"
"io/ioutil"
"net/http"
"testing"
"github.com/git-town/git-town/src/drivers"
"github.com/stretchr/testify/assert"
httpmock "gopkg.in/jarcoal/httpmock.v1"
)
const githubRoot = "https://api.github.com"
const githubCurrOpen = githubRoot + "/repos/git-town/git-town/pulls?base=main&head=git-town%3Afeature&state=open"
const githubChildOpen = githubRoot + "/repos/git-town/git-town/pulls?base=feature&state=open"
const githubPR2 = githubRoot + "/repos/git-town/git-town/pulls/2"
const githubPR3 = githubRoot + "/repos/git-town/git-town/pulls/3"
const githubPR1Merge = githubRoot + "/repos/git-town/git-town/pulls/1/merge"
func setupGithubDriver(t *testing.T, token string) (drivers.CodeHostingDriver, func()) {
httpmock.Activate()
driver := drivers.LoadGithub(mockConfig{
remoteOriginURL: "git@github.com:git-town/git-town.git",
gitHubToken: token,
}, log)
assert.NotNil(t, driver)
return driver, func() {
httpmock.DeactivateAndReset()
}
}
func TestLoadGithub(t *testing.T) {
driver := drivers.LoadGithub(mockConfig{
codeHostingDriverName: "github",
remoteOriginURL: "git@self-hosted-github.com:git-town/git-town.git",
}, log)
assert.NotNil(t, driver)
assert.Equal(t, "GitHub", driver.HostingServiceName())
assert.Equal(t, "https://self-hosted-github.com/git-town/git-town", driver.RepositoryURL())
}
func TestLoadGithub_customHostName(t *testing.T) {
driver := drivers.LoadGithub(mockConfig{
remoteOriginURL: "git@my-ssh-identity.com:git-town/git-town.git",
configuredHostName: "github.com",
}, log)
assert.NotNil(t, driver)
assert.Equal(t, "GitHub", driver.HostingServiceName())
assert.Equal(t, "https://github.com/git-town/git-town", driver.RepositoryURL())
}
func TestGitHubDriver_LoadPullRequestInfo(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
httpmock.RegisterResponder("GET", githubCurrOpen, httpmock.NewStringResponder(200, `[{"number": 1, "title": "my title" }]`))
prInfo, err := driver.LoadPullRequestInfo("feature", "main")
assert.NoError(t, err)
assert.True(t, prInfo.CanMergeWithAPI)
assert.Equal(t, "my title (#1)", prInfo.DefaultCommitMessage)
assert.Equal(t, int64(1), prInfo.PullRequestNumber)
}
func TestGitHubDriver_LoadPullRequestInfo_EmptyGithubToken(t *testing.T) {
driver, teardown := setupGithubDriver(t, "")
defer teardown()
prInfo, err := driver.LoadPullRequestInfo("feature", "main")
assert.NoError(t, err)
assert.False(t, prInfo.CanMergeWithAPI)
}
func TestGitHubDriver_LoadPullRequestInfo_GetPullRequestNumberFails(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
httpmock.RegisterResponder("GET", githubCurrOpen, httpmock.NewStringResponder(404, ""))
_, err := driver.LoadPullRequestInfo("feature", "main")
assert.Error(t, err)
}
func TestGitHubDriver_LoadPullRequestInfo_NoPullRequestForBranch(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
httpmock.RegisterResponder("GET", githubCurrOpen, httpmock.NewStringResponder(200, "[]"))
prInfo, err := driver.LoadPullRequestInfo("feature", "main")
assert.NoError(t, err)
assert.False(t, prInfo.CanMergeWithAPI)
}
func TestGitHubDriver_LoadPullRequestInfo_MultiplePullRequestsForBranch(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
httpmock.RegisterResponder("GET", githubCurrOpen, httpmock.NewStringResponder(200, `[{"number": 1}, {"number": 2}]`))
prInfo, err := driver.LoadPullRequestInfo("feature", "main")
assert.NoError(t, err)
assert.False(t, prInfo.CanMergeWithAPI)
}
func TestGitHubDriver_MergePullRequest_GetPullRequestIdsFails(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
httpmock.RegisterResponder("GET", githubChildOpen, httpmock.NewStringResponder(404, ""))
_, err := driver.MergePullRequest(options)
assert.Error(t, err)
}
func TestGitHubDriver_MergePullRequest_GetPullRequestToMergeFails(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
httpmock.RegisterResponder("GET", githubChildOpen, httpmock.NewStringResponder(200, "[]"))
httpmock.RegisterResponder("GET", githubCurrOpen, httpmock.NewStringResponder(404, ""))
_, err := driver.MergePullRequest(options)
assert.Error(t, err)
}
func TestGitHubDriver_MergePullRequest_PullRequestNotFound(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
httpmock.RegisterResponder("GET", githubChildOpen, httpmock.NewStringResponder(200, "[]"))
httpmock.RegisterResponder("GET", githubCurrOpen, httpmock.NewStringResponder(200, "[]"))
_, err := driver.MergePullRequest(options)
assert.Error(t, err)
assert.Equal(t, "cannot merge via Github since there is no pull request", err.Error())
}
func TestGitHubDriver_MergePullRequest(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
PullRequestNumber: 1,
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
var mergeRequest *http.Request
httpmock.RegisterResponder("GET", githubChildOpen, httpmock.NewStringResponder(200, "[]"))
httpmock.RegisterResponder("GET", githubCurrOpen, httpmock.NewStringResponder(200, `[{"number": 1}]`))
httpmock.RegisterResponder("PUT", githubPR1Merge, func(req *http.Request) (*http.Response, error) {
mergeRequest = req
return httpmock.NewStringResponse(200, `{"sha": "abc123"}`), nil
})
sha, err := driver.MergePullRequest(options)
assert.NoError(t, err)
assert.Equal(t, "abc123", sha)
mergeParameters := getRequestData(mergeRequest)
assert.Equal(t, "title", mergeParameters["commit_title"])
assert.Equal(t, "extra detail1\nextra detail2", mergeParameters["commit_message"])
assert.Equal(t, "squash", mergeParameters["merge_method"])
}
func TestGitHubDriver_MergePullRequest_MergeFails(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
httpmock.RegisterResponder("GET", githubChildOpen, httpmock.NewStringResponder(200, "[]"))
httpmock.RegisterResponder("GET", githubCurrOpen, httpmock.NewStringResponder(200, `[{"number": 1}]`))
httpmock.RegisterResponder("PUT", githubPR1Merge, httpmock.NewStringResponder(404, ""))
_, err := driver.MergePullRequest(options)
assert.Error(t, err)
}
func TestGitHubDriver_MergePullRequest_UpdateChildPRs(t *testing.T) {
driver, teardown := setupGithubDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
PullRequestNumber: 1,
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
var updateRequest1, updateRequest2 *http.Request
httpmock.RegisterResponder("GET", githubChildOpen, httpmock.NewStringResponder(200, `[{"number": 2}, {"number": 3}]`))
httpmock.RegisterResponder("PATCH", githubPR2, func(req *http.Request) (*http.Response, error) {
updateRequest1 = req
return httpmock.NewStringResponse(200, ""), nil
})
httpmock.RegisterResponder("PATCH", githubPR3, func(req *http.Request) (*http.Response, error) {
updateRequest2 = req
return httpmock.NewStringResponse(200, ""), nil
})
httpmock.RegisterResponder("GET", githubCurrOpen, httpmock.NewStringResponder(200, `[{"number": 1}]`))
httpmock.RegisterResponder("PUT", githubPR1Merge, httpmock.NewStringResponder(200, `{"sha": "abc123"}`))
_, err := driver.MergePullRequest(options)
assert.NoError(t, err)
updateParameters1 := getRequestData(updateRequest1)
assert.Equal(t, "main", updateParameters1["base"])
updateParameters2 := getRequestData(updateRequest2)
assert.Equal(t, "main", updateParameters2["base"])
}
func getRequestData(request *http.Request) map[string]interface{} {
dataStr, err := ioutil.ReadAll(request.Body)
if err != nil {
panic(err)
}
data := map[string]interface{}{}
err = json.Unmarshal(dataStr, &data)
if err != nil {
panic(err)
}
return data
}
<|start_filename|>src/drivers/core_test.go<|end_filename|>
package drivers_test
type mockConfig struct {
configuredHostName string
codeHostingDriverName string
giteaToken string
gitHubToken string
mainBranch string
remoteOriginURL string
}
func (mc mockConfig) GetCodeHostingOriginHostname() string {
return mc.configuredHostName
}
func (mc mockConfig) GetCodeHostingDriverName() string {
return mc.codeHostingDriverName
}
func (mc mockConfig) GetGitHubToken() string {
return mc.gitHubToken
}
func (mc mockConfig) GetGiteaToken() string {
return mc.giteaToken
}
func (mc mockConfig) GetMainBranch() string {
return mc.mainBranch
}
func (mc mockConfig) GetRemoteOriginURL() string {
return mc.remoteOriginURL
}
<|start_filename|>src/cmd/prepend.go<|end_filename|>
package cmd
import (
"fmt"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/git"
"github.com/git-town/git-town/src/prompt"
"github.com/git-town/git-town/src/steps"
"github.com/spf13/cobra"
)
type prependConfig struct {
initialBranch string
parentBranch string
targetBranch string
ancestorBranches []string
hasOrigin bool
shouldNewBranchPush bool
isOffline bool
}
var prependCommand = &cobra.Command{
Use: "prepend <branch>",
Short: "Creates a new feature branch as the parent of the current branch",
Long: `Creates a new feature branch as the parent of the current branch
Syncs the parent branch,
cuts a new feature branch with the given name off the parent branch,
makes the new branch the parent of the current branch,
pushes the new feature branch to the remote repository
(if "new-branch-push-flag" is true),
and brings over all uncommitted changes to the new feature branch.
See "sync" for remote upstream options.
`,
Run: func(cmd *cobra.Command, args []string) {
config, err := getPrependConfig(args, prodRepo)
if err != nil {
cli.Exit(err)
}
stepList, err := getPrependStepList(config, prodRepo)
if err != nil {
cli.Exit(err)
}
runState := steps.NewRunState("prepend", stepList)
err = steps.Run(runState, prodRepo, nil)
if err != nil {
fmt.Println(err)
cli.Exit(err)
}
},
Args: cobra.ExactArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
return validateIsConfigured(prodRepo)
},
}
func getPrependConfig(args []string, repo *git.ProdRepo) (result prependConfig, err error) {
result.initialBranch, err = repo.Silent.CurrentBranch()
if err != nil {
return result, err
}
result.targetBranch = args[0]
result.hasOrigin, err = repo.Silent.HasRemote("origin")
if err != nil {
return result, err
}
result.shouldNewBranchPush = repo.Config.ShouldNewBranchPush()
result.isOffline = repo.Config.IsOffline()
if result.hasOrigin && !result.isOffline {
err := repo.Logging.Fetch()
if err != nil {
return result, err
}
}
hasBranch, err := repo.Silent.HasLocalOrRemoteBranch(result.targetBranch)
if err != nil {
return result, err
}
if hasBranch {
return result, fmt.Errorf("a branch named %q already exists", result.targetBranch)
}
if !repo.Config.IsFeatureBranch(result.initialBranch) {
return result, fmt.Errorf("the branch %q is not a feature branch. Only feature branches can have parent branches", result.initialBranch)
}
err = prompt.EnsureKnowsParentBranches([]string{result.initialBranch}, repo)
if err != nil {
return result, err
}
result.parentBranch = repo.Config.GetParentBranch(result.initialBranch)
result.ancestorBranches = repo.Config.GetAncestorBranches(result.initialBranch)
return result, nil
}
func getPrependStepList(config prependConfig, repo *git.ProdRepo) (result steps.StepList, err error) {
for _, branchName := range config.ancestorBranches {
steps, err := steps.GetSyncBranchSteps(branchName, true, repo)
if err != nil {
return result, err
}
result.AppendList(steps)
}
result.Append(&steps.CreateBranchStep{BranchName: config.targetBranch, StartingPoint: config.parentBranch})
result.Append(&steps.SetParentBranchStep{BranchName: config.targetBranch, ParentBranchName: config.parentBranch})
result.Append(&steps.SetParentBranchStep{BranchName: config.initialBranch, ParentBranchName: config.targetBranch})
result.Append(&steps.CheckoutBranchStep{BranchName: config.targetBranch})
if config.hasOrigin && config.shouldNewBranchPush && !config.isOffline {
result.Append(&steps.CreateTrackingBranchStep{BranchName: config.targetBranch})
}
err = result.Wrap(steps.WrapOptions{RunInGitRoot: true, StashOpenChanges: true}, repo)
return result, err
}
func init() {
RootCmd.AddCommand(prependCommand)
}
<|start_filename|>src/steps/sync_steps.go<|end_filename|>
package steps
import (
"fmt"
"github.com/git-town/git-town/src/git"
)
// GetSyncBranchSteps returns the steps to sync the branch with the given name.
func GetSyncBranchSteps(branchName string, pushBranch bool, repo *git.ProdRepo) (result StepList, err error) {
isFeature := repo.Config.IsFeatureBranch(branchName)
hasRemoteOrigin, err := repo.Silent.HasRemote("origin")
if err != nil {
return result, err
}
if !hasRemoteOrigin && !isFeature {
return
}
result.Append(&CheckoutBranchStep{BranchName: branchName})
if isFeature {
steps, err := getSyncFeatureBranchSteps(branchName, repo)
if err != nil {
return result, err
}
result.AppendList(steps)
} else {
steps, err := getSyncNonFeatureBranchSteps(branchName, repo)
if err != nil {
return result, err
}
result.AppendList(steps)
}
if pushBranch && hasRemoteOrigin && !repo.Config.IsOffline() {
hasTrackingBranch, err := repo.Silent.HasTrackingBranch(branchName)
if err != nil {
return result, err
}
if hasTrackingBranch {
result.Append(&PushBranchStep{BranchName: branchName})
} else {
result.Append(&CreateTrackingBranchStep{BranchName: branchName})
}
}
return result, nil
}
// Helpers
func getSyncFeatureBranchSteps(branchName string, repo *git.ProdRepo) (result StepList, err error) {
hasTrackingBranch, err := repo.Silent.HasTrackingBranch(branchName)
if err != nil {
return result, err
}
if hasTrackingBranch {
result.Append(&MergeBranchStep{BranchName: repo.Silent.TrackingBranchName(branchName)})
}
result.Append(&MergeBranchStep{BranchName: repo.Config.GetParentBranch(branchName)})
return
}
func getSyncNonFeatureBranchSteps(branchName string, repo *git.ProdRepo) (result StepList, err error) {
hasTrackingBranch, err := repo.Silent.HasTrackingBranch(branchName)
if err != nil {
return result, err
}
if hasTrackingBranch {
if repo.Config.GetPullBranchStrategy() == "rebase" {
result.Append(&RebaseBranchStep{BranchName: repo.Silent.TrackingBranchName(branchName)})
} else {
result.Append(&MergeBranchStep{BranchName: repo.Silent.TrackingBranchName(branchName)})
}
}
mainBranchName := repo.Config.GetMainBranch()
hasUpstream, err := repo.Silent.HasRemote("upstream")
if err != nil {
return result, err
}
if mainBranchName == branchName && hasUpstream && repo.Config.ShouldSyncUpstream() {
result.Append(&FetchUpstreamStep{BranchName: mainBranchName})
result.Append(&RebaseBranchStep{BranchName: fmt.Sprintf("upstream/%s", mainBranchName)})
}
return
}
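// Illustrative example, not part of the original source: for a feature branch
// "feature" whose parent is "main", with an "origin" remote and an existing
// tracking branch (assumed here to be named "origin/feature"), and with
// pushBranch set to true, GetSyncBranchSteps yields a step list equivalent to:
//
//	CheckoutBranchStep{BranchName: "feature"}
//	MergeBranchStep{BranchName: "origin/feature"}
//	MergeBranchStep{BranchName: "main"}
//	PushBranchStep{BranchName: "feature"}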
<|start_filename|>main_test.go<|end_filename|>
package main_test
import (
"runtime"
"testing"
"github.com/cucumber/godog"
"github.com/git-town/git-town/test"
)
func FeatureContext(suite *godog.Suite) {
// The current Godog implementation only provides a FeatureContext,
// no SuiteContext nor ScenarioContext.
// Hence we have to register the scenario state here (and reuse it for all scenarios in a feature)
// and register the steps here.
// It is initialized in SuiteSteps.BeforeScenario.
state := &test.ScenarioState{}
test.Steps(suite, state)
}
func TestGodog(t *testing.T) {
tags := ""
if runtime.GOOS == "windows" {
tags = "~@skipWindows"
}
status := godog.RunWithOptions("godog", func(s *godog.Suite) {
FeatureContext(s)
}, godog.Options{
Format: "progress",
Concurrency: runtime.NumCPU(),
Strict: true,
Paths: []string{"features/"},
Tags: tags,
})
if status > 0 {
t.FailNow()
}
}
<|start_filename|>src/steps/merge_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// MergeBranchStep merges the branch with the given name into the current branch.
type MergeBranchStep struct {
NoOpStep
BranchName string
previousSha string
}
// CreateAbortStep returns the abort step for this step.
func (step *MergeBranchStep) CreateAbortStep() Step {
return &AbortMergeBranchStep{}
}
// CreateContinueStep returns the continue step for this step.
func (step *MergeBranchStep) CreateContinueStep() Step {
return &ContinueMergeBranchStep{}
}
// CreateUndoStep returns the undo step for this step.
func (step *MergeBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &ResetToShaStep{Hard: true, Sha: step.previousSha}, nil
}
// Run executes this step.
func (step *MergeBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) (err error) {
step.previousSha, err = repo.Silent.CurrentSha()
if err != nil {
return err
}
return repo.Logging.MergeBranchNoEdit(step.BranchName)
}
<|start_filename|>src/run/silent_shell_test.go<|end_filename|>
package run_test
import (
"io/ioutil"
"os"
"testing"
"github.com/git-town/git-town/src/run"
"github.com/stretchr/testify/assert"
)
func TestSilentShell_Run_arguments(t *testing.T) {
shell := run.SilentShell{}
res, err := shell.Run("echo", "hello", "world")
assert.NoError(t, err)
assert.Equal(t, "hello world", res.OutputSanitized())
}
func TestSilentShell_RunMany(t *testing.T) {
shell := run.SilentShell{}
err := shell.RunMany([][]string{
{"mkdir", "tmp"},
{"touch", "tmp/first"},
{"touch", "tmp/second"},
})
defer os.RemoveAll("tmp")
assert.NoError(t, err)
infos, err := ioutil.ReadDir("tmp")
assert.NoError(t, err)
assert.Equal(t, "first", infos[0].Name())
assert.Equal(t, "second", infos[1].Name())
}
func TestSilentShell_RunString(t *testing.T) {
shell := run.SilentShell{}
_, err := shell.RunString("touch first")
defer os.Remove("first")
assert.NoError(t, err)
_, err = os.Stat("first")
assert.False(t, os.IsNotExist(err))
}
func TestSilentShell_RunStringWith(t *testing.T) {
shell := run.SilentShell{}
res, err := shell.RunStringWith("ls -1", run.Options{Dir: ".."})
assert.NoError(t, err)
assert.Contains(t, res.OutputSanitized(), "cmd")
}
<|start_filename|>src/cmd/continue.go<|end_filename|>
package cmd
import (
"fmt"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/steps"
"github.com/spf13/cobra"
)
var continueCmd = &cobra.Command{
Use: "continue",
Short: "Restarts the last run git-town command after having resolved conflicts",
Run: func(cmd *cobra.Command, args []string) {
runState, err := steps.LoadPreviousRunState(prodRepo)
if err != nil {
cli.Exit(fmt.Errorf("cannot load previous run state: %v", err))
}
if runState == nil || !runState.IsUnfinished() {
cli.Exit(fmt.Errorf("nothing to continue"))
}
hasConflicts, err := prodRepo.Silent.HasConflicts()
if err != nil {
cli.Exit(err)
}
if hasConflicts {
cli.Exit(fmt.Errorf("you must resolve the conflicts before continuing"))
}
err = steps.Run(runState, prodRepo, drivers.Load(prodRepo.Config, &prodRepo.Silent, cli.PrintDriverAction))
if err != nil {
cli.Exit(err)
}
},
Args: cobra.NoArgs,
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
return validateIsConfigured(prodRepo)
},
}
func init() {
RootCmd.AddCommand(continueCmd)
}
<|start_filename|>src/steps/abort_merge_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// AbortMergeBranchStep aborts the current merge conflict.
type AbortMergeBranchStep struct {
NoOpStep
}
// Run executes this step.
func (step *AbortMergeBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.AbortMerge()
}
<|start_filename|>test/scenario_state.go<|end_filename|>
package test
import (
"github.com/cucumber/messages-go/v10"
"github.com/git-town/git-town/src/run"
)
// ScenarioState contains the state that is shared by all steps within a scenario.
type ScenarioState struct {
// the GitEnvironment used in the current scenario
gitEnv *GitEnvironment
// the error of the last run of Git Town
runErr error
// indicates whether the scenario has verified the error
runErrChecked bool
// the outcome of the last run of Git Town
runRes *run.Result
// initialCommits describes the commits in this Git environment before the WHEN steps ran.
initialCommits *messages.PickleStepArgument_PickleTable
// name of the uncommitted file in the workspace
uncommittedFileName string
// content of the uncommitted file in the workspace
uncommittedContent string
}
// Reset restores the null value of this ScenarioState.
func (state *ScenarioState) Reset(gitEnv *GitEnvironment) {
state.gitEnv = gitEnv
state.initialCommits = nil
state.runRes = nil
state.runErr = nil
state.runErrChecked = false
state.uncommittedFileName = ""
state.uncommittedContent = ""
}
<|start_filename|>src/drivers/core.go<|end_filename|>
package drivers
import "errors"
// Core provides the public API for the drivers subsystem.
// CodeHostingDriver defines the structure of drivers
// for the different code hosting services.
type CodeHostingDriver interface {
// LoadPullRequestInfo loads information about the pull request of the given branch into the given parent branch
// from the code hosting provider.
LoadPullRequestInfo(branch, parentBranch string) (PullRequestInfo, error)
// NewPullRequestURL returns the URL of the page
// to create a new pull request online.
NewPullRequestURL(branch, parentBranch string) (string, error)
// MergePullRequest merges the pull request through the hosting service API.
MergePullRequest(MergePullRequestOptions) (mergeSha string, err error)
// RepositoryURL returns the URL where the given repository
// can be found online.
RepositoryURL() string
// HostingServiceName returns the name of the code hosting service.
HostingServiceName() string
}
// config defines the configuration data needed by the driver package.
type config interface {
GetCodeHostingOriginHostname() string
GetCodeHostingDriverName() string
GetGiteaToken() string
GetGitHubToken() string
GetMainBranch() string
GetRemoteOriginURL() string
}
// runner defines the runner methods used by the driver package.
type gitRunner interface {
ShaForBranch(string) (string, error)
}
// PullRequestInfo contains information about a pull request.
type PullRequestInfo struct {
CanMergeWithAPI bool
DefaultCommitMessage string
PullRequestNumber int64
}
// MergePullRequestOptions defines the options to the MergePullRequest function.
type MergePullRequestOptions struct {
Branch string
PullRequestNumber int64
CommitMessage string
LogRequests bool
ParentBranch string
}
// logFn defines a function with fmt.Printf API that CodeHostingDriver instances can use to give updates on activities they do.
type logFn func(string, ...interface{})
// Load returns the code hosting driver to use based on the git config.
// nolint:interfacer // for Gitea support later
func Load(config config, git gitRunner, log logFn) CodeHostingDriver {
driver := LoadGithub(config, log)
if driver != nil {
return driver
}
driver = LoadGitea(config, log)
if driver != nil {
return driver
}
driver = LoadBitbucket(config, git)
if driver != nil {
return driver
}
driver = LoadGitlab(config)
if driver != nil {
return driver
}
return nil
}
// UnsupportedHostingError provides an error message.
func UnsupportedHostingError() error {
return errors.New(`unsupported hosting service
This command requires hosting on one of these services:
* Bitbucket
* GitHub
* GitLab
* Gitea`)
}
<|start_filename|>src/steps/delete_parent_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// DeleteParentBranchStep removes the parent branch entry in the Git Town configuration.
type DeleteParentBranchStep struct {
NoOpStep
BranchName string
previousParent string
}
// CreateUndoStep returns the undo step for this step.
func (step *DeleteParentBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
if step.previousParent == "" {
return &NoOpStep{}, nil
}
return &SetParentBranchStep{BranchName: step.BranchName, ParentBranchName: step.previousParent}, nil
}
// Run executes this step.
func (step *DeleteParentBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
step.previousParent = repo.Config.GetParentBranch(step.BranchName)
return repo.Config.DeleteParentBranch(step.BranchName)
}
<|start_filename|>src/git/main_first_test.go<|end_filename|>
package git_test
import (
"testing"
"github.com/git-town/git-town/src/git"
"github.com/stretchr/testify/assert"
)
func TestMainFirst(t *testing.T) {
tests := []struct {
give []string
want []string
}{
{give: []string{"main", "one", "two"}, want: []string{"main", "one", "two"}},
{give: []string{"alpha", "main", "omega"}, want: []string{"main", "alpha", "omega"}},
{give: []string{"main"}, want: []string{"main"}},
{give: []string{}, want: []string{}},
}
for tt := range tests {
have := git.MainFirst(tests[tt].give)
assert.Equal(t, tests[tt].want, have)
}
}
<|start_filename|>src/steps/push_tags_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// PushTagsStep pushes newly created Git tags to the remote.
type PushTagsStep struct {
NoOpStep
}
// Run executes this step.
func (step *PushTagsStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.PushTags()
}
<|start_filename|>src/steps/abort_rebase_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// AbortRebaseBranchStep aborts an ongoing rebase operation.
// This step is used in the abort scripts for Git Town commands.
type AbortRebaseBranchStep struct {
NoOpStep
}
// Run executes this step.
func (step *AbortRebaseBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.AbortRebase()
}
<|start_filename|>src/steps/remove_from_perennial_branch.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// RemoveFromPerennialBranches removes the branch with the given name from the list of perennial branches.
type RemoveFromPerennialBranches struct {
NoOpStep
BranchName string
}
// CreateUndoStep returns the undo step for this step.
func (step *RemoveFromPerennialBranches) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &AddToPerennialBranches{BranchName: step.BranchName}, nil
}
// Run executes this step.
func (step *RemoveFromPerennialBranches) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Config.RemoveFromPerennialBranches(step.BranchName)
}
<|start_filename|>src/steps/discard_open_changes_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// DiscardOpenChangesStep resets the branch to the last commit, discarding uncommitted changes.
type DiscardOpenChangesStep struct {
NoOpStep
}
// Run executes this step.
func (step *DiscardOpenChangesStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.DiscardOpenChanges()
}
<|start_filename|>src/drivers/github.go<|end_filename|>
package drivers
import (
"context"
"fmt"
"net/url"
"strings"
"github.com/git-town/git-town/src/drivers/helpers"
"github.com/google/go-github/github"
"golang.org/x/oauth2"
)
// githubCodeHostingDriver provides access to the API of GitHub installations.
type githubCodeHostingDriver struct {
apiToken string
client *github.Client
config config
hostname string
originURL string
owner string
repository string
log logFn
}
// LoadGithub provides a GitHub driver instance if the given repo configuration is for a Github repo,
// otherwise nil.
func LoadGithub(config config, log logFn) CodeHostingDriver {
driverType := config.GetCodeHostingDriverName()
originURL := config.GetRemoteOriginURL()
hostname := helpers.GetURLHostname(originURL)
configuredHostName := config.GetCodeHostingOriginHostname()
if configuredHostName != "" {
hostname = configuredHostName
}
if driverType != "github" && hostname != "github.com" {
return nil
}
repositoryParts := strings.SplitN(helpers.GetURLRepositoryName(originURL), "/", 2)
if len(repositoryParts) != 2 {
return nil
}
owner := repositoryParts[0]
repository := repositoryParts[1]
return &githubCodeHostingDriver{
apiToken: config.GetGitHubToken(),
config: config,
hostname: hostname,
log: log,
originURL: originURL,
owner: owner,
repository: repository,
}
}
func (d *githubCodeHostingDriver) LoadPullRequestInfo(branch, parentBranch string) (result PullRequestInfo, err error) {
if d.apiToken == "" {
return result, nil
}
d.connect()
pullRequests, err := d.getPullRequests(branch, parentBranch)
if err != nil {
return result, err
}
if len(pullRequests) != 1 {
return result, nil
}
result.CanMergeWithAPI = true
result.DefaultCommitMessage = d.getDefaultCommitMessage(pullRequests[0])
result.PullRequestNumber = int64(pullRequests[0].GetNumber())
return result, nil
}
func (d *githubCodeHostingDriver) NewPullRequestURL(branch string, parentBranch string) (string, error) {
toCompare := branch
if parentBranch != d.config.GetMainBranch() {
toCompare = parentBranch + "..." + branch
}
return fmt.Sprintf("%s/compare/%s?expand=1", d.RepositoryURL(), url.PathEscape(toCompare)), nil
}
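// Illustrative example, not part of the original source: with the main branch
// configured as "main", a branch "feature" whose parent is "main" yields
//
//	https://github.com/<owner>/<repository>/compare/feature?expand=1
//
// while a branch "child" whose parent is the non-main branch "feature" yields
//
//	https://github.com/<owner>/<repository>/compare/feature...child?expand=1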
func (d *githubCodeHostingDriver) RepositoryURL() string {
return fmt.Sprintf("https://%s/%s/%s", d.hostname, d.owner, d.repository)
}
func (d *githubCodeHostingDriver) MergePullRequest(options MergePullRequestOptions) (mergeSha string, err error) {
d.connect()
err = d.updatePullRequestsAgainst(options)
if err != nil {
return "", err
}
return d.mergePullRequest(options)
}
func (d *githubCodeHostingDriver) HostingServiceName() string {
return "GitHub"
}
// Helper
func (d *githubCodeHostingDriver) connect() {
if d.client == nil {
ts := oauth2.StaticTokenSource(
&oauth2.Token{AccessToken: d.apiToken},
)
tc := oauth2.NewClient(context.Background(), ts)
d.client = github.NewClient(tc)
}
}
func (d *githubCodeHostingDriver) getDefaultCommitMessage(pullRequest *github.PullRequest) string {
return fmt.Sprintf("%s (#%d)", *pullRequest.Title, *pullRequest.Number)
}
func (d *githubCodeHostingDriver) getPullRequests(branch, parentBranch string) ([]*github.PullRequest, error) {
pullRequests, _, err := d.client.PullRequests.List(context.Background(), d.owner, d.repository, &github.PullRequestListOptions{
Base: parentBranch,
Head: d.owner + ":" + branch,
State: "open",
})
return pullRequests, err
}
func (d *githubCodeHostingDriver) mergePullRequest(options MergePullRequestOptions) (mergeSha string, err error) {
if options.PullRequestNumber == 0 {
return "", fmt.Errorf("cannot merge via Github since there is no pull request")
}
if options.LogRequests {
d.log("GitHub API: Merging PR #%d\n", options.PullRequestNumber)
}
commitMessageParts := strings.SplitN(options.CommitMessage, "\n", 2)
githubCommitTitle := commitMessageParts[0]
githubCommitMessage := ""
if len(commitMessageParts) == 2 {
githubCommitMessage = commitMessageParts[1]
}
result, _, err := d.client.PullRequests.Merge(context.Background(), d.owner, d.repository, int(options.PullRequestNumber), githubCommitMessage, &github.PullRequestOptions{
MergeMethod: "squash",
CommitTitle: githubCommitTitle,
})
if err != nil {
return "", err
}
return *result.SHA, nil
}
func (d *githubCodeHostingDriver) updatePullRequestsAgainst(options MergePullRequestOptions) error {
pullRequests, _, err := d.client.PullRequests.List(context.Background(), d.owner, d.repository, &github.PullRequestListOptions{
Base: options.Branch,
State: "open",
})
if err != nil {
return err
}
for _, pullRequest := range pullRequests {
if options.LogRequests {
d.log("GitHub API: Updating base branch for PR #%d\n", *pullRequest.Number)
}
_, _, err = d.client.PullRequests.Edit(context.Background(), d.owner, d.repository, *pullRequest.Number, &github.PullRequest{
Base: &github.PullRequestBranch{
Ref: &options.ParentBranch,
},
})
if err != nil {
return err
}
}
return nil
}
<|start_filename|>test/helpers/unique_string_test.go<|end_filename|>
package helpers_test
import (
"testing"
"github.com/git-town/git-town/test/helpers"
"github.com/stretchr/testify/assert"
)
func TestUniqueString(t *testing.T) {
assert.NotEqual(t, "", helpers.UniqueString())
}
<|start_filename|>src/steps/revert_commit_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// RevertCommitStep reverts the commit with the given sha.
type RevertCommitStep struct {
NoOpStep
Sha string
}
// Run executes this step.
func (step *RevertCommitStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.RevertCommit(step.Sha)
}
<|start_filename|>src/steps/checkout_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// CheckoutBranchStep checks out a new branch.
type CheckoutBranchStep struct {
NoOpStep
BranchName string
previousBranchName string
}
// CreateUndoStep returns the undo step for this step.
func (step *CheckoutBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &CheckoutBranchStep{BranchName: step.previousBranchName}, nil
}
// Run executes this step.
func (step *CheckoutBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) (err error) {
step.previousBranchName, err = repo.Silent.CurrentBranch()
if err != nil {
return err
}
if step.previousBranchName != step.BranchName {
err := repo.Logging.CheckoutBranch(step.BranchName)
return err
}
return nil
}
<|start_filename|>src/cmd/set_parent_branch.go<|end_filename|>
package cmd
import (
"errors"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/prompt"
"github.com/spf13/cobra"
)
var setParentBranchCommand = &cobra.Command{
Use: "set-parent-branch",
Short: "Prompts to set the parent branch for the current branch",
Long: `Prompts to set the parent branch for the current branch`,
Run: func(cmd *cobra.Command, args []string) {
branchName, err := prodRepo.Silent.CurrentBranch()
if err != nil {
cli.Exit(err)
}
if !prodRepo.Config.IsFeatureBranch(branchName) {
cli.Exit(errors.New("only feature branches can have parent branches"))
}
defaultParentBranch := prodRepo.Config.GetParentBranch(branchName)
if defaultParentBranch == "" {
defaultParentBranch = prodRepo.Config.GetMainBranch()
}
err = prodRepo.Config.DeleteParentBranch(branchName)
if err != nil {
cli.Exit(err)
}
err = prompt.AskForBranchAncestry(branchName, defaultParentBranch, prodRepo)
if err != nil {
cli.Exit(err)
}
},
Args: cobra.NoArgs,
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
return validateIsConfigured(prodRepo)
},
}
func init() {
RootCmd.AddCommand(setParentBranchCommand)
}
<|start_filename|>test/helpers/folder_name_test.go<|end_filename|>
package helpers_test
import (
"testing"
"github.com/git-town/git-town/test/helpers"
"github.com/stretchr/testify/assert"
)
func TestFolderName(t *testing.T) {
tests := map[string]string{
"foo": "foo",
`globally set to "true", local unset`: "globally_set_to_true_local_unset",
}
for give := range tests {
want := tests[give]
have := helpers.FolderName(give)
assert.Equal(t, want, have)
}
}
<|start_filename|>src/steps/continue_merge_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// ContinueMergeBranchStep finishes an ongoing merge,
// assuming all conflicts have been resolved by the user.
type ContinueMergeBranchStep struct {
NoOpStep
}
// CreateAbortStep returns the abort step for this step.
func (step *ContinueMergeBranchStep) CreateAbortStep() Step {
return &NoOpStep{}
}
// CreateContinueStep returns the continue step for this step.
func (step *ContinueMergeBranchStep) CreateContinueStep() Step {
return step
}
// Run executes this step.
func (step *ContinueMergeBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
hasMergeInProgress, err := repo.Silent.HasMergeInProgress()
if err != nil {
return err
}
if hasMergeInProgress {
return repo.Logging.CommitNoEdit()
}
return nil
}
<|start_filename|>src/cmd/sync.go<|end_filename|>
package cmd
import (
"os"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/git"
"github.com/git-town/git-town/src/prompt"
"github.com/git-town/git-town/src/steps"
"github.com/spf13/cobra"
)
type syncConfig struct {
initialBranch string
branchesToSync []string
shouldPushTags bool
hasOrigin bool
isOffline bool
}
var syncCmd = &cobra.Command{
Use: "sync",
Short: "Updates the current branch with all relevant changes",
Long: `Updates the current branch with all relevant changes
Synchronizes the current branch with the rest of the world.
When run on a feature branch
- syncs all ancestor branches
- pulls updates for the current branch
- merges the parent branch into the current branch
- pushes the current branch
When run on the main branch or a perennial branch
- pulls and pushes updates for the current branch
- pushes tags
If the repository contains an "upstream" remote,
syncs the main branch with its upstream counterpart.
You can disable this by running "git config git-town.sync-upstream false".`,
Run: func(cmd *cobra.Command, args []string) {
config, err := getSyncConfig(prodRepo)
if err != nil {
cli.Exit(err)
}
stepList, err := getSyncStepList(config, prodRepo)
if err != nil {
cli.Exit(err)
}
runState := steps.NewRunState("sync", stepList)
err = steps.Run(runState, prodRepo, nil)
if err != nil {
cli.Exit(err)
}
},
Args: cobra.NoArgs,
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
if dryRunFlag {
currentBranch, err := prodRepo.Silent.CurrentBranch()
if err != nil {
return err
}
prodRepo.DryRun.Activate(currentBranch)
}
if err := validateIsConfigured(prodRepo); err != nil {
return err
}
exit, err := handleUnfinishedState(prodRepo, nil)
if err != nil {
return err
}
if exit {
os.Exit(0)
}
return nil
},
}
func getSyncConfig(repo *git.ProdRepo) (result syncConfig, err error) {
result.hasOrigin, err = repo.Silent.HasRemote("origin")
if err != nil {
return result, err
}
result.isOffline = prodRepo.Config.IsOffline()
if result.hasOrigin && !result.isOffline {
err := repo.Logging.Fetch()
if err != nil {
return result, err
}
}
result.initialBranch, err = repo.Silent.CurrentBranch()
if err != nil {
return result, err
}
if allFlag {
branches, err := repo.Silent.LocalBranchesMainFirst()
if err != nil {
return result, err
}
err = prompt.EnsureKnowsParentBranches(branches, repo)
if err != nil {
return result, err
}
result.branchesToSync = branches
result.shouldPushTags = true
} else {
err = prompt.EnsureKnowsParentBranches([]string{result.initialBranch}, repo)
if err != nil {
return result, err
}
result.branchesToSync = append(prodRepo.Config.GetAncestorBranches(result.initialBranch), result.initialBranch)
result.shouldPushTags = !prodRepo.Config.IsFeatureBranch(result.initialBranch)
}
return result, nil
}
func getSyncStepList(config syncConfig, repo *git.ProdRepo) (result steps.StepList, err error) {
for _, branchName := range config.branchesToSync {
steps, err := steps.GetSyncBranchSteps(branchName, true, repo)
if err != nil {
return result, err
}
result.AppendList(steps)
}
result.Append(&steps.CheckoutBranchStep{BranchName: config.initialBranch})
if config.hasOrigin && config.shouldPushTags && !config.isOffline {
result.Append(&steps.PushTagsStep{})
}
err = result.Wrap(steps.WrapOptions{RunInGitRoot: true, StashOpenChanges: true}, repo)
return result, err
}
func init() {
syncCmd.Flags().BoolVar(&allFlag, "all", false, "Sync all local branches")
syncCmd.Flags().BoolVar(&dryRunFlag, "dry-run", false, dryRunFlagDescription)
RootCmd.AddCommand(syncCmd)
}
<|start_filename|>src/steps/pull_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// PullBranchStep pulls the branch with the given name from the origin remote.
type PullBranchStep struct {
NoOpStep
BranchName string
}
// Run executes this step.
func (step *PullBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.Pull()
}
<|start_filename|>src/steps/run.go<|end_filename|>
package steps
import (
"fmt"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// Run runs the Git Town command described by the given state.
// nolint: gocyclo, gocognit, nestif, funlen
func Run(runState *RunState, repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
for {
step := runState.RunStepList.Pop()
if step == nil {
runState.MarkAsFinished()
if runState.IsAbort || runState.isUndo {
err := DeletePreviousRunState(repo)
if err != nil {
return fmt.Errorf("cannot delete previous run state: %w", err)
}
} else {
err := SaveRunState(runState, repo)
if err != nil {
return fmt.Errorf("cannot save run state: %w", err)
}
}
fmt.Println()
return nil
}
if getTypeName(step) == "*SkipCurrentBranchSteps" {
runState.SkipCurrentBranchSteps()
continue
}
if getTypeName(step) == "*PushBranchAfterCurrentBranchSteps" {
err := runState.AddPushBranchStepAfterCurrentBranchSteps(repo)
if err != nil {
return err
}
continue
}
runErr := step.Run(repo, driver)
if runErr != nil {
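// The step failed: remember its abort step, then either auto-abort (for steps that request it) or save the run state and tell the user how to abort, continue, or skip.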
runState.AbortStepList.Append(step.CreateAbortStep())
if step.ShouldAutomaticallyAbortOnError() {
cli.PrintError(fmt.Errorf(runErr.Error() + "\nAuto-aborting..."))
abortRunState := runState.CreateAbortRunState()
err := Run(&abortRunState, repo, driver)
if err != nil {
return fmt.Errorf("cannot run the abort steps: %w", err)
}
cli.Exit(step.GetAutomaticAbortError())
} else {
runState.RunStepList.Prepend(step.CreateContinueStep())
err := runState.MarkAsUnfinished(repo)
if err != nil {
return err
}
currentBranch, err := repo.Silent.CurrentBranch()
if err != nil {
return err
}
rebasing, err := repo.Silent.HasRebaseInProgress()
if err != nil {
return err
}
if runState.Command == "sync" && !(rebasing && repo.Config.IsMainBranch(currentBranch)) {
runState.UnfinishedDetails.CanSkip = true
}
err = SaveRunState(runState, repo)
if err != nil {
return fmt.Errorf("cannot save run state: %w", err)
}
message := runErr.Error() + `
To abort, run "git-town abort".
To continue after having resolved conflicts, run "git-town continue".
`
if runState.UnfinishedDetails.CanSkip {
message += `To continue by skipping the current branch, run "git-town skip".`
}
message += "\n"
return fmt.Errorf(message)
}
}
undoStep, err := step.CreateUndoStep(repo)
if err != nil {
return fmt.Errorf("cannot create undo step for %q: %w", step, err)
}
runState.UndoStepList.Prepend(undoStep)
}
}
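// Illustrative sketch (not part of the original source): how a command typically
// assembles a StepList and executes it via Run. The branch name is hypothetical,
// the repo is the command's ProdRepo, and the driver may be nil for commands that
// never talk to a code hosting service (as the "sync" command does).
func exampleRunStepList(repo *git.ProdRepo) error {
	var list StepList
	list.Append(&CheckoutBranchStep{BranchName: "feature"})
	list.Append(&PushBranchStep{BranchName: "feature"})
	runState := NewRunState("example", list)
	return Run(runState, repo, nil)
}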
<|start_filename|>src/steps/ensure_has_shippable_changes_step.go<|end_filename|>
package steps
import (
"errors"
"fmt"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// EnsureHasShippableChangesStep asserts that the branch has unique changes not on the main branch.
type EnsureHasShippableChangesStep struct {
NoOpStep
BranchName string
}
// GetAutomaticAbortError returns the error message to display when this step
// causes the command to automatically abort.
func (step *EnsureHasShippableChangesStep) GetAutomaticAbortError() error {
return fmt.Errorf("the branch %q has no shippable changes", step.BranchName)
}
// Run executes this step.
func (step *EnsureHasShippableChangesStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
hasShippableChanges, err := repo.Silent.HasShippableChanges(step.BranchName)
if err != nil {
return err
}
if !hasShippableChanges {
return errors.New("no shippable changes")
}
return nil
}
// ShouldAutomaticallyAbortOnError returns whether this step should cause the command to
// automatically abort if it errors.
func (step *EnsureHasShippableChangesStep) ShouldAutomaticallyAbortOnError() bool {
return true
}
<|start_filename|>src/cli/printable.go<|end_filename|>
package cli
import (
"sort"
"strconv"
"strings"
)
// BranchAncestryConfig defines the configuration values needed by the `cli` package.
type BranchAncestryConfig interface {
GetBranchAncestryRoots() []string
GetChildBranches(string) []string
}
// PrintableBranchAncestry provides the branch ancestry in CLI printable format.
func PrintableBranchAncestry(config BranchAncestryConfig) string {
roots := config.GetBranchAncestryRoots()
trees := make([]string, len(roots))
for r := range roots {
trees[r] = PrintableBranchTree(roots[r], config)
}
return strings.Join(trees, "\n\n")
}
// PrintableBranchTree returns a user printable branch tree.
func PrintableBranchTree(branchName string, config BranchAncestryConfig) (result string) {
result += branchName
childBranches := config.GetChildBranches(branchName)
sort.Strings(childBranches)
for _, childBranch := range childBranches {
result += "\n" + Indent(PrintableBranchTree(childBranch, config))
}
return
}
// PrintableMainBranch returns a user printable main branch.
func PrintableMainBranch(mainBranch string) string {
if mainBranch == "" {
return "[none]"
}
return mainBranch
}
// PrintableNewBranchPushFlag returns a user printable new branch push flag.
func PrintableNewBranchPushFlag(flag bool) string {
return strconv.FormatBool(flag)
}
// PrintableOfflineFlag provides a printable version of the given offline flag.
func PrintableOfflineFlag(flag bool) string {
return strconv.FormatBool(flag)
}
// PrintablePerennialBranches returns a user printable list of perennial branches.
func PrintablePerennialBranches(perennialBranches []string) string {
if len(perennialBranches) == 0 {
return "[none]"
}
return strings.Join(perennialBranches, "\n")
}
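// Illustrative sketch (not part of the original source): a minimal in-memory
// BranchAncestryConfig showing how PrintableBranchAncestry renders nested branches.
// The mapAncestry type and the branch names are hypothetical and exist only for
// this example.
type mapAncestry map[string][]string

// GetBranchAncestryRoots returns the top-level branches of this fake ancestry.
func (m mapAncestry) GetBranchAncestryRoots() []string { return []string{"main"} }

// GetChildBranches returns the children recorded for the given branch.
func (m mapAncestry) GetChildBranches(branch string) []string { return m[branch] }

// examplePrintableBranchAncestry renders a small fake ancestry tree.
// Assuming Indent prefixes two spaces per nesting level, the result would be roughly:
//
//	main
//	  feature-a
//	    feature-a-child
//	  feature-b
func examplePrintableBranchAncestry() string {
	config := mapAncestry{
		"main":      {"feature-b", "feature-a"},
		"feature-a": {"feature-a-child"},
	}
	return PrintableBranchAncestry(config)
}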
<|start_filename|>src/steps/rebase_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// RebaseBranchStep rebases the current branch
// against the branch with the given name.
type RebaseBranchStep struct {
NoOpStep
BranchName string
previousSha string
}
// CreateAbortStep returns the abort step for this step.
func (step *RebaseBranchStep) CreateAbortStep() Step {
return &AbortRebaseBranchStep{}
}
// CreateContinueStep returns the continue step for this step.
func (step *RebaseBranchStep) CreateContinueStep() Step {
return &ContinueRebaseBranchStep{}
}
// CreateUndoStep returns the undo step for this step.
func (step *RebaseBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &ResetToShaStep{Hard: true, Sha: step.previousSha}, nil
}
// Run executes this step.
func (step *RebaseBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) (err error) {
step.previousSha, err = repo.Silent.CurrentSha()
if err != nil {
return err
}
err = repo.Logging.Rebase(step.BranchName)
if err != nil {
repo.Silent.CurrentBranchCache.Invalidate()
}
return err
}
<|start_filename|>src/steps/restore_open_changes_step.go<|end_filename|>
package steps
import (
"errors"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// RestoreOpenChangesStep restores stashed away changes into the workspace.
type RestoreOpenChangesStep struct {
NoOpStep
}
// CreateUndoStep returns the undo step for this step.
func (step *RestoreOpenChangesStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &StashOpenChangesStep{}, nil
}
// Run executes this step.
func (step *RestoreOpenChangesStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
err := repo.Logging.PopStash()
if err != nil {
return errors.New("conflicts between your uncommmitted changes and the main branch")
}
return nil
}
<|start_filename|>src/cmd/append.go<|end_filename|>
package cmd
import (
"fmt"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/git"
"github.com/git-town/git-town/src/prompt"
"github.com/git-town/git-town/src/steps"
"github.com/spf13/cobra"
)
type appendConfig struct {
ancestorBranches []string
parentBranch string
targetBranch string
hasOrigin bool
isOffline bool
shouldNewBranchPush bool
}
var appendCommand = &cobra.Command{
Use: "append <branch>",
Short: "Creates a new feature branch as a child of the current branch",
Long: `Creates a new feature branch as a direct child of the current branch.
Syncs the current branch,
forks a new feature branch with the given name off the current branch,
makes the new branch a child of the current branch,
pushes the new feature branch to the remote repository
(if and only if "new-branch-push-flag" is true),
and brings over all uncommitted changes to the new feature branch.
See "sync" for information regarding remote upstream.`,
Run: func(cmd *cobra.Command, args []string) {
config, err := getAppendConfig(args, prodRepo)
if err != nil {
cli.Exit(err)
}
stepList, err := getAppendStepList(config, prodRepo)
if err != nil {
cli.Exit(err)
}
runState := steps.NewRunState("append", stepList)
err = steps.Run(runState, prodRepo, nil)
if err != nil {
cli.Exit(err)
}
},
Args: cobra.ExactArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
return validateIsConfigured(prodRepo)
},
}
func getAppendConfig(args []string, repo *git.ProdRepo) (result appendConfig, err error) {
result.parentBranch, err = repo.Silent.CurrentBranch()
if err != nil {
return result, err
}
result.targetBranch = args[0]
result.hasOrigin, err = repo.Silent.HasRemote("origin")
if err != nil {
return result, err
}
if result.hasOrigin && !repo.Config.IsOffline() {
err := repo.Logging.Fetch()
if err != nil {
return result, err
}
}
hasBranch, err := repo.Silent.HasLocalOrRemoteBranch(result.targetBranch)
if err != nil {
return result, err
}
if hasBranch {
return result, fmt.Errorf("a branch named %q already exists", result.targetBranch)
}
err = prompt.EnsureKnowsParentBranches([]string{result.parentBranch}, repo)
if err != nil {
return result, err
}
result.ancestorBranches = repo.Config.GetAncestorBranches(result.parentBranch)
result.shouldNewBranchPush = repo.Config.ShouldNewBranchPush()
result.isOffline = repo.Config.IsOffline()
return result, err
}
func init() {
RootCmd.AddCommand(appendCommand)
}
<|start_filename|>src/cmd/kill.go<|end_filename|>
package cmd
import (
"fmt"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/git"
"github.com/git-town/git-town/src/prompt"
"github.com/git-town/git-town/src/steps"
"github.com/spf13/cobra"
)
type killConfig struct {
initialBranch string
previousBranch string
targetBranchParent string
targetBranch string
childBranches []string
isOffline bool
isTargetBranchLocal bool
hasOpenChanges bool
hasTrackingBranch bool
}
var killCommand = &cobra.Command{
Use: "kill [<branch>]",
Short: "Removes an obsolete feature branch",
Long: `Removes an obsolete feature branch
Deletes the current or provided branch from the local and remote repositories.
Does not delete perennial branches or the main branch.`,
Run: func(cmd *cobra.Command, args []string) {
config, err := getKillConfig(args, prodRepo)
if err != nil {
cli.Exit(err)
}
stepList, err := getKillStepList(config, prodRepo)
if err != nil {
cli.Exit(err)
}
runState := steps.NewRunState("kill", stepList)
err = steps.Run(runState, prodRepo, nil)
if err != nil {
cli.Exit(err)
}
},
Args: cobra.MaximumNArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
return validateIsConfigured(prodRepo)
},
}
// nolint: funlen
func getKillConfig(args []string, repo *git.ProdRepo) (result killConfig, err error) {
result.initialBranch, err = repo.Silent.CurrentBranch()
if err != nil {
return result, err
}
if len(args) == 0 {
result.targetBranch = result.initialBranch
} else {
result.targetBranch = args[0]
}
if !repo.Config.IsFeatureBranch(result.targetBranch) {
return result, fmt.Errorf("you can only kill feature branches")
}
result.isTargetBranchLocal, err = repo.Silent.HasLocalBranch(result.targetBranch)
if err != nil {
return result, err
}
if result.isTargetBranchLocal {
err = prompt.EnsureKnowsParentBranches([]string{result.targetBranch}, repo)
if err != nil {
return result, err
}
repo.Config.Reload()
}
hasOrigin, err := repo.Silent.HasRemote("origin")
if err != nil {
return result, err
}
result.isOffline = repo.Config.IsOffline()
if hasOrigin && !result.isOffline {
err := repo.Logging.Fetch()
if err != nil {
return result, err
}
}
if result.initialBranch != result.targetBranch {
hasTargetBranch, err := repo.Silent.HasLocalOrRemoteBranch(result.targetBranch)
if err != nil {
return result, err
}
if !hasTargetBranch {
return result, fmt.Errorf("there is no branch named %q", result.targetBranch)
}
}
result.hasTrackingBranch, err = repo.Silent.HasTrackingBranch(result.targetBranch)
if err != nil {
return result, err
}
result.targetBranchParent = repo.Config.GetParentBranch(result.targetBranch)
result.previousBranch, err = repo.Silent.PreviouslyCheckedOutBranch()
if err != nil {
return result, err
}
result.hasOpenChanges, err = repo.Silent.HasOpenChanges()
if err != nil {
return result, err
}
result.childBranches = repo.Config.GetChildBranches(result.targetBranch)
return result, nil
}
func getKillStepList(config killConfig, repo *git.ProdRepo) (result steps.StepList, err error) {
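// Build the deletion steps: remove a local branch (including its tracking branch and re-parenting of its child branches), remove a remote-only branch, or fail when asked to delete a remote branch while offline.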
switch {
case config.isTargetBranchLocal:
if config.hasTrackingBranch && !config.isOffline {
result.Append(&steps.DeleteRemoteBranchStep{BranchName: config.targetBranch, IsTracking: true})
}
if config.initialBranch == config.targetBranch {
if config.hasOpenChanges {
result.Append(&steps.CommitOpenChangesStep{})
}
result.Append(&steps.CheckoutBranchStep{BranchName: config.targetBranchParent})
}
result.Append(&steps.DeleteLocalBranchStep{BranchName: config.targetBranch, Force: true})
for _, child := range config.childBranches {
result.Append(&steps.SetParentBranchStep{BranchName: child, ParentBranchName: config.targetBranchParent})
}
result.Append(&steps.DeleteParentBranchStep{BranchName: config.targetBranch})
case !repo.Config.IsOffline():
result.Append(&steps.DeleteRemoteBranchStep{BranchName: config.targetBranch, IsTracking: false})
default:
return result, fmt.Errorf("cannot delete remote branch %q in offline mode", config.targetBranch)
}
err = result.Wrap(steps.WrapOptions{
RunInGitRoot: true,
StashOpenChanges: config.initialBranch != config.targetBranch && config.targetBranch == config.previousBranch,
}, repo)
return result, err
}
func init() {
RootCmd.AddCommand(killCommand)
}
<|start_filename|>test/mocking_shell.go<|end_filename|>
package test
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"runtime"
"strings"
"github.com/git-town/git-town/src/run"
"github.com/kballard/go-shellquote"
)
// MockingShell runs shell commands using a customizable environment.
// This is useful in tests. Possible customizations:
// - override environment variables
// - Temporarily override certain shell commands with mock implementations.
// Temporary mocks are only valid for the next command being run.
type MockingShell struct {
workingDir string // the directory in which this runner runs
homeDir string // the directory that contains the global Git configuration
binDir string // the directory that stores the mock shell command implementations, ignored if empty
testOrigin string // optional content of the GIT_TOWN_REMOTE environment variable
hasMockCommand bool // indicates whether the current test has mocked a command
}
// NewMockingShell provides a new MockingShell instance that executes in the given directory.
func NewMockingShell(workingDir string, homeDir string, binDir string) *MockingShell {
return &MockingShell{workingDir: workingDir, homeDir: homeDir, binDir: binDir}
}
// WorkingDir provides the directory this MockingShell operates in.
func (ms *MockingShell) WorkingDir() string {
return ms.workingDir
}
// MockBrokenCommand adds a mock for the given command that returns an error.
func (ms *MockingShell) MockBrokenCommand(name string) error {
// create "bin" dir
err := os.Mkdir(ms.binDir, 0744)
if err != nil {
return fmt.Errorf("cannot create mock bin dir: %w", err)
}
// write custom "which" command
content := fmt.Sprintf("#!/usr/bin/env bash\n\nif [ \"$1\" == %q ]; then\n echo %q\nelse\n exit 1\nfi", name, filepath.Join(ms.binDir, name))
err = ioutil.WriteFile(filepath.Join(ms.binDir, "which"), []byte(content), 0500)
if err != nil {
return fmt.Errorf("cannot write custom which command: %w", err)
}
// write custom command
content = "#!/usr/bin/env bash\n\nexit 1"
err = ioutil.WriteFile(filepath.Join(ms.binDir, name), []byte(content), 0500)
if err != nil {
return fmt.Errorf("cannot write custom command: %w", err)
}
ms.hasMockCommand = true
return nil
}
// MockCommand adds a mock for the command with the given name.
func (ms *MockingShell) MockCommand(name string) error {
// create "bin" dir
err := os.Mkdir(ms.binDir, 0744)
if err != nil {
return fmt.Errorf("cannot create mock bin dir: %w", err)
}
// write custom "which" command
content := fmt.Sprintf("#!/usr/bin/env bash\n\nif [ \"$1\" == %q ]; then\n echo %q\nelse\n exit 1\nfi", name, filepath.Join(ms.binDir, name))
err = ioutil.WriteFile(filepath.Join(ms.binDir, "which"), []byte(content), 0500)
if err != nil {
return fmt.Errorf("cannot write custom which command: %w", err)
}
// write custom command
content = fmt.Sprintf("#!/usr/bin/env bash\n\necho %s called with: \"$@\"\n", name)
err = ioutil.WriteFile(filepath.Join(ms.binDir, name), []byte(content), 0500)
if err != nil {
return fmt.Errorf("cannot write custom command: %w", err)
}
ms.hasMockCommand = true
return nil
}
// MockGit pretends that this repo has Git in the given version installed.
func (ms *MockingShell) MockGit(version string) error {
// create "bin" dir
err := os.Mkdir(ms.binDir, 0744)
if err != nil {
return fmt.Errorf("cannot create mock bin dir %q: %w", ms.binDir, err)
}
// write custom Git command
if runtime.GOOS == "windows" {
content := fmt.Sprintf("echo git version %s\n", version)
err = ioutil.WriteFile(filepath.Join(ms.binDir, "git.cmd"), []byte(content), 0500)
} else {
content := fmt.Sprintf("#!/usr/bin/env bash\n\nif [ \"$1\" = \"version\" ]; then\n echo git version %s\nfi\n", version)
err = ioutil.WriteFile(filepath.Join(ms.binDir, "git"), []byte(content), 0500)
}
if err != nil {
return fmt.Errorf("cannot create custom Git binary: %w", err)
}
ms.hasMockCommand = true
return nil
}
// MockNoCommandsInstalled pretends that no commands are installed.
func (ms *MockingShell) MockNoCommandsInstalled() error {
// create "bin" dir
err := os.Mkdir(ms.binDir, 0744)
if err != nil {
return fmt.Errorf("cannot create mock bin dir: %w", err)
}
// write custom "which" command
content := "#!/usr/bin/env bash\n\nexit 1\n"
err = ioutil.WriteFile(filepath.Join(ms.binDir, "which"), []byte(content), 0500)
if err != nil {
return fmt.Errorf("cannot write custom which command: %w", err)
}
ms.hasMockCommand = true
return nil
}
// Run runs the given command with the given arguments
// in this MockingShell's directory.
// Shell overrides will be used and removed when done.
func (ms *MockingShell) Run(name string, arguments ...string) (*run.Result, error) {
return ms.RunWith(run.Options{}, name, arguments...)
}
// RunMany runs all given commands in current directory.
// Commands are provided as a list of argv-style strings.
// Shell overrides apply for the first command only.
// Failed commands abort immediately with the encountered error.
func (ms *MockingShell) RunMany(commands [][]string) error {
for _, argv := range commands {
command, args := argv[0], argv[1:]
_, err := ms.Run(command, args...)
if err != nil {
return fmt.Errorf("error running command %q: %w", argv, err)
}
}
return nil
}
// RunString runs the given command (including possible arguments)
// in this MockingShell's directory.
// Shell overrides will be used and removed when done.
func (ms *MockingShell) RunString(fullCmd string) (*run.Result, error) {
return ms.RunStringWith(fullCmd, run.Options{})
}
// RunStringWith runs the given command (including possible arguments)
// in this ShellRunner's directory using the given options.
// opts.Dir is a relative path inside the working directory of this MockingShell.
// Shell overrides will be used and removed when done.
func (ms *MockingShell) RunStringWith(fullCmd string, opts run.Options) (result *run.Result, err error) {
parts, err := shellquote.Split(fullCmd)
if err != nil {
return result, fmt.Errorf("cannot split command %q: %w", fullCmd, err)
}
cmd, args := parts[0], parts[1:]
return ms.RunWith(opts, cmd, args...)
}
// RunWith runs the given command with the given options in this MockingShell's directory.
func (ms *MockingShell) RunWith(opts run.Options, cmd string, args ...string) (result *run.Result, err error) {
// create an environment with the temp shell overrides directory added to the PATH
if opts.Env == nil {
opts.Env = os.Environ()
}
// set HOME to the given global directory so that Git puts the global configuration there.
for i := range opts.Env {
if strings.HasPrefix(opts.Env[i], "HOME=") {
opts.Env[i] = fmt.Sprintf("HOME=%s", ms.homeDir)
}
}
// add the custom origin
if ms.testOrigin != "" {
opts.Env = append(opts.Env, fmt.Sprintf("GIT_TOWN_REMOTE=%s", ms.testOrigin))
}
// add the custom bin dir to the PATH
if ms.hasMockCommand {
for i := range opts.Env {
if strings.HasPrefix(opts.Env[i], "PATH=") {
parts := strings.SplitN(opts.Env[i], "=", 2)
parts[1] = ms.binDir + string(os.PathListSeparator) + parts[1]
opts.Env[i] = strings.Join(parts, "=")
break
}
}
}
// set the working dir
opts.Dir = filepath.Join(ms.workingDir, opts.Dir)
// run the command inside the custom environment
result, err = run.WithOptions(opts, cmd, args...)
if Debug {
fmt.Println(filepath.Base(ms.workingDir), ">", cmd, strings.Join(args, " "))
fmt.Println(result.Output())
if err != nil {
fmt.Printf("ERROR: %v\n", err)
}
}
return result, err
}
// SetTestOrigin adds the given environment variable to subsequent runs of commands.
func (ms *MockingShell) SetTestOrigin(content string) {
ms.testOrigin = content
}
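// Illustrative sketch (not part of the original source): typical use of MockingShell
// in a test. The directories are hypothetical temporary folders created by the calling
// test, and the Git version string is made up for the example.
func exampleMockingShellUsage(workingDir, homeDir, binDir string) error {
	shell := NewMockingShell(workingDir, homeDir, binDir)
	// Pretend this machine has Git 2.26.0 installed for the next command.
	if err := shell.MockGit("2.26.0"); err != nil {
		return err
	}
	result, err := shell.RunString("git version")
	if err != nil {
		return err
	}
	fmt.Println(result.Output())
	return nil
}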
<|start_filename|>src/steps/create_pull_request_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/browsers"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// CreatePullRequestStep creates a new pull request for the current branch.
type CreatePullRequestStep struct {
NoOpStep
BranchName string
}
// Run executes this step.
func (step *CreatePullRequestStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
parentBranch := repo.Config.GetParentBranch(step.BranchName)
prURL, err := driver.NewPullRequestURL(step.BranchName, parentBranch)
if err != nil {
return err
}
browsers.Open(prURL, repo.LoggingShell)
return nil
}
<|start_filename|>src/steps/push_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// PushBranchStep pushes the branch with the given name to the origin remote.
// Optionally with force.
type PushBranchStep struct {
NoOpStep
BranchName string
Force bool
Undoable bool
}
// CreateUndoStep returns the undo step for this step.
func (step *PushBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
if step.Undoable {
return &PushBranchAfterCurrentBranchSteps{}, nil
}
return &SkipCurrentBranchSteps{}, nil
}
// Run executes this step.
func (step *PushBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
shouldPush, err := repo.Silent.ShouldPushBranch(step.BranchName)
if err != nil {
return err
}
if !shouldPush && !repo.DryRun.IsActive() {
return nil
}
if step.Force {
return repo.Logging.PushBranchForce(step.BranchName)
}
currentBranch, err := repo.Silent.CurrentBranch()
if err != nil {
return err
}
if currentBranch == step.BranchName {
return repo.Logging.PushBranch()
}
return repo.Logging.PushBranchSetUpstream(step.BranchName)
}
<|start_filename|>src/drivers/gitea_test.go<|end_filename|>
package drivers_test
import (
"net/http"
"testing"
"github.com/git-town/git-town/src/drivers"
"github.com/stretchr/testify/assert"
httpmock "gopkg.in/jarcoal/httpmock.v1"
)
const giteaRoot = "https://gitea.com/api/v1"
const giteaVersion = giteaRoot + "/version"
const giteaCurrOpen = giteaRoot + "/repos/git-town/git-town/pulls?limit=50&page=0&state=open"
const giteaPR1 = giteaRoot + "/repos/git-town/git-town/pulls/1"
const giteaPR1Merge = giteaRoot + "/repos/git-town/git-town/pulls/1/merge"
func log(template string, messages ...interface{}) {}
func setupGiteaDriver(t *testing.T, token string) (drivers.CodeHostingDriver, func()) {
httpmock.Activate()
driver := drivers.LoadGitea(mockConfig{
remoteOriginURL: "<EMAIL>:git-town/git-town.git",
giteaToken: token,
}, log)
assert.NotNil(t, driver)
return driver, func() {
httpmock.DeactivateAndReset()
}
}
func TestLoadGitea(t *testing.T) {
driver := drivers.LoadGitea(mockConfig{
codeHostingDriverName: "gitea",
remoteOriginURL: "<EMAIL>:git-town/git-town.git",
}, log)
assert.NotNil(t, driver)
assert.Equal(t, "Gitea", driver.HostingServiceName())
assert.Equal(t, "https://self-hosted-gitea.com/git-town/git-town", driver.RepositoryURL())
}
func TestLoadGitea_customHostName(t *testing.T) {
driver := drivers.LoadGitea(mockConfig{
remoteOriginURL: "<EMAIL>:git-town/git-town.git",
configuredHostName: "gitea.com",
}, log)
assert.NotNil(t, driver)
assert.Equal(t, "Gitea", driver.HostingServiceName())
assert.Equal(t, "https://gitea.com/git-town/git-town", driver.RepositoryURL())
}
func TestGiteaDriver_LoadPullRequestInfo(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "TOKEN")
defer teardown()
httpmock.RegisterResponder("GET", giteaCurrOpen, httpmock.NewStringResponder(200, `[{"number": 1, "title": "my title", "mergeable": true, "base": {"label": "main"}, "head": {"label": "git-town/feature"} }]`))
prInfo, err := driver.LoadPullRequestInfo("feature", "main")
assert.NoError(t, err)
assert.True(t, prInfo.CanMergeWithAPI)
assert.Equal(t, "my title (#1)", prInfo.DefaultCommitMessage)
assert.Equal(t, int64(1), prInfo.PullRequestNumber)
}
func TestGiteaDriver_LoadPullRequestInfo_EmptyGiteaToken(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "")
defer teardown()
prInfo, err := driver.LoadPullRequestInfo("feature", "main")
assert.NoError(t, err)
assert.False(t, prInfo.CanMergeWithAPI)
}
func TestGiteaDriver_LoadPullRequestInfo_GetPullRequestNumberFails(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "TOKEN")
defer teardown()
httpmock.RegisterResponder("GET", giteaCurrOpen, httpmock.NewStringResponder(404, ""))
_, err := driver.LoadPullRequestInfo("feature", "main")
assert.Error(t, err)
}
func TestGiteaDriver_LoadPullRequestInfo_NoPullRequestForBranch(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "TOKEN")
defer teardown()
httpmock.RegisterResponder("GET", giteaCurrOpen, httpmock.NewStringResponder(200, "[]"))
prInfo, err := driver.LoadPullRequestInfo("feature", "main")
assert.NoError(t, err)
assert.False(t, prInfo.CanMergeWithAPI)
}
func TestGiteaDriver_LoadPullRequestInfo_MultiplePullRequestsForBranch(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "TOKEN")
defer teardown()
httpmock.RegisterResponder("GET", giteaCurrOpen, httpmock.NewStringResponder(200, `[{"number": 1, "base": {"label": "main"}, "head": {"label": "no-match"} }, {"number": 2, "base": {"label": "main"}, "head": {"label": "no-match2"} }]`))
prInfo, err := driver.LoadPullRequestInfo("feature", "main")
assert.NoError(t, err)
assert.False(t, prInfo.CanMergeWithAPI)
}
func TestGiteaDriver_MergePullRequest_GetPullRequestIdsFails(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
httpmock.RegisterResponder("GET", giteaCurrOpen, httpmock.NewStringResponder(404, ""))
_, err := driver.MergePullRequest(options)
assert.Error(t, err)
}
func TestGiteaDriver_MergePullRequest_GetPullRequestToMergeFails(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
PullRequestNumber: 1,
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
httpmock.RegisterResponder("GET", giteaCurrOpen, httpmock.NewStringResponder(200, "[]"))
httpmock.RegisterResponder("GET", giteaPR1Merge, httpmock.NewStringResponder(404, ""))
_, err := driver.MergePullRequest(options)
assert.Error(t, err)
}
func TestGiteaDriver_MergePullRequest_PullRequestNotFound(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
PullRequestNumber: 1,
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
httpmock.RegisterResponder("GET", giteaCurrOpen, httpmock.NewStringResponder(200, "[]"))
httpmock.RegisterResponder("POST", giteaPR1Merge, func(req *http.Request) (*http.Response, error) {
return httpmock.NewStringResponse(409, `{}`), nil
})
_, err := driver.MergePullRequest(options)
assert.Error(t, err)
}
func TestGiteaDriver_MergePullRequest(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
PullRequestNumber: 1,
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
var mergeRequest *http.Request
httpmock.RegisterResponder("GET", giteaCurrOpen, httpmock.NewStringResponder(200, `[{"number": 1, "base": {"label": "main"}, "head": {"label": "git-town/feature"} }]`))
httpmock.RegisterResponder("GET", giteaVersion, httpmock.NewStringResponder(200, `{"version": "1.11.5"}`))
httpmock.RegisterResponder("POST", giteaPR1Merge, func(req *http.Request) (*http.Response, error) {
mergeRequest = req
return httpmock.NewStringResponse(200, `[]`), nil
})
httpmock.RegisterResponder("GET", giteaPR1, httpmock.NewStringResponder(200, `{"number": 1, "merge_commit_sha": "abc123"}`))
sha, err := driver.MergePullRequest(options)
assert.NoError(t, err)
assert.Equal(t, "abc123", sha)
mergeParameters := getRequestData(mergeRequest)
assert.Equal(t, "title", mergeParameters["MergeTitleField"])
assert.Equal(t, "extra detail1\nextra detail2", mergeParameters["MergeMessageField"])
assert.Equal(t, "squash", mergeParameters["Do"])
}
func TestGiteaDriver_MergePullRequest_MergeFails(t *testing.T) {
driver, teardown := setupGiteaDriver(t, "TOKEN")
defer teardown()
options := drivers.MergePullRequestOptions{
Branch: "feature",
CommitMessage: "title\nextra detail1\nextra detail2",
ParentBranch: "main",
}
httpmock.RegisterResponder("GET", giteaCurrOpen, httpmock.NewStringResponder(200, `[{"number": 1, "base": {"label": "main"}, "head": {"label": "foo"} }]`))
httpmock.RegisterResponder("GET", giteaVersion, httpmock.NewStringResponder(200, `{"version": "1.11.5"}`))
httpmock.RegisterResponder("POST", giteaPR1Merge, httpmock.NewStringResponder(404, ""))
_, err := driver.MergePullRequest(options)
assert.Error(t, err)
}
<|start_filename|>text-run/helpers/get-command.js<|end_filename|>
const path = require("path")
module.exports = function getCommand(filename) {
return path.basename(filename, ".md")
}
<|start_filename|>src/steps/create_tracking_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// CreateTrackingBranchStep pushes the branch with the given name up to origin
// and sets the remote branch as its tracking branch.
type CreateTrackingBranchStep struct {
NoOpStep
BranchName string
}
// CreateUndoStep returns the undo step for this step.
func (step *CreateTrackingBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &DeleteRemoteBranchStep{BranchName: step.BranchName}, nil
}
// Run executes this step.
func (step *CreateTrackingBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.CreateTrackingBranch(step.BranchName)
}
<|start_filename|>src/steps/squash_merge_branch_step.go<|end_filename|>
package steps
import (
"fmt"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
"github.com/git-town/git-town/src/prompt"
)
// SquashMergeBranchStep squash merges the branch with the given name into the current branch.
type SquashMergeBranchStep struct {
NoOpStep
BranchName string
CommitMessage string
}
// CreateAbortStep returns the abort step for this step.
func (step *SquashMergeBranchStep) CreateAbortStep() Step {
return &DiscardOpenChangesStep{}
}
// CreateUndoStep returns the undo step for this step.
func (step *SquashMergeBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
currentSHA, err := repo.Silent.CurrentSha()
if err != nil {
return nil, err
}
return &RevertCommitStep{Sha: currentSHA}, nil
}
// GetAutomaticAbortError returns the error message to display when this step
// causes the command to automatically abort.
func (step *SquashMergeBranchStep) GetAutomaticAbortError() error {
return fmt.Errorf("aborted because commit exited with error")
}
// Run executes this step.
func (step *SquashMergeBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
err := repo.Logging.SquashMerge(step.BranchName)
if err != nil {
return err
}
author, err := prompt.GetSquashCommitAuthor(step.BranchName, repo)
if err != nil {
return fmt.Errorf("error getting squash commit author: %w", err)
}
repoAuthor, err := repo.Silent.Author()
if err != nil {
return fmt.Errorf("cannot determine repo author: %w", err)
}
if err = repo.Silent.CommentOutSquashCommitMessage(""); err != nil {
return fmt.Errorf("cannot comment out the squash commit message: %w", err)
}
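// Create the squash commit: preserve the original branch author when it differs from the repo author, use the given commit message when provided, otherwise let the user edit the message interactively.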
switch {
case author != repoAuthor && step.CommitMessage != "":
return repo.Logging.CommitWithMessageAndAuthor(step.CommitMessage, author)
case step.CommitMessage != "":
return repo.Logging.CommitWithMessage(step.CommitMessage)
default:
return repo.Logging.Commit()
}
}
// ShouldAutomaticallyAbortOnError returns whether this step should cause the command to
// automatically abort if it errors.
func (step *SquashMergeBranchStep) ShouldAutomaticallyAbortOnError() bool {
return true
}
<|start_filename|>src/drivers/gitlab_test.go<|end_filename|>
package drivers_test
import (
"testing"
"github.com/git-town/git-town/src/drivers"
"github.com/stretchr/testify/assert"
)
func TestLoadGitLab(t *testing.T) {
driver := drivers.LoadGitlab(mockConfig{
codeHostingDriverName: "gitlab",
remoteOriginURL: "<EMAIL>:git-town/git-town.git",
})
assert.NotNil(t, driver)
assert.Equal(t, "GitLab", driver.HostingServiceName())
assert.Equal(t, "https://self-hosted-gitlab.com/git-town/git-town", driver.RepositoryURL())
}
func TestLoadGitLab_customHostName(t *testing.T) {
driver := drivers.LoadGitlab(mockConfig{
remoteOriginURL: "git@my-ssh-identity.com:git-town/git-town.git",
configuredHostName: "gitlab.com",
})
assert.NotNil(t, driver)
assert.Equal(t, "GitLab", driver.HostingServiceName())
assert.Equal(t, "https://gitlab.com/git-town/git-town", driver.RepositoryURL())
}
<|start_filename|>src/steps/create_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// CreateBranchStep creates a new branch
// but leaves the current branch unchanged.
type CreateBranchStep struct {
NoOpStep
BranchName string
StartingPoint string
}
// CreateUndoStep returns the undo step for this step.
func (step *CreateBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &DeleteLocalBranchStep{BranchName: step.BranchName}, nil
}
// Run executes this step.
func (step *CreateBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.CreateBranch(step.BranchName, step.StartingPoint)
}
<|start_filename|>src/cmd/diff_parent.go<|end_filename|>
package cmd
import (
"fmt"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/git"
"github.com/git-town/git-town/src/prompt"
"github.com/spf13/cobra"
)
type diffParentConfig struct {
branch string
parentBranch string
}
var diffParentCommand = &cobra.Command{
Use: "diff-parent [<branch>]",
Short: "Shows the changes committed to a feature branch",
Long: `Shows the changes committed to a feature branch
Works on either the current branch or the branch name provided.
Exits with error code 1 if the given branch is a perennial branch or the main branch.`,
Run: func(cmd *cobra.Command, args []string) {
config, err := getDiffParentConfig(args, prodRepo)
if err != nil {
cli.Exit(err)
}
err = prodRepo.Logging.DiffParent(config.branch, config.parentBranch)
if err != nil {
cli.Exit(err)
}
},
Args: cobra.MaximumNArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
return validateIsConfigured(prodRepo)
},
}
// getDiffParentConfig gathers the configuration for the diff-parent command.
func getDiffParentConfig(args []string, repo *git.ProdRepo) (config diffParentConfig, err error) {
initialBranch, err := repo.Silent.CurrentBranch()
if err != nil {
return config, err
}
if len(args) == 0 {
config.branch = initialBranch
} else {
config.branch = args[0]
}
if initialBranch != config.branch {
hasBranch, err := repo.Silent.HasLocalBranch(config.branch)
if err != nil {
return config, err
}
if !hasBranch {
return config, fmt.Errorf("there is no local branch named %q", config.branch)
}
}
if !prodRepo.Config.IsFeatureBranch(config.branch) {
return config, fmt.Errorf("you can only diff-parent feature branches")
}
err = prompt.EnsureKnowsParentBranches([]string{config.branch}, repo)
if err != nil {
return config, err
}
config.parentBranch = repo.Config.GetParentBranch(config.branch)
return config, nil
}
func init() {
RootCmd.AddCommand(diffParentCommand)
}
<|start_filename|>src/cmd/abort.go<|end_filename|>
package cmd
import (
"fmt"
"github.com/git-town/git-town/src/cli"
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/steps"
"github.com/spf13/cobra"
)
var abortCmd = &cobra.Command{
Use: "abort",
Short: "Aborts the last run git-town command",
Run: func(cmd *cobra.Command, args []string) {
runState, err := steps.LoadPreviousRunState(prodRepo)
if err != nil {
cli.Exit(fmt.Errorf("cannot load previous run state: %w", err))
}
if runState == nil || !runState.IsUnfinished() {
cli.Exit(fmt.Errorf("nothing to abort"))
}
abortRunState := runState.CreateAbortRunState()
err = steps.Run(&abortRunState, prodRepo, drivers.Load(prodRepo.Config, &prodRepo.Silent, cli.PrintDriverAction))
if err != nil {
cli.Exit(err)
}
},
Args: cobra.NoArgs,
PreRunE: func(cmd *cobra.Command, args []string) error {
if err := ValidateIsRepository(prodRepo); err != nil {
return err
}
return validateIsConfigured(prodRepo)
},
}
func init() {
RootCmd.AddCommand(abortCmd)
}
<|start_filename|>src/steps/add_to_perennial_branch.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// AddToPerennialBranches adds the branch with the given name as a perennial branch.
type AddToPerennialBranches struct {
NoOpStep
BranchName string
}
// CreateUndoStep returns the undo step for this step.
func (step *AddToPerennialBranches) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
return &RemoveFromPerennialBranches{BranchName: step.BranchName}, nil
}
// Run executes this step.
func (step *AddToPerennialBranches) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Config.AddToPerennialBranches(step.BranchName)
}
<|start_filename|>src/steps/delete_remote_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// DeleteRemoteBranchStep deletes the current branch from the origin remote.
type DeleteRemoteBranchStep struct {
NoOpStep
BranchName string
IsTracking bool
branchSha string
}
// CreateUndoStep returns the undo step for this step.
func (step *DeleteRemoteBranchStep) CreateUndoStep(repo *git.ProdRepo) (Step, error) {
if step.IsTracking {
return &CreateTrackingBranchStep{BranchName: step.BranchName}, nil
}
return &CreateRemoteBranchStep{BranchName: step.BranchName, Sha: step.branchSha}, nil
}
// Run executes this step.
func (step *DeleteRemoteBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) (err error) {
if !step.IsTracking {
trackingBranchName := repo.Silent.TrackingBranchName(step.BranchName)
step.branchSha, err = repo.Silent.ShaForBranch(trackingBranchName)
if err != nil {
return err
}
}
return repo.Logging.DeleteRemoteBranch(step.BranchName)
}
<|start_filename|>src/steps/create_remote_branch_step.go<|end_filename|>
package steps
import (
"github.com/git-town/git-town/src/drivers"
"github.com/git-town/git-town/src/git"
)
// CreateRemoteBranchStep creates the branch with the given name at the given SHA on the origin remote.
type CreateRemoteBranchStep struct {
NoOpStep
BranchName string
Sha string
}
// Run executes this step.
func (step *CreateRemoteBranchStep) Run(repo *git.ProdRepo, driver drivers.CodeHostingDriver) error {
return repo.Logging.CreateRemoteBranch(step.Sha, step.BranchName)
}
| FrazerClews/git-town |
<|start_filename|>resource/config/appfw/appfwsettings.go<|end_filename|>
/*
* Copyright (c) 2021 Citrix Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package appfw
/**
* Configuration for AS settings resource.
*/
type Appfwsettings struct {
/**
* Profile to use when a connection does not match any policy. Default setting is APPFW_BYPASS, which sends unmatched connections back to the Citrix ADC without attempting to filter them further.
*/
Defaultprofile string `json:"defaultprofile,omitempty"`
/**
* Profile to use when an application firewall policy evaluates to undefined (UNDEF).
An UNDEF event indicates an internal error condition. The APPFW_BLOCK built-in profile is the default setting. You can specify a different built-in or user-created profile as the UNDEF profile.
*/
Undefaction string `json:"undefaction,omitempty"`
/**
* Timeout, in seconds, after which a user session is terminated. Before continuing to use the protected web site, the user must establish a new session by opening a designated start URL.
*/
Sessiontimeout int `json:"sessiontimeout,omitempty"`
/**
* Maximum number of connections per second that the application firewall learning engine examines to generate new relaxations for learning-enabled security checks. The application firewall drops any connections above this limit from the list of connections used by the learning engine.
*/
Learnratelimit int `json:"learnratelimit,omitempty"`
/**
* Maximum amount of time (in seconds) that the application firewall allows a user session to remain active, regardless of user activity. After this time, the user session is terminated. Before continuing to use the protected web site, the user must establish a new session by opening a designated start URL.
*/
Sessionlifetime int `json:"sessionlifetime"` // Zero is a valid value
/**
* Name of the session cookie that the application firewall uses to track user sessions.
Must begin with a letter or number, and can consist of from 1 to 31 letters, numbers, and the hyphen (-) and underscore (_) symbols.
The following requirement applies only to the Citrix ADC CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my cookie name" or 'my cookie name').
*/
Sessioncookiename string `json:"sessioncookiename,omitempty"`
/**
* Name of an HTTP header that contains the IP address that the client used to connect to the protected web site or service.
*/
Clientiploggingheader string `json:"clientiploggingheader,omitempty"`
/**
* Cumulative total maximum number of bytes in web forms imported to a protected web site. If a user attempts to upload files with a total byte count higher than the specified limit, the application firewall blocks the request.
*/
Importsizelimit int `json:"importsizelimit,omitempty"`
/**
* Flag used to enable/disable automatic signature updates
*/
Signatureautoupdate string `json:"signatureautoupdate,omitempty"`
/**
* URL to download the mapping file from server
*/
Signatureurl string `json:"signatureurl,omitempty"`
/**
* String that is prepended to all encrypted cookie values.
*/
Cookiepostencryptprefix string `json:"cookiepostencryptprefix,omitempty"`
/**
* Log requests that are so malformed that application firewall parsing doesn't occur.
*/
Logmalformedreq string `json:"logmalformedreq,omitempty"`
/**
* Enable Geo-Location Logging in CEF format logs.
*/
Geolocationlogging string `json:"geolocationlogging,omitempty"`
/**
* Enable CEF format logs.
*/
Ceflogging string `json:"ceflogging,omitempty"`
/**
* Transform multibyte (double- or half-width) characters to single width characters.
*/
Entitydecoding string `json:"entitydecoding,omitempty"`
/**
* Use configurable secret key in AppFw operations
*/
Useconfigurablesecretkey string `json:"useconfigurablesecretkey,omitempty"`
/**
* Maximum number of sessions that the application firewall allows to be active, regardless of user activity. Once this limit is reached, no more user sessions are created.
*/
Sessionlimit int `json:"sessionlimit"` // Zero is a valid value
/**
* Flag to define the action taken on malformed requests that the application firewall cannot parse
*/
Malformedreqaction []string `json:"malformedreqaction,omitempty"`
/**
* Flag used to enable/disable ADM centralized learning
*/
Centralizedlearning string `json:"centralizedlearning,omitempty"`
/**
* Proxy Server IP to get updated signatures from AWS.
*/
Proxyserver string `json:"proxyserver,omitempty"`
/**
* Proxy Server Port to get updated signatures from AWS.
*/
Proxyport int `json:"proxyport,omitempty"`
//------- Read only Parameter ---------;
Learning string `json:"learning,omitempty"`
Builtin string `json:"builtin,omitempty"`
Feature string `json:"feature,omitempty"`
}
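// Illustrative sketch (not part of the original source): constructing an Appfwsettings
// value as it might be sent to the NITRO API. All field values below are hypothetical
// examples, not recommended settings.
func exampleAppfwsettings() Appfwsettings {
	return Appfwsettings{
		Defaultprofile:  "APPFW_BYPASS",
		Undefaction:     "APPFW_BLOCK",
		Sessiontimeout:  900,
		Sessionlifetime: 0, // zero is serialized because this field has no omitempty tag
		Ceflogging:      "ON",
	}
}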
| kasturi1204/adc-nitro-go |
<|start_filename|>samples/CommandQuery.Sample.AWSLambda.Tests/FakeLambdaContext.cs<|end_filename|>
using System;
using Amazon.Lambda.Core;
namespace CommandQuery.Sample.AWSLambda.Tests
{
public class FakeLambdaContext : ILambdaContext
{
public string AwsRequestId { get; set; }
public IClientContext ClientContext { get; set; }
public string FunctionName { get; set; }
public string FunctionVersion { get; set; }
public ICognitoIdentity Identity { get; set; }
public string InvokedFunctionArn { get; set; }
public ILambdaLogger Logger { get; set; }
public string LogGroupName { get; set; }
public string LogStreamName { get; set; }
public int MemoryLimitInMB { get; set; }
public TimeSpan RemainingTime { get; set; }
public FakeLambdaContext()
{
Logger = new FakeLambdaLogger();
}
}
public class FakeLambdaLogger : ILambdaLogger
{
public void Log(string message)
{
}
public void LogLine(string message)
{
}
}
}
<|start_filename|>samples/CommandQuery.Sample.AWSLambda.Tests/CommandTests.cs<|end_filename|>
using System.Threading.Tasks;
using Amazon.Lambda.APIGatewayEvents;
using FluentAssertions;
using NUnit.Framework;
namespace CommandQuery.Sample.AWSLambda.Tests
{
public class CommandTests
{
public class when_using_the_real_function
{
[Test]
public async Task should_work()
{
var request = GetRequest("{ 'Value': 'Foo' }");
var context = new FakeLambdaContext();
var result = await new Command().Handle(request.CommandName("FooCommand"), context);
result.Should().NotBeNull();
}
[Test]
public async Task should_handle_errors()
{
var request = GetRequest("{ 'Value': 'Foo' }");
var context = new FakeLambdaContext();
var result = await new Command().Handle(request.CommandName("FailCommand"), context);
result.ShouldBeError("The command type 'FailCommand' could not be found");
}
APIGatewayProxyRequest GetRequest(string content) => new APIGatewayProxyRequest { Body = content };
}
}
}
<|start_filename|>samples/CommandQuery.Sample.Contracts/Commands/FooCommand.cs<|end_filename|>
namespace CommandQuery.Sample.Contracts.Commands
{
public class FooCommand : ICommand
{
public string Value { get; set; }
}
}
<|start_filename|>samples/CommandQuery.Sample.Contracts/Queries/BarQuery.cs<|end_filename|>
namespace CommandQuery.Sample.Contracts.Queries
{
public class BarQuery : IQuery<Bar>
{
public int Id { get; set; }
}
public class Bar
{
public int Id { get; set; }
public string Value { get; set; }
}
}
<|start_filename|>samples/CommandQuery.Sample.AspNet.WebApi.Tests/ShouldExtensions.cs<|end_filename|>
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using System.Web.Http;
using FluentAssertions;
namespace CommandQuery.Sample.AspNet.WebApi.Tests
{
public static class ShouldExtensions
{
public static async Task ShouldBeErrorAsync(this IHttpActionResult result, string message)
{
await (await result.ExecuteAsync(CancellationToken.None)).ShouldBeErrorAsync(message);
}
public static async Task ShouldBeErrorAsync(this HttpResponseMessage result, string message)
{
result.Should().NotBeNull();
result.IsSuccessStatusCode.Should().BeFalse();
var value = await result.Content.ReadAsStringAsync();
value.Should().NotBeNull();
value.Should().Contain(message);
}
}
}
<|start_filename|>samples/CommandQuery.Sample.Handlers/CultureService.cs<|end_filename|>
using System.Globalization;
namespace CommandQuery.Sample.Handlers
{
public interface ICultureService
{
void SetCurrentCulture(string name);
}
public class CultureService : ICultureService
{
public void SetCurrentCulture(string name)
{
var culture = CultureInfo.CreateSpecificCulture(name);
CultureInfo.DefaultThreadCurrentCulture = culture;
CultureInfo.DefaultThreadCurrentUICulture = culture;
}
}
}
<|start_filename|>samples/CommandQuery.Sample.Handlers/DateTimeProxy.cs<|end_filename|>
using System;
namespace CommandQuery.Sample.Handlers
{
public interface IDateTimeProxy
{
DateTime Now { get; }
}
public class DateTimeProxy : IDateTimeProxy
{
public DateTime Now => DateTime.Now;
}
}
<|start_filename|>src/CommandQuery.AspNet.WebApi/CommandQueryDirectRouteProvider.cs<|end_filename|>
using System.Collections.Generic;
using System.Web.Http.Controllers;
using System.Web.Http.Routing;
namespace CommandQuery.AspNet.WebApi
{
/// <summary>
/// Direct route provider for the <see cref="System.Web.Http.HttpConfiguration" /> with attribute inheritance for actions.
/// </summary>
public class CommandQueryDirectRouteProvider : DefaultDirectRouteProvider
{
/// <summary>Gets a set of route factories for the given action descriptor.</summary>
/// <param name="actionDescriptor">The action descriptor.</param>
/// <returns>A set of route factories.</returns>
protected override IReadOnlyList<IDirectRouteFactory> GetActionRouteFactories(HttpActionDescriptor actionDescriptor)
{
return actionDescriptor.GetCustomAttributes<IDirectRouteFactory>(inherit: true);
}
}
}
<|start_filename|>samples/CommandQuery.Sample.AspNet.WebApi.Tests/CommandControllerTests.cs<|end_filename|>
using System.Net.Http;
using System.Threading.Tasks;
using System.Web.Http;
using System.Web.Http.Results;
using CommandQuery.Sample.AspNet.WebApi.Controllers;
using FluentAssertions;
using Newtonsoft.Json.Linq;
using NUnit.Framework;
namespace CommandQuery.Sample.AspNet.WebApi.Tests
{
public class CommandControllerTests
{
public class when_using_the_real_controller
{
[SetUp]
public void SetUp()
{
var configuration = new HttpConfiguration();
WebApiConfig.Register(configuration);
var commandProcessor = configuration.DependencyResolver.GetService(typeof(ICommandProcessor)) as ICommandProcessor;
Subject = new CommandController(commandProcessor, null)
{
Request = new HttpRequestMessage(),
Configuration = configuration
};
}
[Test]
public async Task should_work()
{
var json = JObject.Parse("{ 'Value': 'Foo' }");
var result = await Subject.Handle("FooCommand", json) as OkResult;
result.Should().NotBeNull();
}
[Test]
public async Task should_handle_errors()
{
var json = JObject.Parse("{ 'Value': 'Foo' }");
var result = await Subject.Handle("FailCommand", json);
await result.ShouldBeErrorAsync("The command type 'FailCommand' could not be found");
}
CommandController Subject;
}
}
} | NanoFabricFX/CommandQuery |
<|start_filename|>Sources/WatchCon.h<|end_filename|>
//
// WatchCon.h
// WatchCon
//
// Created by <NAME> on 07/02/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
//! Project version number for WatchCon.
FOUNDATION_EXPORT double WatchConVersionNumber;
//! Project version string for WatchCon.
FOUNDATION_EXPORT const unsigned char WatchConVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <WatchCon/PublicHeader.h>
#import <WatchCon/WatchCon.h>
#import <WatchCon/WatchConSession.h>
<|start_filename|>Sample/WatchOS-Sample/WatchOS-Sample WatchKit Extension/InterfaceController.h<|end_filename|>
//
// InterfaceController.h
// WatchOS-Sample WatchKit Extension
//
// Created by <NAME> on 11/02/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
#import <WatchKit/WatchKit.h>
#import <Foundation/Foundation.h>
#import <WatchCon/WatchCon.h>
@interface InterfaceController : WKInterfaceController<WatchConSessionDelegate>
@end
<|start_filename|>Pods/Target Support Files/Pods-WatchCon-iOS/Pods-WatchCon-iOS-umbrella.h<|end_filename|>
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
FOUNDATION_EXPORT double Pods_WatchCon_iOSVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_WatchCon_iOSVersionString[];
<|start_filename|>Sample/WatchOS-Sample/WatchOS-Sample WatchKit Extension/ExtensionDelegate.h<|end_filename|>
//
// ExtensionDelegate.h
// WatchOS-Sample WatchKit Extension
//
// Created by <NAME> on 11/02/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
#import <WatchKit/WatchKit.h>
@interface ExtensionDelegate : NSObject <WKExtensionDelegate>
@end
<|start_filename|>Sources/WatchConSession.h<|end_filename|>
//
// WatchConSession.h
// WatchCon
//
// MIT License
//
// Copyright (c) 2017 <NAME>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#import <Foundation/Foundation.h>
#import <WatchConnectivity/WatchConnectivity.h>
NS_ASSUME_NONNULL_BEGIN
/**
* WatchCon Session Delegate to give info about processes
*/
@protocol WatchConSessionDelegate <NSObject>
@optional
/**
* Called when the session has completed activation.
* Activation succeeded if the state is not WCSessionActivationStateNotActivated
*/
- (void)activationDidCompleteWithState:(WCSessionActivationState)activationState
error:(nullable NSError *)error __IOS_AVAILABLE(9.3) __WATCHOS_AVAILABLE(2.2);
/**
* Called when the session can no longer be used to modify or add any new transfers
*/
- (void)sessionDidBecomeInactive:(WCSession *)session __IOS_AVAILABLE(9.3) __WATCHOS_UNAVAILABLE;
/**
* Called when all delegate callbacks for the previously selected watch have occurred
*/
- (void)sessionDidDeactivate:(WCSession *)session __IOS_AVAILABLE(9.3) __WATCHOS_UNAVAILABLE;
#pragma mark - State Changes
/**
* Called when any of the Watch state properties change
*/
- (void)sessionWatchStateDidChange:(WCSession *)session __WATCHOS_UNAVAILABLE;
/**
* Called when the reachable state of the counterpart app changes
*/
- (void)sessionReachabilityDidChange:(WCSession *)session;
@required
#pragma mark - Background transfers
/**
* Called on the delegate of the receiver. Will be called on startup if an applicationContext is available
*/
- (void)didReceiveApplicationContext:(NSDictionary<NSString *, id> *)applicationContext;
/**
* Will be called on the receiver at startup if the user info finished transferring while the receiver was not running
*/
- (void)didReceiveUserInfo:(NSDictionary<NSString *, id> *)userInfo;
/**
* Called on the sending side after the file transfer has successfully completed or failed with an error
*/
- (void)didFinishFileTransfer:(WCSessionFileTransfer *)fileTransfer error:(nullable NSError *)error;
/**
* Will be called on startup if the file finished transferring when the receiver was not running
*/
- (void)didReceiveFile:(WCSessionFile *)file;
@end
/**
* Creates a session between an iPhone and an Apple Watch
*/
@interface WatchConSession : NSObject<WCSessionDelegate>
/**
* Delegate to give info about processes
*/
@property (nonatomic, weak, nullable) id <WatchConSessionDelegate> delegate;
/**
* Singleton instance
*/
+ (instancetype)sharedInstance;
/**
* Activates session
*/
- (void)activate;
/**
* A way to transfer the latest state of an app
*
* @param dictionary Application Context
*/
- (void)updateApplicationContext:(NSDictionary<NSString *, id> *)dictionary;
/**
* Transfers user info
*
* @param dictionary User Info
*/
- (void)transferUserInfo:(NSDictionary<NSString *, id> *)dictionary;
/**
* Transfers the file at the given URL
*
* @param url File URL
* @param metadataDict Optional metadata dictionary; values must be property list types
*
* @return Whether the file is transferring (isTransferring)
*/
- (BOOL)transferFile:(NSURL *)url metadataDict:(nullable NSDictionary<NSString *, id> *)metadataDict;
/**
* Clients can use this method to send messages to the counterpart app
*
* @param message Dictionary
* @param completionBlock Handler for result or error
*/
- (void)sendMessage:(NSDictionary<NSString *, id> *)message
completionBlock:(void (^)(NSDictionary * _Nullable result, NSError * _Nullable error))completionBlock;
/**
* Clients can use this method to send message data
*
* @param messageData Data
* @param completionBlock Handler for result or error
*/
- (void)sendMessageData:(NSData *)messageData completionBlock:(void (^)(NSData * _Nullable result, NSError * _Nullable error))completionBlock;
@end
NS_ASSUME_NONNULL_END
<|start_filename|>Sample/WatchOS-Sample/WatchOS-Sample/ViewController.h<|end_filename|>
//
// ViewController.h
// WatchOS-Sample
//
// Created by <NAME> on 11/02/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <WatchCon/WatchCon.h>
@interface ViewController : UIViewController<WatchConSessionDelegate>
@end
<|start_filename|>Sample/WatchOS-Sample/WatchOS-Sample WatchKit Extension/NotificationController.h<|end_filename|>
//
// NotificationController.h
// WatchOS-Sample WatchKit Extension
//
// Created by <NAME> on 11/02/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
#import <WatchKit/WatchKit.h>
#import <Foundation/Foundation.h>
@interface NotificationController : WKUserNotificationInterfaceController
@end
| abdullahselek/WatchCon |
<|start_filename|>large-message-connect/src/main/java/com/bakdata/kafka/LargeMessageConverter.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2020 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import java.util.Map;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.storage.Converter;
/**
* Kafka {@code Converter} that serializes large messages on blob storage.
* <p>
* For configuration options, see {@link LargeMessageConverterConfig}.
*/
public class LargeMessageConverter implements Converter {
private Converter converter;
private LargeMessageStoringClient storingClient;
private LargeMessageRetrievingClient retrievingClient;
private boolean isKey;
@Override
public void configure(final Map<String, ?> configs, final boolean isKey) {
final LargeMessageConverterConfig config = new LargeMessageConverterConfig(configs);
this.storingClient = config.getStorer();
this.retrievingClient = config.getRetriever();
this.isKey = isKey;
this.converter = config.getConverter();
this.converter.configure(configs, isKey);
}
@Override
public byte[] fromConnectData(final String topic, final Schema schema, final Object value) {
final byte[] inner = this.converter.fromConnectData(topic, schema, value);
return this.storingClient.storeBytes(topic, inner, this.isKey);
}
@Override
public SchemaAndValue toConnectData(final String topic, final byte[] value) {
final byte[] inner = this.retrievingClient.retrieveBytes(value);
return this.converter.toConnectData(topic, inner);
}
}
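// Illustrative usage sketch (not part of the original sources). It shows how the
// converter can be configured programmatically for record values, mirroring the
// LargeMessageConverterTest setup; the base path, size limit, and topic name are
// hypothetical placeholder values, and blob storage credentials are omitted.
class LargeMessageConverterUsageExample {
    static Object roundTrip() {
        final java.util.Map<String, String> config = com.google.common.collect.ImmutableMap.<String, String>builder()
                .put(AbstractLargeMessageConfig.BASE_PATH_CONFIG, "s3://bucket/base")
                .put(AbstractLargeMessageConfig.MAX_BYTE_SIZE_CONFIG, "1000000")
                .put(LargeMessageConverterConfig.CONVERTER_CLASS_CONFIG,
                        org.apache.kafka.connect.storage.StringConverter.class.getName())
                .build();
        final LargeMessageConverter converter = new LargeMessageConverter();
        converter.configure(config, false); // false = configure for record values, true = for keys
        // Payloads below the configured maximum size stay inline (with a flag byte prepended);
        // larger payloads would be uploaded to the configured base path instead.
        final byte[] stored = converter.fromConnectData("topic", null, "large payload");
        return converter.toConnectData("topic", stored).value();
    }
}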
<|start_filename|>large-message-core/src/test/java/com/bakdata/kafka/LargeMessageStoringClientS3IntegrationTest.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2019 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import static com.bakdata.kafka.LargeMessageRetrievingClient.deserializeUri;
import static com.bakdata.kafka.LargeMessageRetrievingClient.getBytes;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
import com.adobe.testing.s3mock.junit5.S3MockExtension;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.amazonaws.util.IOUtils;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.util.Map;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.STRICT_STUBS)
class LargeMessageStoringClientS3IntegrationTest {
@RegisterExtension
static final S3MockExtension S3_MOCK = S3MockExtension.builder().silent()
.withSecureConnection(false).build();
private static final String TOPIC = "output";
private static final Deserializer<String> STRING_DESERIALIZER = Serdes.String().deserializer();
private static final Serializer<String> STRING_SERIALIZER = Serdes.String().serializer();
@Mock
static IdGenerator idGenerator;
private static Map<String, Object> createProperties(final Map<String, Object> properties) {
return ImmutableMap.<String, Object>builder()
.putAll(properties)
.put(AbstractLargeMessageConfig.S3_ENDPOINT_CONFIG, "http://localhost:" + S3_MOCK.getHttpPort())
.put(AbstractLargeMessageConfig.S3_REGION_CONFIG, "us-east-1")
.put(AbstractLargeMessageConfig.S3_ACCESS_KEY_CONFIG, "foo")
.put(AbstractLargeMessageConfig.S3_SECRET_KEY_CONFIG, "bar")
.put(AbstractLargeMessageConfig.S3_ENABLE_PATH_STYLE_ACCESS_CONFIG, true)
.build();
}
private static void expectNonBackedText(final String expected, final byte[] backedText) {
assertThat(STRING_DESERIALIZER.deserialize(null, getBytes(backedText)))
.isInstanceOf(String.class)
.isEqualTo(expected);
}
private static void expectBackedText(final String basePath, final String expected, final byte[] backedText,
final String type) {
final BlobStorageURI uri = deserializeUri(backedText);
expectBackedText(basePath, expected, uri, type);
}
private static void expectBackedText(final String basePath, final String expected, final BlobStorageURI uri,
final String type) {
assertThat(uri).asString().startsWith(basePath + TOPIC + "/" + type + "/");
final byte[] bytes = readBytes(uri);
final String deserialized = STRING_DESERIALIZER.deserialize(null, bytes);
assertThat(deserialized).isEqualTo(expected);
}
private static byte[] readBytes(final BlobStorageURI uri) {
try (final S3Object object = S3_MOCK.createS3Client().getObject(uri.getBucket(), uri.getKey());
final S3ObjectInputStream objectContent = object.getObjectContent()) {
return IOUtils.toByteArray(objectContent);
} catch (final IOException e) {
throw new RuntimeException(e);
}
}
private static LargeMessageStoringClient createStorer(final Map<String, Object> baseProperties) {
final Map<String, Object> properties = createProperties(baseProperties);
final AbstractLargeMessageConfig config = new AbstractLargeMessageConfig(properties);
return config.getStorer();
}
private static byte[] serialize(final String s) {
return STRING_SERIALIZER.serialize(null, s);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldWriteNonBackedText(final boolean isKey) {
final Map<String, Object> properties = ImmutableMap.<String, Object>builder()
.put(AbstractLargeMessageConfig.MAX_BYTE_SIZE_CONFIG, Integer.MAX_VALUE)
.build();
final LargeMessageStoringClient storer = createStorer(properties);
assertThat(storer.storeBytes(null, serialize("foo"), isKey))
.satisfies(backedText -> expectNonBackedText("foo", backedText));
}
@Test
void shouldWriteBackedTextKey() {
final String bucket = "bucket";
final String basePath = "s3://" + bucket + "/base/";
final Map<String, Object> properties = ImmutableMap.<String, Object>builder()
.put(AbstractLargeMessageConfig.MAX_BYTE_SIZE_CONFIG, 0)
.put(AbstractLargeMessageConfig.BASE_PATH_CONFIG, basePath)
.build();
final AmazonS3 s3 = S3_MOCK.createS3Client();
s3.createBucket(bucket);
final LargeMessageStoringClient storer = createStorer(properties);
assertThat(storer.storeBytes(TOPIC, serialize("foo"), true))
.satisfies(backedText -> expectBackedText(basePath, "foo", backedText, "keys"));
s3.deleteBucket(bucket);
}
@Test
void shouldUseConfiguredIdGenerator() {
final String bucket = "bucket";
final String basePath = "s3://" + bucket + "/base/";
final Map<String, Object> properties = ImmutableMap.<String, Object>builder()
.put(AbstractLargeMessageConfig.MAX_BYTE_SIZE_CONFIG, 0)
.put(AbstractLargeMessageConfig.BASE_PATH_CONFIG, basePath)
.put(AbstractLargeMessageConfig.ID_GENERATOR_CONFIG, MockIdGenerator.class)
.build();
final AmazonS3 s3 = S3_MOCK.createS3Client();
s3.createBucket(bucket);
final LargeMessageStoringClient storer = createStorer(properties);
when(idGenerator.generateId("foo".getBytes())).thenReturn("bar");
assertThat(storer.storeBytes(TOPIC, serialize("foo"), true))
.satisfies(backedText -> {
final BlobStorageURI uri = deserializeUri(backedText);
expectBackedText(basePath, "foo", uri, "keys");
assertThat(uri).asString().endsWith("bar");
});
s3.deleteBucket(bucket);
}
public static class MockIdGenerator implements IdGenerator {
@Override
public String generateId(final byte[] bytes) {
return idGenerator.generateId(bytes);
}
}
}
<|start_filename|>large-message-core/src/test/java/com/bakdata/kafka/LargeMessageRetrievingClientTest.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2021 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.mockito.Mockito.when;
import java.io.UncheckedIOException;
import java.util.Collections;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.STRICT_STUBS)
class LargeMessageRetrievingClientTest {
private static final Serializer<String> STRING_SERIALIZER = Serdes.String().serializer();
@Mock
BlobStorageClient client;
private static byte[] createNonBackedText(final String text) {
return LargeMessageStoringClient.serialize(serialize(text));
}
private static byte[] createBackedText(final String bucket, final String key) {
final String uri = "foo://" + bucket + "/" + key;
return LargeMessageStoringClient.serialize(uri);
}
private static byte[] serialize(final String s) {
return STRING_SERIALIZER.serialize(null, s);
}
private LargeMessageRetrievingClient createRetriever() {
return new LargeMessageRetrievingClient(Collections.singletonMap("foo", () -> this.client));
}
@Test
void shouldReadNonBackedText() {
final LargeMessageRetrievingClient retriever = this.createRetriever();
assertThat(retriever.retrieveBytes(createNonBackedText("foo")))
.isEqualTo(serialize("foo"));
}
@Test
void shouldReadNull() {
final LargeMessageRetrievingClient retriever = this.createRetriever();
assertThat(retriever.retrieveBytes(null))
.isNull();
}
@Test
void shouldReadBackedText() {
final String bucket = "bucket";
final String key = "key";
when(this.client.getObject(bucket, key)).thenReturn(serialize("foo"));
final LargeMessageRetrievingClient retriever = this.createRetriever();
assertThat(retriever.retrieveBytes(createBackedText(bucket, key)))
.isEqualTo(serialize("foo"));
}
@Test
void shouldThrowExceptionOnErroneousFlag() {
final LargeMessageRetrievingClient retriever = this.createRetriever();
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> retriever.retrieveBytes(new byte[]{2}))
.withMessage("Message can only be marked as backed or non-backed");
}
@Test
void shouldThrowExceptionError() {
final String bucket = "bucket";
final String key = "key";
when(this.client.getObject(bucket, key)).thenThrow(UncheckedIOException.class);
final LargeMessageRetrievingClient retriever = this.createRetriever();
final byte[] backedText = createBackedText(bucket, key);
assertThatExceptionOfType(UncheckedIOException.class)
.isThrownBy(() -> retriever.retrieveBytes(backedText));
}
}
<|start_filename|>large-message-connect/src/test/java/com/bakdata/kafka/LargeMessageConverterTest.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2020 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import static com.bakdata.kafka.LargeMessageRetrievingClient.deserializeUri;
import static com.bakdata.kafka.LargeMessageRetrievingClient.getBytes;
import static org.assertj.core.api.Assertions.assertThat;
import com.adobe.testing.s3mock.junit5.S3MockExtension;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.google.common.collect.ImmutableMap;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.storage.Converter;
import org.apache.kafka.connect.storage.StringConverter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
class LargeMessageConverterTest {
@RegisterExtension
static final S3MockExtension S3_MOCK = S3MockExtension.builder().silent()
.withSecureConnection(false).build();
private static final String TOPIC = "topic";
private static final Converter STRING_CONVERTER = new StringConverter();
private static final Serializer<String> STRING_SERIALIZER = new StringSerializer();
private static final Deserializer<String> STRING_DESERIALIZER = new StringDeserializer();
private final AmazonS3 s3Client = S3_MOCK.createS3Client();
private LargeMessageConverter converter = null;
private static byte[] createBackedText(final String bucket, final String key) {
final String uri = "s3://" + bucket + "/" + key;
return LargeMessageStoringClient.serialize(uri);
}
private static byte[] readBytes(final BlobStorageURI uri) {
try (final S3Object object = S3_MOCK.createS3Client().getObject(uri.getBucket(), uri.getKey());
final S3ObjectInputStream objectContent = object.getObjectContent()) {
return IOUtils.toByteArray(objectContent);
} catch (final IOException e) {
throw new RuntimeException(e);
}
}
private static Map<String, String> createProperties(final int maxSize, final String basePath) {
return ImmutableMap.<String, String>builder()
.put(AbstractLargeMessageConfig.S3_ENDPOINT_CONFIG, "http://localhost:" + S3_MOCK.getHttpPort())
.put(AbstractLargeMessageConfig.S3_REGION_CONFIG, "us-east-1")
.put(AbstractLargeMessageConfig.S3_ACCESS_KEY_CONFIG, "foo")
.put(AbstractLargeMessageConfig.S3_SECRET_KEY_CONFIG, "bar")
.put(AbstractLargeMessageConfig.S3_ENABLE_PATH_STYLE_ACCESS_CONFIG, "true")
.put(AbstractLargeMessageConfig.MAX_BYTE_SIZE_CONFIG, Integer.toString(maxSize))
.put(AbstractLargeMessageConfig.BASE_PATH_CONFIG, basePath)
.put(LargeMessageConverterConfig.CONVERTER_CLASS_CONFIG, StringConverter.class.getName())
.build();
}
private static SchemaAndValue toConnectData(final String text) {
return STRING_CONVERTER.toConnectData(null, text.getBytes());
}
private static byte[] createNonBackedText(final String text) {
return LargeMessageStoringClient.serialize(STRING_SERIALIZER.serialize(null, text));
}
private static void expectBackedText(final String basePath, final String expected, final byte[] s3BackedText,
final String type) {
final BlobStorageURI uri = deserializeUri(s3BackedText);
assertThat(uri).asString().startsWith(basePath + TOPIC + "/" + type + "/");
final byte[] bytes = readBytes(uri);
final String deserialized = Serdes.String().deserializer()
.deserialize(null, bytes);
assertThat(deserialized).isEqualTo(expected);
}
private static void expectNonBackedText(final String expected, final byte[] s3BackedText) {
assertThat(STRING_DESERIALIZER.deserialize(null, getBytes(s3BackedText)))
.isInstanceOf(String.class)
.isEqualTo(expected);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldConvertNonBackedToConnectData(final boolean isKey) {
this.initSetup(isKey, 5000, "s3://bucket/base");
final String text = "test";
final SchemaAndValue expected = toConnectData(text);
final SchemaAndValue schemaAndValue = this.converter.toConnectData(TOPIC, createNonBackedText(text));
assertThat(schemaAndValue).isEqualTo(expected);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldConvertNonBackedNullToConnectData(final boolean isKey) {
this.initSetup(isKey, 5000, "s3://bucket/base");
final SchemaAndValue expected = STRING_CONVERTER.toConnectData(null, null);
final SchemaAndValue schemaAndValue = this.converter.toConnectData(TOPIC, null);
assertThat(schemaAndValue).isEqualTo(expected);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldConvertBackedToConnectData(final boolean isKey) {
this.initSetup(isKey, 0, "s3://bucket/base");
final String bucket = "bucket";
final String key = "key";
final String text = "test";
this.s3Client.createBucket("bucket");
final SchemaAndValue expected = toConnectData(text);
this.store(bucket, key, text, TOPIC);
final SchemaAndValue schemaAndValue = this.converter.toConnectData(TOPIC, createBackedText(bucket, key));
assertThat(schemaAndValue).isEqualTo(expected);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldConvertBackedNullToConnectData(final boolean isKey) {
this.initSetup(isKey, 0, "s3://bucket/base");
final SchemaAndValue expected = STRING_CONVERTER.toConnectData(null, null);
final SchemaAndValue schemaAndValue = this.converter.toConnectData(TOPIC, null);
assertThat(schemaAndValue).isEqualTo(expected);
}
@Test
void shouldCreateBackedDataKey() {
final String bucket = "bucket";
final String basePath = "s3://" + bucket + "/base";
this.initSetup(true, 0, basePath);
final String text = "test";
final SchemaAndValue data = toConnectData(text);
this.s3Client.createBucket(bucket);
final byte[] bytes = this.converter.fromConnectData(TOPIC, data.schema(), data.value());
expectBackedText(basePath, text, bytes, "keys");
}
@Test
void shouldCreateBackedDataValue() {
final String bucket = "bucket";
final String basePath = "s3://" + bucket + "/base";
this.initSetup(false, 0, basePath);
final String text = "test";
final SchemaAndValue data = toConnectData(text);
this.s3Client.createBucket(bucket);
final byte[] bytes = this.converter.fromConnectData(TOPIC, data.schema(), data.value());
expectBackedText(basePath, text, bytes, "values");
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldCreateBackedNullData(final boolean isKey) {
this.initSetup(isKey, 0, "s3://bucket/base");
final SchemaAndValue data = STRING_CONVERTER.toConnectData(null, null);
final byte[] bytes = this.converter.fromConnectData(TOPIC, data.schema(), data.value());
assertThat(bytes).isNull();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldCreateNonBackedData(final boolean isKey) {
this.initSetup(isKey, 5000, "s3://bucket/base");
final String text = "test";
final SchemaAndValue data = toConnectData(text);
final byte[] bytes = this.converter.fromConnectData(TOPIC, data.schema(), data.value());
expectNonBackedText(text, bytes);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldCreateNonBackedNullData(final boolean isKey) {
this.initSetup(isKey, 5000, "s3://bucket/base");
final SchemaAndValue data = STRING_CONVERTER.toConnectData(null, null);
final byte[] bytes = this.converter.fromConnectData(TOPIC, data.schema(), data.value());
assertThat(bytes).isNull();
}
private void store(final String bucket, final String key, final String s, final String topic) {
this.s3Client.putObject(bucket, key, new ByteArrayInputStream(STRING_SERIALIZER.serialize(topic, s)),
new ObjectMetadata());
}
private void initSetup(final boolean isKey, final int maxSize, final String basePath) {
final Map<String, String> properties = createProperties(maxSize, basePath);
this.converter = new LargeMessageConverter();
this.converter.configure(properties, isKey);
}
}
<|start_filename|>large-message-core/src/main/java/com/bakdata/kafka/BlobStorageClient.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2021 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
/**
* Interface to access blob storage for getting, putting, and deleting blobs.
*/
public interface BlobStorageClient {
/**
* Delete all objects in a bucket associated with a specified prefix
*
* @param bucket the bucket to delete from
* @param prefix the prefix for which blobs should be deleted
*/
void deleteAllObjects(String bucket, String prefix);
/**
* Store a payload in a bucket
*
* @param bytes the payload
* @param bucket the bucket where the payload should be stored
* @param key the identifier for the payload within the bucket
* @return unique identifier to retrieve the payload
*/
String putObject(byte[] bytes, String bucket, String key);
/**
* Retrieve a payload from a bucket
*
* @param bucket the bucket where the payload is stored
* @param key the identifier for the payload within the bucket
* @return the payload
*/
byte[] getObject(String bucket, String key);
}
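// Illustrative sketch (not part of the original sources): a minimal in-memory
// implementation of the interface, included only to make the expected contract
// concrete. The URI returned by putObject merely has to identify the stored blob;
// the real implementations return a URI such as "s3://bucket/key" or a container-based equivalent.
class InMemoryBlobStorageClient implements BlobStorageClient {
    private final java.util.Map<String, byte[]> store = new java.util.concurrent.ConcurrentHashMap<>();

    @Override
    public void deleteAllObjects(final String bucket, final String prefix) {
        // remove every blob in the bucket whose key starts with the prefix
        this.store.keySet().removeIf(key -> key.startsWith(bucket + "/" + prefix));
    }

    @Override
    public String putObject(final byte[] bytes, final String bucket, final String key) {
        this.store.put(bucket + "/" + key, bytes);
        return "in-memory://" + bucket + "/" + key;
    }

    @Override
    public byte[] getObject(final String bucket, final String key) {
        return this.store.get(bucket + "/" + key);
    }
}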
<|start_filename|>large-message-core/src/test/java/com/bakdata/kafka/AzureBlobStorageIntegrationTest.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2021 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import com.azure.storage.blob.BlobServiceClient;
import com.azure.storage.blob.BlobServiceClientBuilder;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
@Testcontainers
abstract class AzureBlobStorageIntegrationTest {
@Container
private final GenericContainer<?> azure = new GenericContainer<>("mcr.microsoft.com/azure-storage/azurite")
.withExposedPorts(10000)
.withCommand("azurite-blob", "--blobHost", "0.0.0.0");
String generateConnectionString() {
final int port = this.azure.getMappedPort(10000);
final String host = this.azure.getHost();
return String.format("DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;"
+ "AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq"
+ "/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://%s:%d/devstoreaccount1;",
host, port);
}
BlobServiceClient getBlobServiceClient() {
return new BlobServiceClientBuilder()
.connectionString(this.generateConnectionString())
.buildClient();
}
}
<|start_filename|>large-message-core/src/test/java/com/bakdata/kafka/LargeMessageRetrievingClientAzureIntegrationTest.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2019 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import static com.bakdata.kafka.LargeMessageStoringClient.serialize;
import static org.assertj.core.api.Assertions.assertThat;
import com.azure.core.util.BinaryData;
import com.azure.storage.blob.BlobContainerClient;
import com.google.common.collect.ImmutableMap;
import io.confluent.common.config.ConfigDef;
import java.util.Map;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.junit.jupiter.api.Test;
class LargeMessageRetrievingClientAzureIntegrationTest extends AzureBlobStorageIntegrationTest {
private static final Serializer<String> STRING_SERIALIZER = Serdes.String().serializer();
private static void store(final BlobContainerClient containerClient, final String key, final String s) {
containerClient.getBlobClient(key)
.upload(BinaryData.fromBytes(s.getBytes()));
}
private static byte[] createBackedText(final String bucket, final String key) {
final String uri = "abs://" + bucket + "/" + key;
return serialize(uri);
}
private Map<String, Object> createProperties() {
return ImmutableMap.<String, Object>builder()
.put(AbstractLargeMessageConfig.AZURE_CONNECTION_STRING_CONFIG, this.generateConnectionString())
.build();
}
private LargeMessageRetrievingClient createRetriever() {
final Map<String, Object> properties = this.createProperties();
final ConfigDef configDef = AbstractLargeMessageConfig.baseConfigDef();
final AbstractLargeMessageConfig config = new AbstractLargeMessageConfig(configDef, properties);
return config.getRetriever();
}
@Test
void shouldReadBackedText() {
final String bucket = "bucket";
final BlobContainerClient containerClient = this.getBlobServiceClient().getBlobContainerClient(bucket);
try {
containerClient.create();
final String key = "key";
store(containerClient, key, "foo");
final LargeMessageRetrievingClient retriever = this.createRetriever();
assertThat(retriever.retrieveBytes(createBackedText(bucket, key)))
.isEqualTo(STRING_SERIALIZER.serialize(null, "foo"));
} finally {
containerClient.delete();
}
}
}
<|start_filename|>large-message-core/src/main/java/com/bakdata/kafka/LargeMessageRetrievingClient.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2020 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import static com.bakdata.kafka.LargeMessageStoringClient.CHARSET;
import static com.bakdata.kafka.LargeMessageStoringClient.IS_BACKED;
import static com.bakdata.kafka.LargeMessageStoringClient.IS_NOT_BACKED;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Supplier;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
/**
* Client for retrieving actual bytes of messages stored with {@link LargeMessageStoringClient}.
*/
@Slf4j
@RequiredArgsConstructor
public class LargeMessageRetrievingClient {
private final @NonNull Map<String, Supplier<BlobStorageClient>> clientFactories;
private final @NonNull Map<String, BlobStorageClient> clientCache = new HashMap<>();
static BlobStorageURI deserializeUri(final byte[] data) {
final byte[] uriBytes = getBytes(data);
final String rawUri = new String(uriBytes, CHARSET);
return BlobStorageURI.create(rawUri);
}
static byte[] getBytes(final byte[] data) {
final byte[] bytes = new byte[data.length - 1];
// flag is stored in first byte
System.arraycopy(data, 1, bytes, 0, data.length - 1);
return bytes;
}
/**
* Retrieve a payload that may have been stored on blob storage
*
* @param data payload
* @return actual payload retrieved from blob storage
*/
public byte[] retrieveBytes(final byte[] data) {
if (data == null) {
return null;
}
if (data[0] == IS_NOT_BACKED) {
return getBytes(data);
}
if (data[0] != IS_BACKED) {
throw new IllegalArgumentException("Message can only be marked as backed or non-backed");
}
return this.retrieveBackedBytes(data);
}
private byte[] retrieveBackedBytes(final byte[] data) {
final BlobStorageURI uri = deserializeUri(data);
final BlobStorageClient client = this.getClient(uri);
Objects.requireNonNull(client);
final byte[] bytes = client.getObject(uri.getBucket(), uri.getKey());
log.debug("Extracted large message from blob storage: {}", uri);
return bytes;
}
private BlobStorageClient getClient(final BlobStorageURI uri) {
final String scheme = uri.getScheme();
return this.clientCache.computeIfAbsent(scheme, this::createClient);
}
private BlobStorageClient createClient(final String scheme) {
return Optional.ofNullable(this.clientFactories.get(scheme))
.map(Supplier::get)
.orElseThrow(() -> AbstractLargeMessageConfig.unknownScheme(scheme));
}
}
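// Illustrative sketch (not part of the original sources): wires the retriever to a
// single URI scheme, the same way LargeMessageRetrievingClientTest does. In normal
// use the retriever is obtained via AbstractLargeMessageConfig#getRetriever(); the
// "s3" scheme and the injected client here are assumptions made for the example.
class LargeMessageRetrievingClientUsageExample {
    static byte[] restore(final BlobStorageClient client, final byte[] message) {
        final LargeMessageRetrievingClient retriever =
                new LargeMessageRetrievingClient(java.util.Collections.singletonMap("s3", () -> client));
        // Messages flagged IS_NOT_BACKED are returned as-is (minus the flag byte).
        // Messages flagged IS_BACKED carry a UTF-8 URI such as "s3://bucket/key";
        // its scheme selects the BlobStorageClient that downloads the actual payload.
        return retriever.retrieveBytes(message);
    }
}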
<|start_filename|>large-message-core/src/test/java/com/bakdata/kafka/AzureBlobStorageClientIntegrationTest.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2021 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import com.azure.core.util.BinaryData;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.BlobServiceClient;
import com.azure.storage.blob.models.BlobStorageException;
import com.azure.storage.blob.models.ListBlobsOptions;
import java.util.stream.Collectors;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.junit.jupiter.api.Test;
class AzureBlobStorageClientIntegrationTest extends AzureBlobStorageIntegrationTest {
private static final Serializer<String> STRING_SERIALIZER = Serdes.String().serializer();
private static void store(final BlobContainerClient bucket, final String key, final String s) {
bucket.getBlobClient(key)
.upload(BinaryData.fromBytes(s.getBytes()));
}
private static byte[] serialize(final String s) {
return STRING_SERIALIZER.serialize(null, s);
}
private static ListBlobsOptions withPrefix(final String prefix) {
return new ListBlobsOptions().setPrefix(prefix);
}
@Test
void shouldReadBackedText() {
final String bucket = "bucket";
final BlobServiceClient blobServiceClient = this.getBlobServiceClient();
final BlobContainerClient containerClient = blobServiceClient.getBlobContainerClient(bucket);
try {
containerClient.create();
final String key = "key";
store(containerClient, key, "foo");
final BlobStorageClient client = new AzureBlobStorageClient(blobServiceClient);
assertThat(client.getObject(bucket, key))
.isEqualTo(serialize("foo"));
} finally {
containerClient.delete();
}
}
@Test
void shouldWriteBackedText() {
final String bucket = "bucket";
final String key = "key";
final BlobServiceClient blobServiceClient = this.getBlobServiceClient();
final BlobContainerClient containerClient = blobServiceClient.getBlobContainerClient(bucket);
try {
containerClient.create();
final BlobStorageClient client = new AzureBlobStorageClient(blobServiceClient);
assertThat(client.putObject(serialize("foo"), bucket, key))
.isEqualTo("abs://" + bucket + "/key");
} finally {
containerClient.delete();
}
}
@Test
void shouldDeleteFiles() {
final String bucket = "bucket";
final BlobServiceClient blobServiceClient = this.getBlobServiceClient();
final BlobContainerClient containerClient = blobServiceClient.getBlobContainerClient(bucket);
containerClient.create();
try {
final BlobStorageClient client = new AzureBlobStorageClient(blobServiceClient);
client.putObject(serialize("foo"), bucket, "base/foo/1");
client.putObject(serialize("foo"), bucket, "base/foo/2");
client.putObject(serialize("foo"), bucket, "base/bar/1");
assertThat(containerClient.listBlobs(withPrefix("base/"), null).stream()
.collect(Collectors.toList())).hasSize(3);
client.deleteAllObjects(bucket, "base/foo/");
assertThat(containerClient.listBlobs(withPrefix("base/"), null).stream()
.collect(Collectors.toList())).hasSize(1);
} finally {
containerClient.delete();
}
}
@Test
void shouldThrowExceptionOnMissingObject() {
final String bucket = "bucket";
final BlobServiceClient blobServiceClient = this.getBlobServiceClient();
final BlobContainerClient containerClient = blobServiceClient.getBlobContainerClient(bucket);
try {
containerClient.create();
final String key = "key";
final BlobStorageClient client = new AzureBlobStorageClient(blobServiceClient);
assertThatExceptionOfType(BlobStorageException.class)
.isThrownBy(() -> client.getObject(bucket, key))
.withMessageContaining("The specified blob does not exist.");
} finally {
containerClient.delete();
}
}
@Test
void shouldThrowExceptionOnMissingBucketForGet() {
final String bucket = "bucket";
final String key = "key";
final BlobStorageClient client = new AzureBlobStorageClient(this.getBlobServiceClient());
assertThatExceptionOfType(BlobStorageException.class)
.isThrownBy(() -> client.getObject(bucket, key))
.withMessageContaining("The specified container does not exist.");
}
@Test
void shouldThrowExceptionOnMissingBucketForPut() {
final String bucket = "bucket";
final String key = "key";
final BlobStorageClient client = new AzureBlobStorageClient(this.getBlobServiceClient());
final byte[] foo = serialize("foo");
assertThatExceptionOfType(BlobStorageException.class)
.isThrownBy(() -> client.putObject(foo, bucket, key))
.withMessageContaining("The specified container does not exist.");
}
}
<|start_filename|>large-message-core/src/main/java/com/bakdata/kafka/LargeMessageStoringClient.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2020 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import lombok.Builder;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
/**
* Client for storing large {@code byte[]} on blob storage if the size exceeds a defined limit.
*/
@Slf4j
@Builder
public class LargeMessageStoringClient {
static final byte IS_NOT_BACKED = 0;
static final byte IS_BACKED = 1;
static final Charset CHARSET = StandardCharsets.UTF_8;
private static final String VALUE_PREFIX = "values";
private static final String KEY_PREFIX = "keys";
private final @NonNull BlobStorageClient client;
private final BlobStorageURI basePath;
private final int maxSize;
private final IdGenerator idGenerator;
private static String toString(final String s) {
return s == null ? "" : s;
}
static byte[] serialize(final String uri) {
final byte[] uriBytes = uri.getBytes(CHARSET);
return serialize(uriBytes, IS_BACKED);
}
static byte[] serialize(final byte[] bytes) {
return serialize(bytes, IS_NOT_BACKED);
}
private static byte[] serialize(final byte[] bytes, final byte flag) {
final byte[] fullBytes = new byte[bytes.length + 1];
fullBytes[0] = flag;
System.arraycopy(bytes, 0, fullBytes, 1, bytes.length);
return fullBytes;
}
/**
* Store bytes on blob storage if they exceed the configured maximum size.
*
* @param topic name of the topic the bytes are associated with
* @param bytes payload
* @param isKey whether the bytes represent the key of a message
* @return bytes representing the payload. Can be read using {@link LargeMessageRetrievingClient}
*/
public byte[] storeBytes(final String topic, final byte[] bytes, final boolean isKey) {
if (bytes == null) {
return null;
}
if (this.needsBacking(bytes)) {
final String key = this.createBlobStorageKey(topic, isKey, bytes);
final String uri = this.uploadToBlobStorage(key, bytes);
return serialize(uri);
} else {
return serialize(bytes);
}
}
/**
* Delete all files associated with a topic from blob storage
*
* @param topic name of the topic
*/
public void deleteAllFiles(final String topic) {
Objects.requireNonNull(this.basePath, "Base path must not be null");
final String prefix = this.createTopicPrefix(topic);
final String bucketName = this.basePath.getBucket();
log.info("Deleting blob storage backed files for topic '{}'", topic);
this.client.deleteAllObjects(bucketName, prefix);
log.info("Finished deleting blob storage backed files for topic '{}'", topic);
}
private String createBlobStorageKey(final String topic, final boolean isKey, final byte[] bytes) {
Objects.requireNonNull(this.basePath, "Base path must not be null");
Objects.requireNonNull(this.idGenerator, "Id generator must not be null");
final String prefix = isKey ? KEY_PREFIX : VALUE_PREFIX;
final String id = this.idGenerator.generateId(bytes);
return this.createTopicPrefix(topic) + prefix + "/" + id;
}
private String createTopicPrefix(final String topic) {
Objects.requireNonNull(topic, "Topic must not be null");
return toString(this.basePath.getKey()) + topic + "/";
}
private boolean needsBacking(final byte[] bytes) {
return bytes.length >= this.maxSize;
}
private String uploadToBlobStorage(final String key, final byte[] bytes) {
final String bucket = this.basePath.getBucket();
final String uri = this.client.putObject(bytes, bucket, key);
log.debug("Stored large message on blob storage: {}", uri);
return uri;
}
}
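// Illustrative sketch (not part of the original sources): builds a storing client
// directly through its Lombok builder instead of via AbstractLargeMessageConfig.
// The base path and the 1 MB threshold are hypothetical values; the referenced
// bucket must already exist for payloads that actually get offloaded.
class LargeMessageStoringClientUsageExample {
    static byte[] store(final BlobStorageClient client, final IdGenerator idGenerator, final byte[] payload) {
        final LargeMessageStoringClient storer = LargeMessageStoringClient.builder()
                .client(client)
                .basePath(BlobStorageURI.create("s3://bucket/base/"))
                .maxSize(1000 * 1000) // payloads of this size or larger are offloaded to blob storage
                .idGenerator(idGenerator)
                .build();
        // Offloaded values end up under "<base>/<topic>/values/<id>",
        // offloaded keys under "<base>/<topic>/keys/<id>".
        return storer.storeBytes("topic", payload, false);
    }
}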
<|start_filename|>large-message-core/src/test/java/com/bakdata/kafka/LargeMessageRetrievingClientS3IntegrationTest.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2019 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import static com.bakdata.kafka.LargeMessageStoringClient.serialize;
import static org.assertj.core.api.Assertions.assertThat;
import com.adobe.testing.s3mock.junit5.S3MockExtension;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.google.common.collect.ImmutableMap;
import io.confluent.common.config.ConfigDef;
import java.io.ByteArrayInputStream;
import java.util.Map;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
class LargeMessageRetrievingClientS3IntegrationTest {
@RegisterExtension
static final S3MockExtension S3_MOCK = S3MockExtension.builder().silent()
.withSecureConnection(false).build();
private static final Serializer<String> STRING_SERIALIZER = Serdes.String().serializer();
private static Map<String, Object> createProperties() {
return ImmutableMap.<String, Object>builder()
.put(AbstractLargeMessageConfig.S3_ENDPOINT_CONFIG, "http://localhost:" + S3_MOCK.getHttpPort())
.put(AbstractLargeMessageConfig.S3_REGION_CONFIG, "us-east-1")
.put(AbstractLargeMessageConfig.S3_ACCESS_KEY_CONFIG, "foo")
.put(AbstractLargeMessageConfig.S3_SECRET_KEY_CONFIG, "bar")
.put(AbstractLargeMessageConfig.S3_ENABLE_PATH_STYLE_ACCESS_CONFIG, true)
.build();
}
private static void store(final String bucket, final String key, final String s) {
S3_MOCK.createS3Client().putObject(bucket, key, new ByteArrayInputStream(s.getBytes()), new ObjectMetadata());
}
private static byte[] createBackedText(final String bucket, final String key) {
final String uri = "s3://" + bucket + "/" + key;
return serialize(uri);
}
private static LargeMessageRetrievingClient createRetriever() {
final Map<String, Object> properties = createProperties();
final ConfigDef configDef = AbstractLargeMessageConfig.baseConfigDef();
final AbstractLargeMessageConfig config = new AbstractLargeMessageConfig(configDef, properties);
return config.getRetriever();
}
@Test
void shouldReadBackedText() {
final String bucket = "bucket";
final AmazonS3 s3 = S3_MOCK.createS3Client();
s3.createBucket(bucket);
final String key = "key";
store(bucket, key, "foo");
final LargeMessageRetrievingClient retriever = createRetriever();
assertThat(retriever.retrieveBytes(createBackedText(bucket, key)))
.isEqualTo(STRING_SERIALIZER.serialize(null, "foo"));
s3.deleteBucket(bucket);
}
}
<|start_filename|>large-message-core/src/test/java/com/bakdata/kafka/LargeMessageStoringClientAzureIntegrationTest.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2019 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import static com.bakdata.kafka.LargeMessageRetrievingClient.deserializeUri;
import static org.assertj.core.api.Assertions.assertThat;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.BlobServiceClient;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.junit.jupiter.api.Test;
class LargeMessageStoringClientAzureIntegrationTest extends AzureBlobStorageIntegrationTest {
private static final String TOPIC = "output";
private static final Deserializer<String> STRING_DESERIALIZER = Serdes.String().deserializer();
private static final Serializer<String> STRING_SERIALIZER = Serdes.String().serializer();
private static byte[] serialize(final String s) {
return STRING_SERIALIZER.serialize(null, s);
}
private Map<String, Object> createProperties(final Map<String, Object> properties) {
return ImmutableMap.<String, Object>builder()
.putAll(properties)
.put(AbstractLargeMessageConfig.AZURE_CONNECTION_STRING_CONFIG, this.generateConnectionString())
.build();
}
private void expectBackedText(final String basePath, final String expected, final byte[] backedText,
final String type) {
final BlobStorageURI uri = deserializeUri(backedText);
this.expectBackedText(basePath, expected, uri, type);
}
private void expectBackedText(final String basePath, final String expected, final BlobStorageURI uri,
final String type) {
assertThat(uri).asString().startsWith(basePath + TOPIC + "/" + type + "/");
final byte[] bytes = this.readBytes(uri);
final String deserialized = STRING_DESERIALIZER.deserialize(null, bytes);
assertThat(deserialized).isEqualTo(expected);
}
private byte[] readBytes(final BlobStorageURI uri) {
return this.getBlobServiceClient().getBlobContainerClient(uri.getBucket())
.getBlobClient(uri.getKey())
.downloadContent()
.toBytes();
}
private LargeMessageStoringClient createStorer(final Map<String, Object> baseProperties) {
final Map<String, Object> properties = this.createProperties(baseProperties);
final AbstractLargeMessageConfig config = new AbstractLargeMessageConfig(properties);
return config.getStorer();
}
@Test
void shouldWriteBackedTextKey() {
final String bucket = "bucket";
final String basePath = "abs://" + bucket + "/base/";
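// a max byte size of 0 forces every non-null message to be backed by blob storage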
final Map<String, Object> properties = ImmutableMap.<String, Object>builder()
.put(AbstractLargeMessageConfig.MAX_BYTE_SIZE_CONFIG, 0)
.put(AbstractLargeMessageConfig.BASE_PATH_CONFIG, basePath)
.build();
final BlobServiceClient client = this.getBlobServiceClient();
final BlobContainerClient containerClient = client.getBlobContainerClient(bucket);
try {
containerClient.create();
final LargeMessageStoringClient storer = this.createStorer(properties);
assertThat(storer.storeBytes(TOPIC, serialize("foo"), true))
.satisfies(backedText -> this.expectBackedText(basePath, "foo", backedText, "keys"));
} finally {
containerClient.delete();
}
}
}
<|start_filename|>large-message-core/src/test/java/com/bakdata/kafka/LargeMessageStoringClientTest.java<|end_filename|>
/*
* MIT License
*
* Copyright (c) 2021 bakdata
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bakdata.kafka;
import static com.bakdata.kafka.LargeMessageRetrievingClient.getBytes;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatNullPointerException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableMap;
import java.io.UncheckedIOException;
import java.util.Map;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.STRICT_STUBS)
class LargeMessageStoringClientTest {
private static final String TOPIC = "output";
private static final Deserializer<String> STRING_DESERIALIZER = Serdes.String().deserializer();
private static final Serializer<String> STRING_SERIALIZER = Serdes.String().serializer();
@Mock
IdGenerator idGenerator;
@Mock
BlobStorageClient client;
private static void expectNonBackedText(final String expected, final byte[] backedText) {
assertThat(STRING_DESERIALIZER.deserialize(null, getBytes(backedText)))
.isInstanceOf(String.class)
.isEqualTo(expected);
}
private static byte[] serialize(final String s) {
return STRING_SERIALIZER.serialize(null, s);
}
private static LargeMessageStoringClient createStorer(final Map<String, Object> properties) {
final AbstractLargeMessageConfig config = new AbstractLargeMessageConfig(properties);
return config.getStorer();
}
private LargeMessageStoringClient createStorer(final int maxSize) {
return this.createStorer(maxSize, null);
}
private LargeMessageStoringClient createStorer(final int maxSize, final BlobStorageURI basePath) {
return this.createStorer(maxSize, basePath, this.idGenerator);
}
private LargeMessageStoringClient createStorer(final int maxSize, final BlobStorageURI basePath,
final IdGenerator idGenerator) {
return LargeMessageStoringClient.builder()
.client(this.client)
.basePath(basePath)
.maxSize(maxSize)
.idGenerator(idGenerator)
.build();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldWriteNonBackedText(final boolean isKey) {
final LargeMessageStoringClient storer = this.createStorer(Integer.MAX_VALUE);
assertThat(storer.storeBytes(null, serialize("foo"), isKey))
.satisfies(backedText -> expectNonBackedText("foo", backedText));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldWriteNonBackedNull(final boolean isKey) {
final LargeMessageStoringClient storer = this.createStorer(Integer.MAX_VALUE);
assertThat(storer.storeBytes(null, null, isKey))
.isNull();
}
@Test
void shouldWriteBackedTextKey() {
final String bucket = "bucket";
final String basePath = "foo://" + bucket + "/base/";
when(this.idGenerator.generateId(serialize("foo"))).thenReturn("key");
when(this.client.putObject(serialize("foo"), bucket, "base/" + TOPIC + "/keys/key"))
.thenReturn("uri");
final LargeMessageStoringClient storer = this.createStorer(0, BlobStorageURI.create(basePath));
assertThat(storer.storeBytes(TOPIC, serialize("foo"), true))
.isEqualTo(LargeMessageStoringClient.serialize("uri"));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldWriteBackedNull(final boolean isKey) {
final LargeMessageStoringClient storer = this.createStorer(0);
assertThat(storer.storeBytes(null, null, isKey))
.isNull();
}
@Test
void shouldWriteBackedTextValue() {
final String bucket = "bucket";
final String basePath = "foo://" + bucket + "/base/";
when(this.idGenerator.generateId(serialize("foo"))).thenReturn("key");
when(this.client.putObject(serialize("foo"), bucket, "base/" + TOPIC + "/values/key"))
.thenReturn("uri");
final LargeMessageStoringClient storer = this.createStorer(0, BlobStorageURI.create(basePath));
assertThat(storer.storeBytes(TOPIC, serialize("foo"), false))
.isEqualTo(LargeMessageStoringClient.serialize("uri"));
}
@Test
void shouldDeleteFiles() {
final String bucket = "bucket";
final String basePath = "foo://" + bucket + "/base/";
final LargeMessageStoringClient storer = this.createStorer(0, BlobStorageURI.create(basePath));
storer.deleteAllFiles(TOPIC);
verify(this.client).deleteAllObjects(bucket, "base/" + TOPIC + "/");
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldThrowExceptionOnError(final boolean isKey) {
final String bucket = "bucket";
final String basePath = "foo://" + bucket + "/base/";
when(this.idGenerator.generateId(any())).thenReturn("key");
when(this.client.putObject(any(), eq(bucket), any())).thenThrow(UncheckedIOException.class);
final LargeMessageStoringClient storer = this.createStorer(0, BlobStorageURI.create(basePath));
final byte[] foo = serialize("foo");
assertThatExceptionOfType(UncheckedIOException.class)
.isThrownBy(() -> storer.storeBytes(TOPIC, foo, isKey));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldThrowExceptionOnNullTopic(final boolean isKey) {
final String bucket = "bucket";
final String basePath = "foo://" + bucket + "/base/";
final LargeMessageStoringClient storer = this.createStorer(0, BlobStorageURI.create(basePath));
final byte[] foo = serialize("foo");
assertThatNullPointerException()
.isThrownBy(() -> storer.storeBytes(null, foo, isKey))
.withMessage("Topic must not be null");
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldThrowExceptionOnNullBasePath(final boolean isKey) {
final LargeMessageStoringClient storer = this.createStorer(0, null);
final byte[] foo = serialize("foo");
assertThatNullPointerException()
.isThrownBy(() -> storer.storeBytes(TOPIC, foo, isKey))
.withMessage("Base path must not be null");
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldThrowExceptionOnNullIdGenerator(final boolean isKey) {
final String bucket = "bucket";
final String basePath = "foo://" + bucket + "/base/";
final LargeMessageStoringClient storer = this.createStorer(0, BlobStorageURI.create(basePath), null);
final byte[] foo = serialize("foo");
assertThatNullPointerException()
.isThrownBy(() -> storer.storeBytes(TOPIC, foo, isKey))
.withMessage("Id generator must not be null");
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldWriteNonBackedTextWithConfig(final boolean isKey) {
final Map<String, Object> properties = ImmutableMap.<String, Object>builder()
.put(AbstractLargeMessageConfig.MAX_BYTE_SIZE_CONFIG, Integer.MAX_VALUE)
.build();
final LargeMessageStoringClient storer = createStorer(properties);
assertThat(storer.storeBytes(null, serialize("foo"), isKey))
.satisfies(backedText -> expectNonBackedText("foo", backedText));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void shouldWriteBackedTextWithConfig(final boolean isKey) {
final Map<String, Object> properties = ImmutableMap.<String, Object>builder()
.put(AbstractLargeMessageConfig.MAX_BYTE_SIZE_CONFIG, 0)
.build();
final LargeMessageStoringClient storer = createStorer(properties);
final byte[] foo = serialize("foo");
assertThatNullPointerException()
.isThrownBy(() -> storer.storeBytes(TOPIC, foo, isKey))
.withMessage("Base path must not be null");
}
} | jclarysse/kafka-large-message-serde |
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Transforms/GeneralTransform.cs<|end_filename|>
using System;
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
public class GeneralTransform
{
protected GeneralTransform()
{
}
//public Point TransformPoint(Point point)
//{
// throw new NotImplementedException();
//}
//public bool TryTransform(Point inPoint, out Point outPoint)
//{
// throw new NotImplementedException();
//}
//public Rect TransformBounds(Rect rect)
//{
// throw new NotImplementedException();
//}
//protected virtual bool TryTransformCore(Point inPoint, out Point outPoint)
//{
// throw new NotImplementedException();
//}
//protected virtual Rect TransformBoundsCore(Rect rect)
//{
// throw new NotImplementedException();
//}
//public GeneralTransform Inverse { get; }
//protected virtual GeneralTransform InverseCore { get; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Shapes/Shape.cs<|end_filename|>
using Xamarin.Forms;
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
public class Shape : GraphicsElement
{
public static readonly BindableProperty FillProperty = BindableProperty.Create(
nameof(Fill), typeof(Brush), typeof(Shape), null, propertyChanged: OnFillChanged);
public static readonly BindableProperty StrokeProperty = BindableProperty.Create(
nameof(Stroke), typeof(Brush), typeof(Shape), null, propertyChanged: OnStrokeChanged);
public static readonly BindableProperty StrokeThicknessProperty = BindableProperty.Create(
nameof(StrokeThickness), typeof(double), typeof(Shape), 1.0, propertyChanged: OnStrokeChanged);
private SKPaint fillPaint;
private SKPaint strokePaint;
protected Shape()
{
}
//public DoubleCollection StrokeDashArray { get; set; } = ?;
//public PenLineCap StrokeDashCap { get; set; } = ?;
//public double StrokeDashOffset { get; set; } = ?;
//public PenLineCap StrokeStartLineCap { get; set; } = ?;
//public PenLineCap StrokeEndLineCap { get; set; } = ?;
//public double StrokeMiterLimit { get; set; } = ?;
//public PenLineJoin StrokeLineJoin { get; set; } = ?;
//public Transform GeometryTransform { get; } = ?;
public Brush Fill
{
get { return (Brush)GetValue(FillProperty); }
set { SetValue(FillProperty, value); }
}
public Brush Stroke
{
get { return (Brush)GetValue(StrokeProperty); }
set { SetValue(StrokeProperty, value); }
}
public double StrokeThickness
{
get { return (double)GetValue(StrokeThicknessProperty); }
set { SetValue(StrokeThicknessProperty, value); }
}
public virtual SKPath GetPath()
{
return null;
}
public virtual SKPaint GetFillPaint(SKRect bounds)
{
if (fillPaint != null)
{
return fillPaint;
}
if (Fill == null)
{
return null;
}
fillPaint = Fill.GetPaint(bounds).Clone();
fillPaint.Style = SKPaintStyle.Fill;
return fillPaint;
}
public virtual SKPaint GetStrokePaint(SKRect bounds)
{
if (strokePaint != null)
{
return strokePaint;
}
if (Stroke == null)
{
return null;
}
strokePaint = Stroke.GetPaint(bounds).Clone();
strokePaint.Style = SKPaintStyle.Stroke;
strokePaint.StrokeWidth = (float)StrokeThickness;
return strokePaint;
}
protected override void OnPaint(SKCanvas canvas)
{
base.OnPaint(canvas);
var path = GetPath();
if (path != null)
{
var bounds = path.Bounds;
var fill = GetFillPaint(bounds);
if (fill != null)
{
canvas.DrawPath(path, fill);
}
var stroke = GetStrokePaint(bounds);
if (stroke != null)
{
canvas.DrawPath(path, stroke);
}
}
}
private static void OnFillChanged(BindableObject bindable, object oldValue, object newValue)
{
if (bindable is Shape shape)
{
shape.fillPaint?.Dispose();
shape.fillPaint = null;
}
OnGraphicsChanged(bindable, oldValue, newValue);
}
private static void OnStrokeChanged(BindableObject bindable, object oldValue, object newValue)
{
if (bindable is Shape shape)
{
shape.strokePaint?.Dispose();
shape.strokePaint = null;
}
OnGraphicsChanged(bindable, oldValue, newValue);
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Transforms/ScaleTransform.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public class ScaleTransform : Transform
{
//public double ScaleY { get; set; }
//public double ScaleX { get; set; }
//public double CenterY { get; set; }
//public double CenterX { get; set; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Brushes/LinearGradientBrush.cs<|end_filename|>
using SkiaSharp;
using SkiaSharp.Views.Forms;
using System.Linq;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
public class LinearGradientBrush : GradientBrush
{
private SKShader shader;
public LinearGradientBrush()
{
}
public LinearGradientBrush(Color startColor, Color endColor, double angle)
{
EndPoint = EndPointFromAngle(angle);
GradientStops.Add(new GradientStop(startColor, 0.0f));
GradientStops.Add(new GradientStop(endColor, 1.0f));
}
public LinearGradientBrush(Color startColor, Color endColor, Point startPoint, Point endPoint)
{
StartPoint = startPoint;
EndPoint = endPoint;
GradientStops.Add(new GradientStop(startColor, 0.0f));
GradientStops.Add(new GradientStop(endColor, 1.0f));
}
public LinearGradientBrush(GradientStopCollection gradientStopCollection)
: base(gradientStopCollection)
{
}
public LinearGradientBrush(GradientStopCollection gradientStopCollection, double angle)
: base(gradientStopCollection)
{
EndPoint = EndPointFromAngle(angle);
}
public LinearGradientBrush(GradientStopCollection gradientStopCollection, Point startPoint, Point endPoint)
: base(gradientStopCollection)
{
StartPoint = startPoint;
EndPoint = endPoint;
}
public Point StartPoint { get; set; }
public Point EndPoint { get; set; }
protected override SKShader GetShader(SKRect bounds)
{
if (shader != null)
{
return shader;
}
var mode = GetShaderTileMode();
var start = GetRelative(StartPoint.ToSKPoint(), bounds);
var end = GetRelative(EndPoint.ToSKPoint(), bounds);
var colors = GradientStops.Select(s => s.Color.ToSKColor()).ToArray();
var positions = GradientStops.Select(s => (float)s.Offset).ToArray();
shader = SKShader.CreateLinearGradient(start, end, colors, positions, mode);
return shader;
}
private Point EndPointFromAngle(double angle)
{
// Convert the angle from degrees to radians
angle = angle * (1.0 / 180.0) * System.Math.PI;
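// e.g. 0 degrees maps to the end point (1, 0) for a horizontal gradient, 90 degrees to (0, 1) for a vertical one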
return new Point(System.Math.Cos(angle), System.Math.Sin(angle));
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/GradientStop.cs<|end_filename|>
using SkiaSharp;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
public class GradientStop
{
public GradientStop()
{
}
public GradientStop(Color color, double offset)
{
Color = color;
Offset = offset;
}
public Color Color { get; set; } = Color.Transparent;
public double Offset { get; set; } = 0.0f;
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/PenLineCap.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public enum PenLineCap
{
Flat = 0,
Square = 1,
Round = 2,
Triangle = 3
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Transforms/SkewTransform.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public class SkewTransform : Transform
{
//public double CenterY { get; set; }
//public double CenterX { get; set; }
//public double AngleY { get; set; }
//public double AngleX { get; set; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Brushes/TileBrush.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public abstract class TileBrush : Brush
{
protected TileBrush()
{
}
//public Stretch Stretch { get; set; }
//public AlignmentY AlignmentY { get; set; }
//public AlignmentX AlignmentX { get; set; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/PointCollectionConverter.cs<|end_filename|>
using System;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
public class PointCollectionConverter : TypeConverter
{
public override object ConvertFromInvariantString(string value)
{
if (!string.IsNullOrWhiteSpace(value))
{
if (PointCollection.TryParse(value, out PointCollection points))
{
return points;
}
}
throw new InvalidOperationException($"Cannot convert \"{value}\" into {typeof(PointCollection)}.");
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/PointCollection.cs<|end_filename|>
using SkiaSharp;
using SkiaSharp.Views.Forms;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
[TypeConverter(typeof(PointCollectionConverter))]
public class PointCollection : List<Point>
{
public PointCollection()
{
}
public PointCollection(IEnumerable<Point> collection)
: base(collection)
{
}
public PointCollection(int capacity)
: base(capacity)
{
}
public IEnumerable<SKPoint> AsSKPointCollection()
=> this.Select(p => p.ToSKPoint());
public static bool TryParse(string value, out PointCollection pointCollection)
{
if (string.IsNullOrWhiteSpace(value))
{
pointCollection = null;
return false;
}
var collection = new PointCollection();
var points = value.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
foreach (var point in points)
{
var numbers = point.Split(new[] { ',' });
if (numbers.Length != 2)
{
pointCollection = null;
return false;
}
// parse coordinates with the invariant culture to match ConvertFromInvariantString
if (!double.TryParse(numbers[0], NumberStyles.Float, CultureInfo.InvariantCulture, out double x) ||
!double.TryParse(numbers[1], NumberStyles.Float, CultureInfo.InvariantCulture, out double y))
{
pointCollection = null;
return false;
}
collection.Add(new Point(x, y));
}
pointCollection = collection;
return true;
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Transforms/RotateTransform.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public sealed class RotateTransform : Transform
{
//public double CenterY { get; set; }
//public double CenterX { get; set; }
//public double Angle { get; set; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Transform.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public class Transform : GeneralTransform
{
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/MainPage.xaml.cs<|end_filename|>
using SkiaSharpDemo.Graphics;
using System;
using Xamarin.Forms;
namespace SkiaSharpDemo
{
public partial class MainPage : ContentPage
{
public MainPage()
{
InitializeComponent();
}
private void OnClick(object sender, EventArgs e)
{
//var s = gradientRect.Left;
//this.Animate("test", value =>
//{
// pinkLine.StrokeThickness = value * 20;
// gradientRect.Left = s + (value * 200);
//}, length: 1000, easing: Easing.CubicInOut);
//innerEllipse.Width = 200;
if (pinkLine.Stroke is SolidColorBrush pinkBrush)
{
pinkBrush.Color = Color.Maroon;
}
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Brushes/Brush.cs<|end_filename|>
using System;
using SkiaSharp;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
[TypeConverter(typeof(BrushConverter))]
public class Brush
{
private SKPaint paint;
protected Brush()
{
}
//public Transform Transform { get; set; } = ?;
//public Transform RelativeTransform { get; set; } = ?;
//public double Opacity { get; set; } = ?;
public virtual SKPaint GetPaint(SKRect bounds)
{
if (paint != null)
{
return paint;
}
paint = new SKPaint
{
IsAntialias = true,
FilterQuality = SKFilterQuality.High,
Color = SKColors.Transparent,
Style = SKPaintStyle.Fill
};
return paint;
}
public static bool TryParse(string value, out Brush brush)
{
try
{
var colorConverter = new ColorTypeConverter();
var color = (Color)colorConverter.ConvertFromInvariantString(value);
brush = new SolidColorBrush(color);
return true;
}
catch
{
brush = null;
return false;
}
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Transforms/MatrixTransform.cs<|end_filename|>
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
public class MatrixTransform : Transform
{
//public Matrix Matrix { get; set; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/GraphicsCanvasRenderer.cs<|end_filename|>
using System;
namespace SkiaSharpDemo.Graphics
{
public class GraphicsCanvasRenderer : IGraphicsCanvasRenderer
{
private readonly GraphicsElementCollection children;
private readonly Action onInvalidateSurface;
private int renderSuspendCount;
private bool renderPending;
public GraphicsCanvasRenderer(IGraphicsElementContainer container, Action onInvalidate)
{
children = new GraphicsElementCollection(container);
onInvalidateSurface = onInvalidate;
renderSuspendCount = 0;
renderPending = false;
}
public GraphicsElementCollection Children => children;
public void SuspendRender()
{
renderSuspendCount++;
}
public void ResumeRender(bool performRender = false)
{
if (renderSuspendCount > 0)
{
renderSuspendCount--;
}
if (renderSuspendCount == 0 && renderPending && performRender)
{
Invalidate();
}
}
public void Invalidate()
{
if (renderSuspendCount == 0)
{
onInvalidateSurface();
renderPending = false;
}
else
{
renderPending = true;
}
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Transforms/CompositeTransform.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public class CompositeTransform : Transform
{
//public double TranslateY { get; set; }
//public double TranslateX { get; set; }
//public double SkewY { get; set; }
//public double SkewX { get; set; }
//public double ScaleY { get; set; }
//public double ScaleX { get; set; }
//public double Rotation { get; set; }
//public double CenterY { get; set; }
//public double CenterX { get; set; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Shapes/Polygon.cs<|end_filename|>
using System.Linq;
using Xamarin.Forms;
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
public class Polygon : Shape
{
public static readonly BindableProperty PointsProperty = BindableProperty.Create(
nameof(Points), typeof(PointCollection), typeof(Polygon), new PointCollection(), propertyChanged: OnPathChanged);
public static readonly BindableProperty FillRuleProperty = BindableProperty.Create(
nameof(FillRule), typeof(FillRule), typeof(Polygon), FillRule.EvenOdd, propertyChanged: OnPathChanged);
private SKPath path;
public PointCollection Points
{
get { return (PointCollection)GetValue(PointsProperty); }
set { SetValue(PointsProperty, value); }
}
public FillRule FillRule
{
get { return (FillRule)GetValue(FillRuleProperty); }
set { SetValue(FillRuleProperty, value); }
}
public override SKPath GetPath()
{
if (path != null)
{
return path;
}
if (Points == null || Points.Count == 0)
{
return null;
}
var points = Points.AsSKPointCollection();
path = new SKPath();
path.MoveTo(points.First());
foreach (var point in points.Skip(1))
{
path.LineTo(point);
}
path.Close();
return path;
}
private static void OnPathChanged(BindableObject bindable, object oldValue, object newValue)
{
if (bindable is Polygon polygon)
{
polygon.path?.Dispose();
polygon.path = null;
}
OnGraphicsChanged(bindable, oldValue, newValue);
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Brushes/SolidColorBrush.cs<|end_filename|>
using SkiaSharp;
using SkiaSharp.Views.Forms;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
public class SolidColorBrush : Brush
{
public SolidColorBrush()
{
}
public SolidColorBrush(Color color)
{
Color = color;
}
public Color Color { get; set; } = Color.Transparent;
public override SKPaint GetPaint(SKRect bounds)
{
var paint = base.GetPaint(bounds);
paint.Color = Color.ToSKColor();
return paint;
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Brushes/BrushMappingMode.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public enum BrushMappingMode
{
Absolute = 0,
RelativeToBoundingBox = 1
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/GraphicsElementCollection.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
public class GraphicsElementCollection : IList<GraphicsElement>, IEnumerable<GraphicsElement>
{
private readonly List<GraphicsElement> items;
private readonly IGraphicsElementContainer container;
public GraphicsElementCollection(IGraphicsElementContainer elementContainer)
{
items = new List<GraphicsElement>();
container = elementContainer;
}
public int Count => items.Count;
public bool IsReadOnly => false;
public bool Contains(GraphicsElement item) => items.Contains(item);
public void CopyTo(GraphicsElement[] array, int arrayIndex) => items.CopyTo(array, arrayIndex);
public IEnumerator<GraphicsElement> GetEnumerator() => items.GetEnumerator();
IEnumerator IEnumerable.GetEnumerator() => items.GetEnumerator();
public int IndexOf(GraphicsElement item) => items.IndexOf(item);
public GraphicsElement this[int index]
{
get => items[index];
set
{
OnChildRemoved(items[index]);
items[index] = value;
OnChildAdded(value);
}
}
public void Add(GraphicsElement item)
{
items.Add(item);
OnChildAdded(item);
}
public void Clear()
{
var temp = items.ToArray();
foreach (var t in temp)
{
OnChildRemoved(t);
}
items.Clear();
}
public void Insert(int index, GraphicsElement item)
{
items.Insert(index, item);
OnChildAdded(item);
}
public bool Remove(GraphicsElement item)
{
var result = items.Remove(item);
if (result)
{
OnChildRemoved(item);
}
return result;
}
public void RemoveAt(int index)
{
OnChildRemoved(items[index]);
items.RemoveAt(index);
}
private void OnChildAdded(GraphicsElement element)
{
if (element != null)
{
if (element.Parent is IGraphicsElementContainer oldContainer)
{
oldContainer.Children.Remove(element);
}
if (container is Element containerElement)
{
element.Parent = containerElement;
}
}
}
private void OnChildRemoved(GraphicsElement element)
{
if (element != null)
{
element.Parent = null;
}
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/PenLineJoin.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public enum PenLineJoin
{
Miter = 0,
Bevel = 1,
Round = 2
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Brushes/GradientBrush.cs<|end_filename|>
using System;
using SkiaSharp;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
[ContentProperty(nameof(GradientStops))]
public class GradientBrush : Brush
{
protected GradientBrush()
{
}
protected GradientBrush(GradientStopCollection gradientStopCollection)
{
GradientStops = gradientStopCollection;
}
public GradientSpreadMethod SpreadMethod { get; set; } = GradientSpreadMethod.Pad;
public BrushMappingMode MappingMode { get; set; } = BrushMappingMode.RelativeToBoundingBox;
public GradientStopCollection GradientStops { get; set; } = new GradientStopCollection();
public override SKPaint GetPaint(SKRect bounds)
{
var paint = base.GetPaint(bounds);
paint.Color = SKColors.Black;
paint.Shader = GetShader(bounds);
return paint;
}
protected virtual SKShader GetShader(SKRect bounds)
{
return null;
}
protected SKShaderTileMode GetShaderTileMode()
{
var mode = SKShaderTileMode.Clamp;
switch (SpreadMethod)
{
case GradientSpreadMethod.Pad:
mode = SKShaderTileMode.Clamp;
break;
case GradientSpreadMethod.Reflect:
mode = SKShaderTileMode.Mirror;
break;
case GradientSpreadMethod.Repeat:
mode = SKShaderTileMode.Repeat;
break;
}
return mode;
}
protected SKPoint GetRelative(SKPoint point, SKRect bounds)
{
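// in relative mode the point is a fraction of the bounding box, e.g. (0.5, 0.5) inside 200x100 bounds becomes (100, 50)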
if (MappingMode == BrushMappingMode.RelativeToBoundingBox)
{
point = new SKPoint(point.X * bounds.Width, point.Y * bounds.Height);
}
return point;
}
protected double GetRelative(double value, double size)
{
if (MappingMode == BrushMappingMode.RelativeToBoundingBox)
{
value = value * size;
}
return value;
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Shapes/Rectangle.cs<|end_filename|>
using System.Runtime.CompilerServices;
using Xamarin.Forms;
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
public class Rectangle : Shape
{
public static readonly BindableProperty RadiusXProperty = BindableProperty.Create(
nameof(RadiusX), typeof(double), typeof(Rectangle), 0.0, propertyChanged: OnRadiusChanged);
public static readonly BindableProperty RadiusYProperty = BindableProperty.Create(
nameof(RadiusY), typeof(double), typeof(Rectangle), 0.0, propertyChanged: OnRadiusChanged);
private SKPath path;
public double RadiusX
{
get { return (double)GetValue(RadiusXProperty); }
set { SetValue(RadiusXProperty, value); }
}
public double RadiusY
{
get { return (double)GetValue(RadiusYProperty); }
set { SetValue(RadiusYProperty, value); }
}
public override SKPath GetPath()
{
if (path != null)
{
return path;
}
path = new SKPath();
var rect = SKRect.Create(0, 0, (float)Width, (float)Height);
if (RadiusX > 0 || RadiusY > 0)
{
path.AddRoundedRect(rect, (float)RadiusX, (float)RadiusY);
}
else
{
path.AddRect(rect);
}
return path;
}
protected override void OnPropertyChanged([CallerMemberName] string propertyName = null)
{
if (propertyName == WidthProperty.PropertyName || propertyName == HeightProperty.PropertyName)
{
path?.Dispose();
path = null;
}
base.OnPropertyChanged(propertyName);
}
private static void OnRadiusChanged(BindableObject bindable, object oldValue, object newValue)
{
if (bindable is Rectangle rect)
{
rect.path?.Dispose();
rect.path = null;
}
OnGraphicsChanged(bindable, oldValue, newValue);
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/GradientStopCollection.cs<|end_filename|>
using System.Collections.Generic;
namespace SkiaSharpDemo.Graphics
{
public class GradientStopCollection : List<GradientStop>
{
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/GraphicsElement.cs<|end_filename|>
using Xamarin.Forms;
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
[ContentProperty("Children")]
public class GraphicsElement : Element, IGraphicsElementContainer
{
public static readonly BindableProperty IsVisibileProperty = BindableProperty.Create(
nameof(IsVisibile), typeof(bool), typeof(GraphicsElement), true, propertyChanged: OnGraphicsChanged);
public static readonly BindableProperty LeftProperty = BindableProperty.Create(
nameof(Left), typeof(double), typeof(GraphicsElement), 0.0, propertyChanged: OnGraphicsChanged);
public static readonly BindableProperty TopProperty = BindableProperty.Create(
nameof(Top), typeof(double), typeof(GraphicsElement), 0.0, propertyChanged: OnGraphicsChanged);
public static readonly BindableProperty WidthProperty = BindableProperty.Create(
nameof(Width), typeof(double), typeof(GraphicsElement), 0.0, propertyChanged: OnGraphicsChanged);
public static readonly BindableProperty HeightProperty = BindableProperty.Create(
nameof(Height), typeof(double), typeof(GraphicsElement), 0.0, propertyChanged: OnGraphicsChanged);
public static readonly BindableProperty ClipToBoundsProperty = BindableProperty.Create(
nameof(ClipToBounds), typeof(bool), typeof(GraphicsElement), false, propertyChanged: OnGraphicsChanged);
private readonly GraphicsElementCollection children;
public GraphicsElement()
{
children = new GraphicsElementCollection(this);
}
public GraphicsElementCollection Children => children;
//public double Opacity { get; set; } = 1.0f;
//public Rect Clip { get; set; } = ?;
public bool IsVisibile
{
get { return (bool)GetValue(IsVisibileProperty); }
set { SetValue(IsVisibileProperty, value); }
}
public double Left
{
get { return (double)GetValue(LeftProperty); }
set { SetValue(LeftProperty, value); }
}
public double Top
{
get { return (double)GetValue(TopProperty); }
set { SetValue(TopProperty, value); }
}
public double Width
{
get { return (double)GetValue(WidthProperty); }
set { SetValue(WidthProperty, value); }
}
public double Height
{
get { return (double)GetValue(HeightProperty); }
set { SetValue(HeightProperty, value); }
}
public bool ClipToBounds
{
get { return (bool)GetValue(ClipToBoundsProperty); }
set { SetValue(ClipToBoundsProperty, value); }
}
public void Paint(SKCanvas canvas)
{
using (new SKAutoCanvasRestore(canvas, true))
{
var bounds = SKRect.Create((float)Left, (float)Top, (float)Width, (float)Height);
if (ClipToBounds)
{
canvas.ClipRect(bounds, SKClipOperation.Intersect, true);
}
canvas.Translate((float)Left, (float)Top);
OnPaint(canvas);
foreach (var child in children)
{
child.Paint(canvas);
}
}
}
protected virtual void OnPaint(SKCanvas canvas)
{
}
protected IGraphicsCanvasRenderer GetGraphicsCanvasRenderer()
{
var parent = Parent;
while (parent != null)
{
if (parent is IGraphicsCanvasRenderer renderer)
return renderer;
parent = parent.Parent;
}
return null;
}
protected static void InvalidateGraphicsCanvas(GraphicsElement element)
{
element.GetGraphicsCanvasRenderer()?.Invalidate();
}
protected static void OnGraphicsChanged(BindableObject bindable, object oldValue, object newValue)
{
if (bindable is GraphicsElement element)
{
InvalidateGraphicsCanvas(element);
}
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Shapes/Path.cs<|end_filename|>
using System;
using Xamarin.Forms;
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
public class Path : Shape
{
public static readonly BindableProperty DataProperty = BindableProperty.Create(
nameof(Data), typeof(string), typeof(Path), (string)null, propertyChanged: OnDataChanged);
private SKPath path;
public string Data
{
get { return (string)GetValue(DataProperty); }
set { SetValue(DataProperty, value); }
}
public override SKPath GetPath()
{
if (path != null)
{
return path;
}
if (string.IsNullOrWhiteSpace(Data))
{
return null;
}
var fillRule = SKPathFillType.EvenOdd;
var index = 0;
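// geometry strings may start with a WPF-style fill rule token, e.g. "F1 M0,0 L100,0 L100,100 Z" selects Winding (nonzero); "F0" or no prefix keeps EvenOdd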
// skip any leading space
while ((index < Data.Length) && char.IsWhiteSpace(Data, index))
{
index++;
}
// is there anything to look at?
if (index < Data.Length)
{
// if so, we only care if the first non-WhiteSpace char encountered is 'F'
if (Data[index] == 'F')
{
index++;
// since we found 'F' the next non-WhiteSpace char must be 0 or 1 - look for it.
while ((index < Data.Length) && char.IsWhiteSpace(Data, index))
{
index++;
}
// if we ran out of text, this is an error, because 'F' cannot be specified without 0 or 1
// also, if the next token isn't 0 or 1, this too is illegal
if ((index == Data.Length) || ((Data[index] != '0') && (Data[index] != '1')))
{
throw new FormatException("An illegal character was encountered while parsing the path data.");
}
fillRule = Data[index] == '0' ? SKPathFillType.EvenOdd : SKPathFillType.Winding;
// increment index to point to the next char
index++;
}
}
path = SKPath.ParseSvgPathData(Data.Substring(index));
path.FillType = fillRule;
return path;
}
private static void OnDataChanged(BindableObject bindable, object oldValue, object newValue)
{
if (bindable is Path pathShape)
{
pathShape.path?.Dispose();
pathShape.path = null;
}
OnGraphicsChanged(bindable, oldValue, newValue);
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/GraphicsCanvas.cs<|end_filename|>
using SkiaSharp;
using SkiaSharp.Views.Forms;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
[ContentProperty("Children")]
public class GraphicsCanvas : SKCanvasView, IGraphicsElementContainer, IGraphicsCanvasRenderer
{
private readonly GraphicsCanvasRenderer renderer;
public GraphicsCanvas()
{
renderer = new GraphicsCanvasRenderer(this, InvalidateSurface);
}
public GraphicsElementCollection Children => renderer.Children;
void IGraphicsCanvasRenderer.Invalidate() => renderer.Invalidate();
public void SuspendRender() => renderer.SuspendRender();
public void ResumeRender(bool performRender = false) => renderer.ResumeRender(performRender);
protected override void OnPaintSurface(SKPaintSurfaceEventArgs e)
{
base.OnPaintSurface(e);
var canvas = e.Surface.Canvas;
canvas.Clear(SKColors.Transparent);
// apply scaling
var scale = e.Info.Width / Width;
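// e.g. an 800px surface backing a 400-unit wide view gives scale = 2, so children keep drawing in view units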
canvas.Scale((float)scale);
foreach (var child in Children)
{
if (child.IsVisibile)
{
child.Paint(canvas);
}
}
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/IGraphicsCanvasRenderer.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public interface IGraphicsCanvasRenderer
{
GraphicsElementCollection Children { get; }
void Invalidate();
void SuspendRender();
void ResumeRender(bool performRender = false);
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Transforms/TransformGroup.cs<|end_filename|>
using SkiaSharp;
using System.Collections.Generic;
namespace SkiaSharpDemo.Graphics
{
public sealed class TransformGroup : Transform
{
//public TransformCollection Children { get; set; }
//public Matrix Value { get; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/AlignmentY.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public enum AlignmentY
{
Top = 0,
Center = 1,
Bottom = 2
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/AlignmentX.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public enum AlignmentX
{
Left = 0,
Center = 1,
Right = 2
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Stretch.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public enum Stretch
{
None = 0,
Fill = 1,
Uniform = 2,
UniformToFill = 3
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/IGraphicsElementContainer.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public interface IGraphicsElementContainer
{
GraphicsElementCollection Children { get; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Transforms/TranslateTransform.cs<|end_filename|>
namespace SkiaSharpDemo.Graphics
{
public class TranslateTransform : Transform
{
//public double Y { get; set; }
//public double X { get; set; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Shapes/Line.cs<|end_filename|>
using Xamarin.Forms;
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
public class Line : Shape
{
public static readonly BindableProperty X1Property = BindableProperty.Create(
nameof(X1), typeof(double), typeof(Line), 0.0, propertyChanged: OnLineChanged);
public static readonly BindableProperty Y1Property = BindableProperty.Create(
nameof(Y1), typeof(double), typeof(Line), 0.0, propertyChanged: OnLineChanged);
public static readonly BindableProperty X2Property = BindableProperty.Create(
nameof(X2), typeof(double), typeof(Line), 0.0, propertyChanged: OnLineChanged);
public static readonly BindableProperty Y2Property = BindableProperty.Create(
nameof(Y2), typeof(double), typeof(Line), 0.0, propertyChanged: OnLineChanged);
private SKPath path;
public double X1
{
get { return (double)GetValue(X1Property); }
set { SetValue(X1Property, value); }
}
public double Y1
{
get { return (double)GetValue(Y1Property); }
set { SetValue(Y1Property, value); }
}
public double X2
{
get { return (double)GetValue(X2Property); }
set { SetValue(X2Property, value); }
}
public double Y2
{
get { return (double)GetValue(Y2Property); }
set { SetValue(Y2Property, value); }
}
public override SKPath GetPath()
{
if (path != null)
{
return path;
}
path = new SKPath();
path.MoveTo((float)X1, (float)Y1);
path.LineTo((float)X2, (float)Y2);
return path;
}
private static void OnLineChanged(BindableObject bindable, object oldValue, object newValue)
{
if (bindable is Line line)
{
line.path?.Dispose();
line.path = null;
}
OnGraphicsChanged(bindable, oldValue, newValue);
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Brushes/ImageBrush.cs<|end_filename|>
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
public class ImageBrush : TileBrush
{
public ImageBrush()
{
}
//public ImageSource ImageSource { get; set; }
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Brushes/RadialGradientBrush.cs<|end_filename|>
using System;
using SkiaSharp;
using Xamarin.Forms;
namespace SkiaSharpDemo.Graphics
{
public class RadialGradientBrush : GradientBrush
{
private SKShader shader;
public RadialGradientBrush()
{
}
public RadialGradientBrush(Color startColor, Color endColor)
{
GradientStops.Add(new GradientStop(startColor, 0.0f));
GradientStops.Add(new GradientStop(endColor, 1.0f));
}
public RadialGradientBrush(GradientStopCollection gradientStopCollection)
: base(gradientStopCollection)
{
}
public Point Center { get; set; } = new Point(0.5, 0.5);
public Point GradientOrigin { get; set; } = new Point(0.5, 0.5);
public double RadiusX { get; set; } = 0.5f;
public double RadiusY { get; set; } = 0.5f;
protected override SKShader GetShader(SKRect bounds)
{
throw new NotImplementedException();
}
}
}
<|start_filename|>SkiaSharpDemo/SkiaSharpDemo/Graphics/Shapes/Ellipse.cs<|end_filename|>
using System.Runtime.CompilerServices;
using SkiaSharp;
namespace SkiaSharpDemo.Graphics
{
public class Ellipse : Shape
{
private SKPath path;
public Ellipse()
{
}
public override SKPath GetPath()
{
if (path != null)
{
return path;
}
path = new SKPath();
var rect = SKRect.Create(0, 0, (float)Width, (float)Height);
path.AddOval(rect);
return path;
}
protected override void OnPropertyChanged([CallerMemberName] string propertyName = null)
{
if (propertyName == WidthProperty.PropertyName || propertyName == HeightProperty.PropertyName)
{
path?.Dispose();
path = null;
}
base.OnPropertyChanged(propertyName);
}
}
}
| mattleibow/SkiaSharpGraphics |
<|start_filename|>jadx-core/src/test/java/jadx/tests/integration/others/TestCodeComments.java<|end_filename|>
package jadx.tests.integration.others;
import java.util.Arrays;
import java.util.Collections;
import org.junit.jupiter.api.Test;
import jadx.api.data.ICodeComment;
import jadx.api.data.IJavaNodeRef.RefType;
import jadx.api.data.impl.JadxCodeComment;
import jadx.api.data.impl.JadxCodeData;
import jadx.api.data.impl.JadxNodeRef;
import jadx.core.dex.nodes.ClassNode;
import jadx.tests.api.IntegrationTest;
import static jadx.tests.api.utils.assertj.JadxAssertions.assertThat;
public class TestCodeComments extends IntegrationTest {
public static class TestCls {
private int intField = 5;
public static class A {
}
public int test() {
System.out.println("Hello");
System.out.println("comment");
return intField;
}
}
@Test
public void test() {
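// offset of the instruction that receives the line comment (the second println); the value differs between Java and Dex input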
int insnOffset = isJavaInput() ? 13 : 11;
String baseClsId = TestCls.class.getName();
ICodeComment clsComment = new JadxCodeComment(JadxNodeRef.forCls(baseClsId), "class comment");
ICodeComment innerClsComment = new JadxCodeComment(JadxNodeRef.forCls(baseClsId + ".A"), "inner class comment");
ICodeComment fldComment = new JadxCodeComment(new JadxNodeRef(RefType.FIELD, baseClsId, "intField:I"), "field comment");
JadxNodeRef mthRef = new JadxNodeRef(RefType.METHOD, baseClsId, "test()I");
ICodeComment mthComment = new JadxCodeComment(mthRef, "method comment");
ICodeComment insnComment = new JadxCodeComment(mthRef, "insn comment", insnOffset);
JadxCodeData codeData = new JadxCodeData();
getArgs().setCodeData(codeData);
codeData.setComments(Arrays.asList(clsComment, innerClsComment, fldComment, mthComment, insnComment));
ClassNode cls = getClassNode(TestCls.class);
assertThat(cls)
.decompile()
.checkCodeOffsets()
.code()
.containsOne("// class comment")
.containsOne("// inner class comment")
.containsOne("// field comment")
.containsOne("// method comment")
.containsOne("System.out.println(\"comment\"); // insn comment");
String code = cls.getCode().getCodeStr();
assertThat(cls)
.reloadCode(this)
.isEqualTo(code);
ICodeComment updInsnComment = new JadxCodeComment(mthRef, "updated insn comment", insnOffset);
codeData.setComments(Collections.singletonList(updInsnComment));
assertThat(cls)
.reloadCode(this)
.containsOne("System.out.println(\"comment\"); // updated insn comment")
.doesNotContain("class comment")
.containsOne(" comment");
}
}
<|start_filename|>jadx-gui/src/main/java/jadx/gui/ui/dialog/CommentDialog.java<|end_filename|>
package jadx.gui.ui.dialog;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dialog;
import java.awt.Dimension;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.SwingConstants;
import javax.swing.WindowConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jadx.api.data.ICodeComment;
import jadx.api.data.impl.JadxCodeComment;
import jadx.api.data.impl.JadxCodeData;
import jadx.gui.settings.JadxProject;
import jadx.gui.ui.codearea.CodeArea;
import jadx.gui.utils.NLS;
import jadx.gui.utils.TextStandardActions;
import jadx.gui.utils.UiUtils;
public class CommentDialog extends JDialog {
private static final long serialVersionUID = -1865682124935757528L;
private static final Logger LOG = LoggerFactory.getLogger(CommentDialog.class);
public static void show(CodeArea codeArea, ICodeComment blankComment) {
ICodeComment existComment = searchForExistComment(codeArea, blankComment);
Dialog dialog;
if (existComment != null) {
dialog = new CommentDialog(codeArea, existComment, true);
} else {
dialog = new CommentDialog(codeArea, blankComment, false);
}
dialog.setVisible(true);
}
private static void updateCommentsData(CodeArea codeArea, Consumer<List<ICodeComment>> updater) {
try {
JadxProject project = codeArea.getProject();
JadxCodeData codeData = project.getCodeData();
if (codeData == null) {
codeData = new JadxCodeData();
}
List<ICodeComment> list = new ArrayList<>(codeData.getComments());
updater.accept(list);
Collections.sort(list);
codeData.setComments(list);
project.setCodeData(codeData);
} catch (Exception e) {
LOG.error("Comment action failed", e);
}
try {
// refresh code
codeArea.refreshClass();
} catch (Exception e) {
LOG.error("Failed to reload code", e);
}
}
private static ICodeComment searchForExistComment(CodeArea codeArea, ICodeComment blankComment) {
try {
JadxProject project = codeArea.getProject();
JadxCodeData codeData = project.getCodeData();
if (codeData == null || codeData.getComments().isEmpty()) {
return null;
}
for (ICodeComment comment : codeData.getComments()) {
if (Objects.equals(comment.getNodeRef(), blankComment.getNodeRef())
&& comment.getOffset() == blankComment.getOffset()
&& comment.getAttachType() == blankComment.getAttachType()) {
return comment;
}
}
} catch (Exception e) {
LOG.error("Error searching for exists comment", e);
}
return null;
}
private final transient CodeArea codeArea;
private final transient ICodeComment comment;
private final transient boolean updateComment;
private transient JTextArea commentArea;
public CommentDialog(CodeArea codeArea, ICodeComment comment, boolean updateComment) {
super(codeArea.getMainWindow());
this.codeArea = codeArea;
this.comment = comment;
this.updateComment = updateComment;
initUI();
}
private void apply() {
String newCommentStr = commentArea.getText().trim();
if (newCommentStr.isEmpty()) {
if (updateComment) {
remove();
} else {
cancel();
}
return;
}
ICodeComment newComment = new JadxCodeComment(comment.getNodeRef(),
newCommentStr, comment.getOffset(), comment.getAttachType());
if (updateComment) {
updateCommentsData(codeArea, list -> {
list.remove(comment);
list.add(newComment);
});
} else {
updateCommentsData(codeArea, list -> list.add(newComment));
}
dispose();
}
private void remove() {
updateCommentsData(codeArea, list -> list.removeIf(c -> c == comment));
dispose();
}
private void cancel() {
dispose();
}
private void initUI() {
commentArea = new JTextArea();
TextStandardActions.attach(commentArea);
commentArea.setEditable(true);
commentArea.setFont(codeArea.getMainWindow().getSettings().getFont());
commentArea.setAlignmentX(Component.LEFT_ALIGNMENT);
commentArea.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(KeyEvent e) {
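// Enter applies the comment, Shift+Enter or Ctrl+Enter inserts a newline, Escape cancels the dialog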
switch (e.getKeyCode()) {
case KeyEvent.VK_ENTER:
if (e.isShiftDown() || e.isControlDown()) {
commentArea.append("\n");
} else {
apply();
}
break;
case KeyEvent.VK_ESCAPE:
cancel();
break;
}
}
});
if (updateComment) {
commentArea.setText(comment.getComment());
}
JScrollPane textAreaScrollPane = new JScrollPane(commentArea);
textAreaScrollPane.setAlignmentX(LEFT_ALIGNMENT);
JLabel commentLabel = new JLabel(NLS.str("comment_dialog.label"), SwingConstants.LEFT);
JLabel usageLabel = new JLabel(NLS.str("comment_dialog.usage"), SwingConstants.LEFT);
JPanel mainPanel = new JPanel();
mainPanel.setLayout(new BoxLayout(mainPanel, BoxLayout.PAGE_AXIS));
mainPanel.add(commentLabel);
mainPanel.add(Box.createRigidArea(new Dimension(0, 5)));
mainPanel.add(textAreaScrollPane);
mainPanel.add(Box.createRigidArea(new Dimension(0, 5)));
mainPanel.add(usageLabel);
mainPanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
JPanel buttonPane = initButtonsPanel();
Container contentPane = getContentPane();
contentPane.add(mainPanel, BorderLayout.CENTER);
contentPane.add(buttonPane, BorderLayout.PAGE_END);
if (updateComment) {
setTitle(NLS.str("comment_dialog.title.update"));
} else {
setTitle(NLS.str("comment_dialog.title.add"));
}
pack();
if (!codeArea.getMainWindow().getSettings().loadWindowPos(this)) {
setSize(800, 140);
}
setLocationRelativeTo(null);
setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
setModalityType(ModalityType.APPLICATION_MODAL);
UiUtils.addEscapeShortCutToDispose(this);
}
protected JPanel initButtonsPanel() {
JButton cancelButton = new JButton(NLS.str("common_dialog.cancel"));
cancelButton.addActionListener(event -> cancel());
String applyStr = updateComment ? NLS.str("common_dialog.update") : NLS.str("common_dialog.add");
JButton renameBtn = new JButton(applyStr);
renameBtn.addActionListener(event -> apply());
getRootPane().setDefaultButton(renameBtn);
JButton removeBtn;
if (updateComment) {
removeBtn = new JButton(NLS.str("common_dialog.remove"));
removeBtn.addActionListener(event -> remove());
} else {
removeBtn = null;
}
JPanel buttonPane = new JPanel();
buttonPane.setLayout(new BoxLayout(buttonPane, BoxLayout.LINE_AXIS));
buttonPane.setBorder(BorderFactory.createEmptyBorder(0, 10, 10, 10));
buttonPane.add(Box.createRigidArea(new Dimension(5, 0)));
buttonPane.add(Box.createHorizontalGlue());
buttonPane.add(renameBtn);
if (removeBtn != null) {
buttonPane.add(Box.createRigidArea(new Dimension(10, 0)));
buttonPane.add(removeBtn);
}
buttonPane.add(Box.createRigidArea(new Dimension(10, 0)));
buttonPane.add(cancelButton);
return buttonPane;
}
@Override
public void dispose() {
codeArea.getMainWindow().getSettings().saveWindowPos(this);
super.dispose();
}
}
<|start_filename|>jadx-plugins/jadx-plugins-api/src/main/java/jadx/api/plugins/input/data/annotations/EncodedValue.java<|end_filename|>
package jadx.api.plugins.input.data.annotations;
import java.util.Objects;
import jadx.api.plugins.input.data.attributes.IJadxAttrType;
import jadx.api.plugins.input.data.attributes.IJadxAttribute;
import jadx.api.plugins.input.data.attributes.JadxAttrType;
public class EncodedValue implements IJadxAttribute {
public static final EncodedValue NULL = new EncodedValue(EncodedType.ENCODED_NULL, null);
private final EncodedType type;
private final Object value;
public EncodedValue(EncodedType type, Object value) {
this.type = type;
this.value = value;
}
public EncodedType getType() {
return type;
}
public Object getValue() {
return value;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EncodedValue that = (EncodedValue) o;
return type == that.getType() && Objects.equals(value, that.getValue());
}
@Override
public IJadxAttrType<? extends IJadxAttribute> getAttrType() {
return JadxAttrType.CONSTANT_VALUE;
}
@Override
public int hashCode() {
return Objects.hash(getType(), getValue());
}
@Override
public String toString() {
switch (type) {
case ENCODED_NULL:
return "null";
case ENCODED_STRING:
return (String) value;
case ENCODED_ARRAY:
return "[" + value + "]";
default:
return "{" + type + ": " + value + '}';
}
}
}
<|start_filename|>jadx-core/src/main/java/jadx/core/utils/DebugChecks.java<|end_filename|>
package jadx.core.utils;
import java.util.ArrayList;
import java.util.List;
import jadx.api.ICodeWriter;
import jadx.core.dex.attributes.AFlag;
import jadx.core.dex.attributes.AType;
import jadx.core.dex.attributes.nodes.PhiListAttr;
import jadx.core.dex.instructions.InsnType;
import jadx.core.dex.instructions.PhiInsn;
import jadx.core.dex.instructions.args.InsnArg;
import jadx.core.dex.instructions.args.InsnWrapArg;
import jadx.core.dex.instructions.args.RegisterArg;
import jadx.core.dex.instructions.args.SSAVar;
import jadx.core.dex.instructions.mods.TernaryInsn;
import jadx.core.dex.nodes.BlockNode;
import jadx.core.dex.nodes.InsnNode;
import jadx.core.dex.nodes.MethodNode;
import jadx.core.dex.visitors.IDexTreeVisitor;
import jadx.core.dex.visitors.PrepareForCodeGen;
import jadx.core.dex.visitors.RenameVisitor;
import jadx.core.utils.exceptions.JadxRuntimeException;
/**
* Check invariants and information consistency for registers and SSA variables
*/
public class DebugChecks {
public static boolean /* not final! */ checksEnabled = false;
public static void runChecksAfterVisitor(MethodNode mth, IDexTreeVisitor visitor) {
Class<? extends IDexTreeVisitor> visitorCls = visitor.getClass();
if (visitorCls == PrepareForCodeGen.class || visitorCls == RenameVisitor.class) {
return;
}
try {
checkMethod(mth);
} catch (Exception e) {
throw new JadxRuntimeException("Debug check failed after visitor: " + visitorCls.getSimpleName(), e);
}
}
public static void checkMethod(MethodNode mth) {
List<BlockNode> basicBlocks = mth.getBasicBlocks();
if (Utils.isEmpty(basicBlocks)) {
return;
}
for (BlockNode block : basicBlocks) {
for (InsnNode insn : block.getInstructions()) {
checkInsn(mth, insn);
}
}
checkSSAVars(mth);
// checkPHI(mth);
}
private static void checkInsn(MethodNode mth, InsnNode insn) {
if (insn.getResult() != null) {
checkVar(mth, insn, insn.getResult());
}
for (InsnArg arg : insn.getArguments()) {
if (arg instanceof RegisterArg) {
checkVar(mth, insn, (RegisterArg) arg);
} else if (arg.isInsnWrap()) {
InsnNode wrapInsn = ((InsnWrapArg) arg).getWrapInsn();
checkInsn(mth, wrapInsn);
}
}
if (insn instanceof TernaryInsn) {
TernaryInsn ternaryInsn = (TernaryInsn) insn;
for (RegisterArg arg : ternaryInsn.getCondition().getRegisterArgs()) {
checkVar(mth, insn, arg);
}
}
}
private static void checkVar(MethodNode mth, InsnNode insn, RegisterArg reg) {
checkRegisterArg(mth, reg);
SSAVar sVar = reg.getSVar();
if (sVar == null) {
if (Utils.notEmpty(mth.getSVars())) {
throw new JadxRuntimeException("Null SSA var in " + insn + ", mth: " + mth);
}
return;
}
List<RegisterArg> useList = sVar.getUseList();
boolean assignReg = insn.getResult() == reg;
if (!assignReg && !Utils.containsInListByRef(useList, reg)) {
throw new JadxRuntimeException("Incorrect use list in ssa var: " + sVar + ", register not listed."
+ ICodeWriter.NL + " insn: " + insn);
}
for (RegisterArg useArg : useList) {
checkRegisterArg(mth, useArg);
}
}
private static void checkSSAVars(MethodNode mth) {
for (SSAVar ssaVar : mth.getSVars()) {
RegisterArg assignArg = ssaVar.getAssign();
if (assignArg.contains(AFlag.REMOVE)) {
// ignore removed vars
continue;
}
InsnNode assignInsn = assignArg.getParentInsn();
if (assignInsn != null) {
if (insnMissing(mth, assignInsn)) {
throw new JadxRuntimeException("Insn not found for assign arg in SSAVar: " + ssaVar + ", insn: " + assignInsn);
}
RegisterArg resArg = assignInsn.getResult();
if (resArg == null) {
throw new JadxRuntimeException("SSA assign insn result missing. SSAVar: " + ssaVar + ", insn: " + assignInsn);
}
SSAVar assignVar = resArg.getSVar();
if (!assignVar.equals(ssaVar)) {
throw new JadxRuntimeException("Unexpected SSAVar in assign. "
+ "Expected: " + ssaVar + ", got: " + assignVar + ", insn: " + assignInsn);
}
}
for (RegisterArg arg : ssaVar.getUseList()) {
InsnNode useInsn = arg.getParentInsn();
if (useInsn == null) {
throw new JadxRuntimeException("Parent insn can't be null for arg in use list of SSAVar: " + ssaVar);
}
if (insnMissing(mth, useInsn)) {
throw new JadxRuntimeException("Insn not found for use arg for SSAVar: " + ssaVar + ", insn: " + useInsn);
}
int argIndex = useInsn.getArgIndex(arg);
if (argIndex == -1) {
throw new JadxRuntimeException("Use arg not found in insn for SSAVar: " + ssaVar + ", insn: " + useInsn);
}
InsnArg foundArg = useInsn.getArg(argIndex);
if (!foundArg.equals(arg)) {
throw new JadxRuntimeException(
"Incorrect use arg in insn for SSAVar: " + ssaVar + ", insn: " + useInsn + ", arg: " + foundArg);
}
}
}
}
private static boolean insnMissing(MethodNode mth, InsnNode insn) {
if (insn.contains(AFlag.HIDDEN)) {
// skip search
return false;
}
BlockNode block = BlockUtils.getBlockByInsn(mth, insn);
return block == null;
}
private static void checkRegisterArg(MethodNode mth, RegisterArg reg) {
InsnNode parentInsn = reg.getParentInsn();
if (parentInsn == null) {
if (reg.contains(AFlag.METHOD_ARGUMENT)) {
return;
}
throw new JadxRuntimeException("Null parentInsn for reg: " + reg);
}
if (!parentInsn.contains(AFlag.HIDDEN)) {
if (parentInsn.getResult() != reg && !parentInsn.containsArg(reg)) {
throw new JadxRuntimeException("Incorrect parentInsn: " + parentInsn + ", must contains arg: " + reg);
}
BlockNode parentInsnBlock = BlockUtils.getBlockByInsn(mth, parentInsn);
if (parentInsnBlock == null) {
throw new JadxRuntimeException("Parent insn not found in blocks tree for: " + reg
+ ICodeWriter.NL + " insn: " + parentInsn);
}
}
}
private static void checkPHI(MethodNode mth) {
for (BlockNode block : mth.getBasicBlocks()) {
List<PhiInsn> phis = new ArrayList<>();
for (InsnNode insn : block.getInstructions()) {
if (insn.getType() == InsnType.PHI) {
PhiInsn phi = (PhiInsn) insn;
phis.add(phi);
if (phi.getArgsCount() == 0) {
throw new JadxRuntimeException("No args and binds in PHI");
}
for (InsnArg arg : insn.getArguments()) {
if (arg instanceof RegisterArg) {
BlockNode b = phi.getBlockByArg((RegisterArg) arg);
if (b == null) {
throw new JadxRuntimeException("Predecessor block not found");
}
} else {
throw new JadxRuntimeException("Not register in phi insn");
}
}
}
}
PhiListAttr phiListAttr = block.get(AType.PHI_LIST);
if (phiListAttr == null) {
if (!phis.isEmpty()) {
throw new JadxRuntimeException("Missing PHI list attribute");
}
} else {
List<PhiInsn> phiList = phiListAttr.getList();
if (phiList.isEmpty()) {
throw new JadxRuntimeException("Empty PHI list attribute");
}
if (!phis.containsAll(phiList) || !phiList.containsAll(phis)) {
throw new JadxRuntimeException("Instructions not match");
}
}
}
for (SSAVar ssaVar : mth.getSVars()) {
for (PhiInsn usedInPhi : ssaVar.getUsedInPhi()) {
boolean found = false;
for (RegisterArg useArg : ssaVar.getUseList()) {
InsnNode parentInsn = useArg.getParentInsn();
if (parentInsn != null && parentInsn == usedInPhi) {
found = true;
break;
}
}
if (!found) {
throw new JadxRuntimeException("Used in phi incorrect");
}
}
}
}
}
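// Illustrative usage sketch (not part of the original file): 'checksEnabled' is deliberately
// non-final so a debugging session can switch the checks on; only the API shown above is used,
// and the helper/method names in this sketch are assumptions for demonstration.
class DebugChecksUsageSketch {
    static void verify(MethodNode mth) {
        DebugChecks.checksEnabled = true; // turn the consistency checks on globally
        DebugChecks.checkMethod(mth); // throws JadxRuntimeException if an SSA/register invariant is broken
    }
}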
<|start_filename|>jadx-core/src/main/java/jadx/api/data/impl/JadxNodeRef.java<|end_filename|>
package jadx.api.data.impl;
import java.util.Comparator;
import java.util.Objects;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import jadx.api.JavaClass;
import jadx.api.JavaField;
import jadx.api.JavaMethod;
import jadx.api.JavaNode;
import jadx.api.data.IJavaNodeRef;
public class JadxNodeRef implements IJavaNodeRef {
@Nullable
public static JadxNodeRef forJavaNode(JavaNode javaNode) {
if (javaNode instanceof JavaClass) {
return forCls((JavaClass) javaNode);
}
if (javaNode instanceof JavaMethod) {
return forMth((JavaMethod) javaNode);
}
if (javaNode instanceof JavaField) {
return forFld((JavaField) javaNode);
}
return null;
}
public static JadxNodeRef forCls(JavaClass cls) {
return new JadxNodeRef(RefType.CLASS, cls.getClassNode().getClassInfo().getFullName(), null);
}
public static JadxNodeRef forCls(String clsFullName) {
return new JadxNodeRef(RefType.CLASS, clsFullName, null);
}
public static JadxNodeRef forMth(JavaMethod mth) {
return new JadxNodeRef(RefType.METHOD,
mth.getDeclaringClass().getClassNode().getClassInfo().getFullName(),
mth.getMethodNode().getMethodInfo().getShortId());
}
public static JadxNodeRef forFld(JavaField fld) {
return new JadxNodeRef(RefType.FIELD,
fld.getDeclaringClass().getClassNode().getClassInfo().getFullName(),
fld.getFieldNode().getFieldInfo().getShortId());
}
private RefType refType;
private String declClass;
@Nullable
private String shortId;
public JadxNodeRef(RefType refType, String declClass, @Nullable String shortId) {
this.refType = refType;
this.declClass = declClass;
this.shortId = shortId;
}
public JadxNodeRef() {
// for json deserialization
}
@Override
public RefType getType() {
return refType;
}
public void setRefType(RefType refType) {
this.refType = refType;
}
@Override
public String getDeclaringClass() {
return declClass;
}
public void setDeclClass(String declClass) {
this.declClass = declClass;
}
@Nullable
@Override
public String getShortId() {
return shortId;
}
public void setShortId(@Nullable String shortId) {
this.shortId = shortId;
}
private static final Comparator<IJavaNodeRef> COMPARATOR = Comparator
.comparing(IJavaNodeRef::getType)
.thenComparing(IJavaNodeRef::getDeclaringClass)
.thenComparing(IJavaNodeRef::getShortId);
@Override
public int compareTo(@NotNull IJavaNodeRef other) {
return COMPARATOR.compare(this, other);
}
@Override
public int hashCode() {
return Objects.hash(refType, declClass, shortId);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof JadxNodeRef)) {
return false;
}
JadxNodeRef that = (JadxNodeRef) o;
return refType == that.refType
&& Objects.equals(declClass, that.declClass)
&& Objects.equals(shortId, that.shortId);
}
@Override
public String toString() {
switch (refType) {
case CLASS:
return declClass;
case FIELD:
case METHOD:
return declClass + "->" + shortId;
default:
return "unknown node ref type";
}
}
}
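// Illustrative sketch (not part of the original file): shows how a reference can be built from a
// plain class name and rendered; "com.example.Foo" is a made-up example value.
class JadxNodeRefSketch {
    static String describe() {
        JadxNodeRef ref = JadxNodeRef.forCls("com.example.Foo");
        return ref.toString(); // -> "com.example.Foo" (CLASS refs print just the declaring class)
    }
}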
<|start_filename|>jadx-gui/src/main/java/jadx/gui/utils/search/ResourceIndex.java<|end_filename|>
package jadx.gui.utils.search;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.swing.tree.TreeNode;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Flowable;
import io.reactivex.FlowableEmitter;
import jadx.api.ResourceFile;
import jadx.api.ResourceType;
import jadx.core.utils.files.FileUtils;
import jadx.gui.treemodel.JResSearchNode;
import jadx.gui.treemodel.JResource;
import jadx.gui.utils.CacheObject;
import static jadx.core.utils.StringUtils.countLinesByPos;
import static jadx.core.utils.StringUtils.getLine;
public class ResourceIndex {
private final List<JResource> resNodes = new ArrayList<>();
private final Set<String> extSet = new HashSet<>();
private CacheObject cache;
private String fileExts;
private boolean anyExt;
private int sizeLimit;
public ResourceIndex(CacheObject cache) {
this.cache = cache;
}
private void search(final JResource resNode,
FlowableEmitter<JResSearchNode> emitter,
SearchSettings searchSettings) {
int pos = 0;
int line = 0;
int lastPos = 0;
int lastLineOccurred = -1;
JResSearchNode lastNode = null;
int searchStrLen = searchSettings.getSearchString().length();
String content;
try {
content = resNode.getContent();
} catch (Exception e) {
e.printStackTrace();
return;
}
do {
searchSettings.setStartPos(lastPos);
pos = searchSettings.find(content);
if (pos > -1) {
line += countLinesByPos(content, pos, lastPos);
lastPos = pos + searchStrLen;
String lineText = getLine(content, pos, lastPos);
if (lastLineOccurred != line) {
lastLineOccurred = line;
if (lastNode != null) {
emitter.onNext(lastNode);
}
lastNode = new JResSearchNode(resNode, lineText.trim(), line + 1, pos);
}
} else {
if (lastNode != null) { // commit the final result node.
emitter.onNext(lastNode);
}
break;
}
} while (!emitter.isCancelled() && lastPos < content.length());
}
public Flowable<JResSearchNode> search(SearchSettings settings) {
refreshSettings();
if (resNodes.size() == 0) {
return Flowable.empty();
}
return Flowable.create(emitter -> {
for (JResource resNode : resNodes) {
if (!emitter.isCancelled()) {
search(resNode, emitter, settings);
}
}
emitter.onComplete();
}, BackpressureStrategy.BUFFER);
}
public void index() {
refreshSettings();
}
private void clear() {
anyExt = false;
sizeLimit = -1;
fileExts = "";
extSet.clear();
resNodes.clear();
}
private void traverseTree(TreeNode root, ZipFile zip) {
for (int i = 0; i < root.getChildCount(); i++) {
TreeNode node = root.getChildAt(i);
if (node instanceof JResource) {
JResource resNode = (JResource) node;
try {
resNode.loadNode();
} catch (Exception e) {
e.printStackTrace();
return;
}
ResourceFile resFile = resNode.getResFile();
if (resFile == null) {
traverseTree(node, zip);
} else {
if (resFile.getType() == ResourceType.ARSC && shouldSearchXML()) {
resFile.loadContent();
resNode.getFiles().forEach(t -> traverseTree(t, null));
} else {
filter(resNode, zip);
}
}
}
}
}
private boolean shouldSearchXML() {
return anyExt || fileExts.contains(".xml");
}
private ZipFile getZipFile(TreeNode res) {
for (int i = 0; i < res.getChildCount(); i++) {
TreeNode node = res.getChildAt(i);
if (node instanceof JResource) {
JResource resNode = (JResource) node;
try {
resNode.loadNode();
} catch (Exception e) {
e.printStackTrace();
return null;
}
ResourceFile file = resNode.getResFile();
if (file == null) {
ZipFile zip = getZipFile(resNode);
if (zip != null) {
return zip;
}
} else {
File zfile = file.getZipRef().getZipFile();
if (FileUtils.isZipFile(zfile)) {
try {
return new ZipFile(zfile);
} catch (IOException ignore) {
}
}
}
}
}
return null;
}
private void filter(JResource resNode, ZipFile zip) {
ResourceFile resFile = resNode.getResFile();
if (JResource.isSupportedForView(resFile.getType())) {
long size = -1;
if (zip != null) {
ZipEntry entry = zip.getEntry(resFile.getOriginalName());
if (entry != null) {
size = entry.getSize();
}
}
if (size == -1) { // resource from ARSC is unknown size
try {
size = resNode.getContent().length();
} catch (Exception ignore) {
return;
}
}
if (size <= sizeLimit) {
if (!anyExt) {
for (String ext : extSet) {
if (resFile.getOriginalName().endsWith(ext)) {
resNodes.add(resNode);
break;
}
}
} else {
resNodes.add(resNode);
}
}
}
}
private void refreshSettings() {
int size = cache.getJadxSettings().getSrhResourceSkipSize() * 10240;
if (size != sizeLimit
|| !cache.getJadxSettings().getSrhResourceFileExt().equals(fileExts)) {
clear();
sizeLimit = size;
fileExts = cache.getJadxSettings().getSrhResourceFileExt();
String[] exts = fileExts.split("\\|");
for (String ext : exts) {
ext = ext.trim();
if (!ext.isEmpty()) {
anyExt = ext.equals("*");
if (anyExt) {
break;
}
extSet.add(ext);
}
}
try {
ZipFile zipFile = getZipFile(cache.getJRoot());
traverseTree(cache.getJRoot(), zipFile); // reindex
if (zipFile != null) {
zipFile.close();
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
<|start_filename|>jadx-core/src/main/java/jadx/core/utils/ListUtils.java<|end_filename|>
package jadx.core.utils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import org.jetbrains.annotations.Nullable;
import jadx.core.dex.nodes.BlockNode;
public class ListUtils {
public static <T> boolean isSingleElement(@Nullable List<T> list, T obj) {
if (list == null || list.size() != 1) {
return false;
}
return Objects.equals(list.get(0), obj);
}
// Note: compares size plus element membership only, so lists that differ just in how duplicates are distributed can still compare equal
public static <T> boolean unorderedEquals(List<T> first, List<T> second) {
if (first.size() != second.size()) {
return false;
}
return first.containsAll(second);
}
public static <T, R> List<R> map(Collection<T> list, Function<T, R> mapFunc) {
if (list == null || list.isEmpty()) {
return Collections.emptyList();
}
List<R> result = new ArrayList<>(list.size());
for (T t : list) {
result.add(mapFunc.apply(t));
}
return result;
}
public static <T> T first(List<T> list) {
return list.get(0);
}
public static <T> T last(List<T> list) {
return list.get(list.size() - 1);
}
public static List<BlockNode> distinctList(List<BlockNode> list) {
return new ArrayList<>(new LinkedHashSet<>(list));
}
}
| zhongqingsong/jadx |
<|start_filename|>release/PPA/debian.ex/mouse-speed.cron.d.ex<|end_filename|>
#
# Regular cron jobs for the mouse-speed package
#
0 4 * * * root [ -x /usr/bin/mouse-speed_maintenance ] && /usr/bin/mouse-speed_maintenance
<|start_filename|>release/PPA/debian.ex/menu.ex<|end_filename|>
?package(mouse-speed):needs="X11|text|vc|wm" section="Applications/see-menu-manual"\
title="mouse-speed" command="/usr/bin/mouse-speed"
| rubo77/mouse-speed |
<|start_filename|>dubbo-filter/dubbo-filter-cache/src/main/java/org/apache/dubbo/cache/support/expiring/ExpiringCache.java<|end_filename|>
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.cache.support.expiring;
import org.apache.dubbo.cache.Cache;
import org.apache.dubbo.common.URL;
import java.util.Map;
/**
* ExpiringCache - With the characteristic of expiration time.
*/
/**
* This class stores cache values with an expiration time. If a service, method, consumer or provider is configured with the key <b>cache</b>
* and the value <b>expiring</b>, dubbo initializes an instance of this class using {@link ExpiringCacheFactory} to store a method's return value
* and serve it from the cache without making the method call.
* <pre>
* e.g. 1) <dubbo:service cache="expiring" cache.seconds="60" cache.interval="10"/>
* 2) <dubbo:consumer cache="expiring" />
* </pre>
* <li>It uses the <b>cache.seconds</b> value of the constructor's url argument to decide the time to live of a cached object. Its default value is 180 seconds.</li>
* <li>It uses the <b>cache.interval</b> value of the constructor's url argument as the cache expiration check interval. Its default value is 4 seconds.</li>
* @see Cache
* @see ExpiringCacheFactory
* @see org.apache.dubbo.cache.support.AbstractCacheFactory
* @see org.apache.dubbo.cache.filter.CacheFilter
*/
public class ExpiringCache implements Cache {
private final Map<Object, Object> store;
public ExpiringCache(URL url) {
// cache time (second)
final int secondsToLive = url.getParameter("cache.seconds", 180);
// Cache check interval (second)
final int intervalSeconds = url.getParameter("cache.interval", 4);
ExpiringMap<Object, Object> expiringMap = new ExpiringMap<Object, Object>(secondsToLive, intervalSeconds);
expiringMap.getExpireThread().startExpiryIfNotStarted();
this.store = expiringMap;
}
/**
* API to store a value against a key.
* @param key Unique identifier for the object being stored.
* @param value Value being stored
*/
@Override
public void put(Object key, Object value) {
store.put(key, value);
}
/**
* API to return the stored value for a key.
* @param key Unique identifier for cache lookup
* @return the stored object for the key
*/
@Override
public Object get(Object key) {
return store.get(key);
}
}
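// Illustrative usage sketch (not part of the original file): the URL string and key/value below
// are assumptions for demonstration; only the constructor and Cache methods shown above are used.
class ExpiringCacheUsageSketch {
    static Object demo() {
        URL url = URL.valueOf("dubbo://127.0.0.1:20880/DemoService?cache=expiring&cache.seconds=60&cache.interval=10");
        Cache cache = new ExpiringCache(url); // entries live ~60 seconds, checked every 10 seconds
        cache.put("answer", 42);
        return cache.get("answer"); // returns 42 while the entry has not yet expired
    }
}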
<|start_filename|>dubbo-monitor/dubbo-monitor-default/src/test/java/org/apache/dubbo/monitor/dubbo/DubboMonitorFactoryTest.java<|end_filename|>
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.monitor.dubbo;
import org.apache.dubbo.common.Constants;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.monitor.Monitor;
import org.apache.dubbo.rpc.Invoker;
import org.apache.dubbo.rpc.ProxyFactory;
import org.apache.dubbo.rpc.protocol.dubbo.DubboProtocol;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.verify;
import static org.mockito.MockitoAnnotations.initMocks;
public class DubboMonitorFactoryTest {
private DubboMonitorFactory dubboMonitorFactory;
@Mock
private ProxyFactory proxyFactory;
@BeforeEach
public void setUp() throws Exception {
initMocks(this);
this.dubboMonitorFactory = new DubboMonitorFactory();
this.dubboMonitorFactory.setProtocol(new DubboProtocol());
this.dubboMonitorFactory.setProxyFactory(proxyFactory);
}
@Test
public void testCreateMonitor() {
URL urlWithoutPath = URL.valueOf("http://10.10.10.11");
Monitor monitor = dubboMonitorFactory.createMonitor(urlWithoutPath);
assertThat(monitor, not(nullValue()));
URL urlWithFilterKey = URL.valueOf("http://10.10.10.11/").addParameter(Constants.REFERENCE_FILTER_KEY, "testFilter");
monitor = dubboMonitorFactory.createMonitor(urlWithFilterKey);
assertThat(monitor, not(nullValue()));
ArgumentCaptor<Invoker> invokerArgumentCaptor = ArgumentCaptor.forClass(Invoker.class);
verify(proxyFactory, atLeastOnce()).getProxy(invokerArgumentCaptor.capture());
Invoker invoker = invokerArgumentCaptor.getValue();
assertThat(invoker.getUrl().getParameter(Constants.REFERENCE_FILTER_KEY), containsString("testFilter"));
}
}
| gongbinglai/incubator-dubbo |
<|start_filename|>Library/src/main/java/com/example/zhouwei/library/CustomPopWindow.java<|end_filename|>
package com.example.zhouwei.library;
import android.app.Activity;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.PopupWindow;
/**
*
* Custom PopupWindow class that encapsulates common PopupWindow properties and supports chained calls via the Builder pattern
* Created by zhouwei on 16/11/28.
*/
public class CustomPopWindow implements PopupWindow.OnDismissListener{
private static final String TAG = "CustomPopWindow";
private static final float DEFAULT_ALPHA = 0.7f;
private Context mContext;
private int mWidth;
private int mHeight;
private boolean mIsFocusable = true;
private boolean mIsOutside = true;
private int mResLayoutId = -1;
private View mContentView;
private PopupWindow mPopupWindow;
private int mAnimationStyle = -1;
private boolean mClippEnable = true;//default is true
private boolean mIgnoreCheekPress = false;
private int mInputMode = -1;
private PopupWindow.OnDismissListener mOnDismissListener;
private int mSoftInputMode = -1;
private boolean mTouchable = true;//default is true
private View.OnTouchListener mOnTouchListener;
private Window mWindow;//window of the current Activity
/**
* Whether the background is dimmed while the PopupWindow is shown; by default it is not dimmed.
*/
private boolean mIsBackgroundDark = false;
private float mBackgroundDrakValue = 0;// background dim value, 0 - 1
/**
* Whether touching outside the PopupWindow is allowed to dismiss it
*/
private boolean enableOutsideTouchDisMiss = true;// by default, touching outside the popup dismisses it
private CustomPopWindow(Context context){
mContext = context;
}
public int getWidth() {
return mWidth;
}
public int getHeight() {
return mHeight;
}
/**
*
* @param anchor
* @param xOff
* @param yOff
* @return
*/
public CustomPopWindow showAsDropDown(View anchor, int xOff, int yOff){
if(mPopupWindow!=null){
mPopupWindow.showAsDropDown(anchor,xOff,yOff);
}
return this;
}
public CustomPopWindow showAsDropDown(View anchor){
if(mPopupWindow!=null){
mPopupWindow.showAsDropDown(anchor);
}
return this;
}
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
public CustomPopWindow showAsDropDown(View anchor, int xOff, int yOff, int gravity){
if(mPopupWindow!=null){
mPopupWindow.showAsDropDown(anchor,xOff,yOff,gravity);
}
return this;
}
/**
* Position relative to the parent view (set via Gravity.CENTER, Gravity.BOTTOM, etc.); exact offset coordinates can also be given
* @param parent parent view
* @param gravity
* @param x the popup's x location offset
* @param y the popup's y location offset
* @return
*/
public CustomPopWindow showAtLocation(View parent, int gravity, int x, int y){
if(mPopupWindow!=null){
mPopupWindow.showAtLocation(parent,gravity,x,y);
}
return this;
}
/**
* Apply the configured properties to the PopupWindow
* @param popupWindow
*/
private void apply(PopupWindow popupWindow){
popupWindow.setClippingEnabled(mClippEnable);
if(mIgnoreCheekPress){
popupWindow.setIgnoreCheekPress();
}
if(mInputMode!=-1){
popupWindow.setInputMethodMode(mInputMode);
}
if(mSoftInputMode!=-1){
popupWindow.setSoftInputMode(mSoftInputMode);
}
if(mOnDismissListener!=null){
popupWindow.setOnDismissListener(mOnDismissListener);
}
if(mOnTouchListener!=null){
popupWindow.setTouchInterceptor(mOnTouchListener);
}
popupWindow.setTouchable(mTouchable);
}
private PopupWindow build(){
if(mContentView == null){
mContentView = LayoutInflater.from(mContext).inflate(mResLayoutId,null);
}
// 2017.3.17 add
// get the window of the current Activity
Activity activity = (Activity) mContentView.getContext();
if(activity!=null && mIsBackgroundDark){
//use the configured value if it is within the range 0 - 1, otherwise fall back to the default
final float alpha = (mBackgroundDrakValue > 0 && mBackgroundDrakValue < 1) ? mBackgroundDrakValue : DEFAULT_ALPHA;
mWindow = activity.getWindow();
WindowManager.LayoutParams params = mWindow.getAttributes();
params.alpha = alpha;
mWindow.addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
mWindow.setAttributes(params);
}
if(mWidth != 0 && mHeight!=0 ){
mPopupWindow = new PopupWindow(mContentView,mWidth,mHeight);
}else{
mPopupWindow = new PopupWindow(mContentView, ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
}
if(mAnimationStyle!=-1){
mPopupWindow.setAnimationStyle(mAnimationStyle);
}
apply(mPopupWindow);//apply the configured properties
if(mWidth == 0 || mHeight == 0){
mPopupWindow.getContentView().measure(View.MeasureSpec.UNSPECIFIED, View.MeasureSpec.UNSPECIFIED);
//if the caller did not set a width/height, measure the content view and use the measured size
mWidth = mPopupWindow.getContentView().getMeasuredWidth();
mHeight = mPopupWindow.getContentView().getMeasuredHeight();
}
// register the dismiss listener
mPopupWindow.setOnDismissListener(this);
//2017.6.27 add: fix the bug where setOutsideTouchable(false) still let outside touches dismiss the popup
// decide whether touching outside the PopupWindow should dismiss it
if(!enableOutsideTouchDisMiss){
//note: these three properties must be set together or the popup cannot be dismissed; the following three lines work on Android 4.4, but on Android 6.0+ they no longer take effect, so the interceptor below is required
mPopupWindow.setFocusable(true);
mPopupWindow.setOutsideTouchable(false);
mPopupWindow.setBackgroundDrawable(null);
//note: the following three calls are on the contentView, not the PopupWindow
mPopupWindow.getContentView().setFocusable(true);
mPopupWindow.getContentView().setFocusableInTouchMode(true);
mPopupWindow.getContentView().setOnKeyListener(new View.OnKeyListener() {
@Override
public boolean onKey(View v, int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
mPopupWindow.dismiss();
return true;
}
return false;
}
});
//on Android 6.0 and above this can only be solved by intercepting touch events
mPopupWindow.setTouchInterceptor(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
final int x = (int) event.getX();
final int y = (int) event.getY();
if ((event.getAction() == MotionEvent.ACTION_DOWN)
&& ((x < 0) || (x >= mWidth) || (y < 0) || (y >= mHeight))) {
Log.e(TAG,"out side ");
Log.e(TAG,"width:"+mPopupWindow.getWidth()+"height:"+mPopupWindow.getHeight()+" x:"+x+" y :"+y);
return true;
} else if (event.getAction() == MotionEvent.ACTION_OUTSIDE) {
Log.e(TAG,"out side ...");
return true;
}
return false;
}
});
}else{
mPopupWindow.setFocusable(mIsFocusable);
mPopupWindow.setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
mPopupWindow.setOutsideTouchable(mIsOutside);
}
// update
mPopupWindow.update();
return mPopupWindow;
}
@Override
public void onDismiss() {
dissmiss();
}
/**
* Dismiss the PopupWindow
*/
public void dissmiss(){
if(mOnDismissListener!=null){
mOnDismissListener.onDismiss();
}
//if background dimming was enabled, restore the original brightness on dismiss
if(mWindow!=null){
WindowManager.LayoutParams params = mWindow.getAttributes();
params.alpha = 1.0f;
mWindow.setAttributes(params);
}
if(mPopupWindow!=null && mPopupWindow.isShowing()){
mPopupWindow.dismiss();
}
}
public PopupWindow getPopupWindow() {
return mPopupWindow;
}
public static class PopupWindowBuilder{
private CustomPopWindow mCustomPopWindow;
public PopupWindowBuilder(Context context){
mCustomPopWindow = new CustomPopWindow(context);
}
public PopupWindowBuilder size(int width,int height){
mCustomPopWindow.mWidth = width;
mCustomPopWindow.mHeight = height;
return this;
}
public PopupWindowBuilder setFocusable(boolean focusable){
mCustomPopWindow.mIsFocusable = focusable;
return this;
}
public PopupWindowBuilder setView(int resLayoutId){
mCustomPopWindow.mResLayoutId = resLayoutId;
mCustomPopWindow.mContentView = null;
return this;
}
public PopupWindowBuilder setView(View view){
mCustomPopWindow.mContentView = view;
mCustomPopWindow.mResLayoutId = -1;
return this;
}
public PopupWindowBuilder setOutsideTouchable(boolean outsideTouchable){
mCustomPopWindow.mIsOutside = outsideTouchable;
return this;
}
/**
* Set the popup animation style
* @param animationStyle
* @return
*/
public PopupWindowBuilder setAnimationStyle(int animationStyle){
mCustomPopWindow.mAnimationStyle = animationStyle;
return this;
}
public PopupWindowBuilder setClippingEnable(boolean enable){
mCustomPopWindow.mClippEnable =enable;
return this;
}
public PopupWindowBuilder setIgnoreCheekPress(boolean ignoreCheekPress){
mCustomPopWindow.mIgnoreCheekPress = ignoreCheekPress;
return this;
}
public PopupWindowBuilder setInputMethodMode(int mode){
mCustomPopWindow.mInputMode = mode;
return this;
}
public PopupWindowBuilder setOnDissmissListener(PopupWindow.OnDismissListener onDissmissListener){
mCustomPopWindow.mOnDismissListener = onDissmissListener;
return this;
}
public PopupWindowBuilder setSoftInputMode(int softInputMode){
mCustomPopWindow.mSoftInputMode = softInputMode;
return this;
}
public PopupWindowBuilder setTouchable(boolean touchable){
mCustomPopWindow.mTouchable = touchable;
return this;
}
public PopupWindowBuilder setTouchIntercepter(View.OnTouchListener touchIntercepter){
mCustomPopWindow.mOnTouchListener = touchIntercepter;
return this;
}
/**
* Enable or disable background dimming
* @param isDark
* @return
*/
public PopupWindowBuilder enableBackgroundDark(boolean isDark){
mCustomPopWindow.mIsBackgroundDark = isDark;
return this;
}
/**
* Set the background dim value
* @param darkValue
* @return
*/
public PopupWindowBuilder setBgDarkAlpha(float darkValue){
mCustomPopWindow.mBackgroundDrakValue = darkValue;
return this;
}
/**
* Set whether touching outside the PopupWindow dismisses it
* @param disMiss
* @return
*/
public PopupWindowBuilder enableOutsideTouchableDissmiss(boolean disMiss){
mCustomPopWindow.enableOutsideTouchDisMiss = disMiss;
return this;
}
public CustomPopWindow create(){
//build the PopupWindow
mCustomPopWindow.build();
return mCustomPopWindow;
}
}
}
<|start_filename|>app/src/main/java/com/example/zhouwei/simple/MainActivity.java<|end_filename|>
package com.example.zhouwei.simple;
import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.AppCompatSeekBar;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.PopupWindow;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;
import com.example.zhouwei.library.CustomPopWindow;
import java.util.ArrayList;
import java.util.List;
public class MainActivity extends AppCompatActivity implements View.OnClickListener{
private TextView mButton1,mButton2,mButton3,mButton4,mButton5,mButton6,mButton7;
private CustomPopWindow mCustomPopWindow;
private CustomPopWindow mListPopWindow;
private AppCompatSeekBar mAppCompatSeekBar;
private CustomPopWindow mPopWindow;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mButton1 = (TextView) findViewById(R.id.button1);
mButton1.setOnClickListener(this);
mButton2 = (TextView) findViewById(R.id.button2);
mButton2.setOnClickListener(this);
mButton3 = (TextView) findViewById(R.id.button3);
mButton3.setOnClickListener(this);
mButton4 = (TextView) findViewById(R.id.button4);
mButton4.setOnClickListener(this);
mButton5 = (TextView) findViewById(R.id.button5);
mButton5.setOnClickListener(this);
mButton6 = (TextView) findViewById(R.id.button6);
mButton6.setOnClickListener(this);
mButton7 = (TextView) findViewById(R.id.button7);
mButton7.setOnClickListener(this);
mAppCompatSeekBar = (AppCompatSeekBar) findViewById(R.id.seek_bar);
mAppCompatSeekBar.setMax(100);
mAppCompatSeekBar.setProgress(100);
mAppCompatSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
float alpha = seekBar.getProgress() * 1.0f / 100 ;
if(alpha < 0.2){
alpha = 0.2f;
}
Window mWindow = getWindow();
WindowManager.LayoutParams params = mWindow.getAttributes();
params.alpha = alpha;
mWindow.setAttributes(params);
Log.e("zhouwei","progress:"+progress);
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
}
@Override
public void onClick(View v) {
switch (v.getId()){
case R.id.button1:
showPopBottom();
//test();
break;
case R.id.button2:
showPopTop();
break;
case R.id.button3:
showPopMenu();
break;
case R.id.button4:
showPopListView();
//showListView();
break;
case R.id.button5:
showPopTopWithDarkBg();
break;
case R.id.button6:
useInAndOutAnim();
break;
case R.id.button7:
touchOutsideDontDisMiss();
break;
}
}
private void showPopBottom(){
CustomPopWindow popWindow = new CustomPopWindow.PopupWindowBuilder(this)
.setView(R.layout.pop_layout1)
.setFocusable(true)
.setOutsideTouchable(true)
.create();
popWindow.showAsDropDown(mButton1,0,10);
}
/**
* Touching outside the PopupWindow does not dismiss it
*/
private void touchOutsideDontDisMiss(){
View view = LayoutInflater.from(this).inflate(R.layout.pop_layout_close,null);
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View v) {
Log.e("FK","onClick.....");
mPopWindow.dissmiss();
}
};
view.findViewById(R.id.close_pop).setOnClickListener(listener);
mPopWindow = new CustomPopWindow.PopupWindowBuilder(this)
.setView(view)
.enableOutsideTouchableDissmiss(false)// touching outside the PopupWindow will not dismiss it; if this property is unset or true, it is dismissed
.create();
mPopWindow.showAsDropDown(mButton7,0,10);
}
private void test(){
LayoutInflater inflater = (LayoutInflater)getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View contentview = inflater.inflate(R.layout.pop_layout1, null);
final PopupWindow popupWindow = new PopupWindow(contentview, ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
//popupWindow
popupWindow.setFocusable(true);
popupWindow.setOutsideTouchable(false);
popupWindow.setBackgroundDrawable(null);
popupWindow.getContentView().setFocusable(true); // this is important
popupWindow.getContentView().setFocusableInTouchMode(true);
popupWindow.getContentView().setOnKeyListener(new View.OnKeyListener() {
@Override
public boolean onKey(View v, int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
popupWindow.dismiss();
return true;
}
return false;
}
});
popupWindow.showAsDropDown(mButton1, 0, 10);
}
private void showPopTop(){
CustomPopWindow popWindow = new CustomPopWindow.PopupWindowBuilder(this)
.setView(R.layout.pop_layout2)
.create();
popWindow .showAsDropDown(mButton2,0, - (mButton2.getHeight() + popWindow.getHeight()));
//popWindow.showAtLocation(mButton1, Gravity.NO_GRAVITY,0,0);
}
/**
* Show the PopupWindow with a dimmed background
*/
private void showPopTopWithDarkBg(){
View contentView = LayoutInflater.from(this).inflate(R.layout.pop_menu,null);
//configure the popup content view
handleLogic(contentView);
//create and show the popup
mCustomPopWindow= new CustomPopWindow.PopupWindowBuilder(this)
.setView(contentView)
.enableBackgroundDark(true) //dim the background while the popup is shown
.setBgDarkAlpha(0.7f) // control the dim level
.setOnDissmissListener(new PopupWindow.OnDismissListener() {
@Override
public void onDismiss() {
Log.e("TAG","onDismiss");
}
})
.create()
.showAsDropDown(mButton5,0,20);
}
private void useInAndOutAnim(){
CustomPopWindow popWindow = new CustomPopWindow.PopupWindowBuilder(this)
.setView(R.layout.pop_layout1)
.setFocusable(true)
.setOutsideTouchable(true)
.setAnimationStyle(R.style.CustomPopWindowStyle)
.create()
.showAsDropDown(mButton6,0,10);
}
private void showPopMenu(){
View contentView = LayoutInflater.from(this).inflate(R.layout.pop_menu,null);
//configure the popup content view
handleLogic(contentView);
//create and show the popup
mCustomPopWindow= new CustomPopWindow.PopupWindowBuilder(this)
.setView(contentView)
.create()
.showAsDropDown(mButton3,0,20);
}
private void showPopListView(){
View contentView = LayoutInflater.from(this).inflate(R.layout.pop_list,null);
//configure the popup content view
handleListView(contentView);
//create and show the popup
mListPopWindow= new CustomPopWindow.PopupWindowBuilder(this)
.setView(contentView)
.size(ViewGroup.LayoutParams.MATCH_PARENT,ViewGroup.LayoutParams.MATCH_PARENT)//display size
.create()
.showAsDropDown(mButton4,0,20);
}
private void handleListView(View contentView){
RecyclerView recyclerView = (RecyclerView) contentView.findViewById(R.id.recyclerView);
LinearLayoutManager manager = new LinearLayoutManager(this);
manager.setOrientation(LinearLayoutManager.VERTICAL);
recyclerView.setLayoutManager(manager);
MyAdapter adapter = new MyAdapter();
adapter.setData(mockData());
recyclerView.setAdapter(adapter);
adapter.notifyDataSetChanged();
}
private List<String> mockData(){
List<String> data = new ArrayList<>();
for (int i=0;i<100;i++){
data.add("Item:"+i);
}
return data;
}
/**
* Configure the popup content, click handling and related logic
* @param contentView
*/
private void handleLogic(View contentView){
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(mCustomPopWindow!=null){
mCustomPopWindow.dissmiss();
}
String showContent = "";
switch (v.getId()){
case R.id.menu1:
showContent = "Clicked Item menu 1";
break;
case R.id.menu2:
showContent = "Clicked Item menu 2";
break;
case R.id.menu3:
showContent = "Clicked Item menu 3";
break;
case R.id.menu4:
showContent = "Clicked Item menu 4";
break;
case R.id.menu5:
showContent = "Clicked Item menu 5";
break;
}
Toast.makeText(MainActivity.this,showContent,Toast.LENGTH_SHORT).show();
}
};
contentView.findViewById(R.id.menu1).setOnClickListener(listener);
contentView.findViewById(R.id.menu2).setOnClickListener(listener);
contentView.findViewById(R.id.menu3).setOnClickListener(listener);
contentView.findViewById(R.id.menu4).setOnClickListener(listener);
contentView.findViewById(R.id.menu5).setOnClickListener(listener);
}
}
| lqjideos/CustomPopwindow |
<|start_filename|>src/Graph/Tree/Diameter/diameter.cpp<|end_filename|>
#include<iostream>
#include<vector>
#include<algorithm>
using namespace std;
vector<vector<int>> tree ;
// toLeaf[v]: length of the longest downward path from v to a leaf
// path_length[v]: length of the longest path that passes through v
vector<int> visited, toLeaf, path_length;
void dfs(int node){
visited[node] = true;
vector<int> length = {-1};
for(int child : tree[node]){
if(visited[child])
continue;
dfs(child);
toLeaf[node] = max(toLeaf[node], 1 + toLeaf[child]);
length.push_back(toLeaf[child]);
}
// select the two largest child depths; the longest path through this node is the sum of (depth + 1) for those two
int n = length.size(), m = min((int)length.size(),2);
for(int i = 0; i < m; i++){
for(int j = i+1; j < n; j++){
if(length[i] < length[j])
swap(length[i], length[j]);
}
path_length[node] += length[i] + 1;
}
}
int main(){
int n;
cin >> n;
int m = n - 1;
tree.resize(n+1), toLeaf.resize(n+1,0), path_length.resize(n+1,0), visited.resize(n+1,false);
while(m--){
int x, y;
cin >> x >> y;
tree[x].push_back(y);
tree[y].push_back(x);
}
int root = 1;
dfs(root);
int diameter = *max_element(path_length.begin(), path_length.end());
cout << diameter << "\n";
return 0;
} | iammanish17/Algorithms |
<|start_filename|>src/main/java/io/reliza/versioning/Constants.java<|end_filename|>
/**
* Copyright 2019 Reliza Incorporated. Licensed under MIT License.
* https://reliza.io
*/
package io.reliza.versioning;
/**
*
* This class contains various constants used across the project.
*
*/
public class Constants {
private Constants () {}
public static final String SEMVER = "semver";
public static final String BASE_MODIFIER = "Snapshot";
public static final String MAVEN_STYLE_SNAPSHOT = "-SNAPSHOT";
}
<|start_filename|>src/main/java/io/reliza/versioning/VersionType.java<|end_filename|>
/**
* Copyright 2019 Reliza Incorporated. Licensed under MIT License.
* https://reliza.io
*/
package io.reliza.versioning;
/**
* We will use this enum to initialize some predefined recommended versions
*
*/
public enum VersionType {
CALVER_UBUNTU("YY.0M.Micro"),
CALVER_RELIZA("YYYY.0M.Calvermodifier.Micro+Metadata"),
CALVER_RELIZA_2020("YYYY.0M.Calvermodifier.Minor.Micro+Metadata"),
SEMVER_FULL_NOTATION("Major.Minor.Patch-Modifier+Metadata"),
SEMVER_SHORT_NOTATION("Major.Minor.Patch"),
FEATURE_BRANCH("Branch.Micro"),
FEATURE_BRANCH_CALVER("YYYY.0M.Branch.Micro")
;
private String schema;
private VersionType(String schema) {
this.schema = schema;
}
public String getSchema() {
return schema;
}
}
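// Illustrative sketch (not part of the original file): the enum only exposes raw schema strings,
// so a caller could resolve one by name; no other project API is assumed here.
class VersionTypeSketch {
    static String schemaFor(String typeName) {
        return VersionType.valueOf(typeName).getSchema(); // e.g. "SEMVER_SHORT_NOTATION" -> "Major.Minor.Patch"
    }
}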
| jeehyun-lee/versioning |
<|start_filename|>src/components/main/Challenges/videoPart.java<|end_filename|>
int gcd(int a, int b){
if(b > a){
int temp = a;
a = b;
b = temp;
}
while(b != 0){
int temp = a;
a = b;
b = temp % b;
}
return a;
}
// Convert both hh:mm:ss timestamps to seconds, then reduce the fraction part/total by their gcd.
int[] videoPart(String part, String total) {
String[] partTime = part.split(":");
String[] totalTime = total.split(":");
int partTimeSeconds = Integer.parseInt(partTime[0]) * 3600 + Integer.parseInt(partTime[1]) * 60 + Integer.parseInt(partTime[2]);
int totalTimeSeconds = Integer.parseInt(totalTime[0]) * 3600 + Integer.parseInt(totalTime[1]) * 60 + Integer.parseInt(totalTime[2]);
int div = gcd(partTimeSeconds, totalTimeSeconds);
return new int[]{(partTimeSeconds / div), (totalTimeSeconds/div)};
}
<|start_filename|>src/components/main/Challenges/find_highest_and_lowest_element_of_a_matrix.cpp<|end_filename|>
#include<iostream>
using namespace std;
int main()
{
int m,n,a[10][10],i,j,high,low;
cout<<"Enter no. of rows and coloumns:";
cin>>m>>n;
cout<<"\nEnter matrix:\n";
for(i=0;i<m;++i)
{
for(j=0;j<n;++j)
cin>>a[i][j];
}
high=a[0][0];
low=a[0][0];
for(i=0;i<m;++i)
{
for(j=0;j<n;++j)
{
if(a[i][j]>high)
high=a[i][j];
else
if(a[i][j]<low)
low=a[i][j];
}
}
cout<<"\nHeighst Element:"<<high<<"\nLowest Element:"<<low<<"\n";
return 0;
}
// Output
// Enter no. of rows and columns:3
// 3
// Enter matrix:
// 3 5 9
// 15 6 0
// 12 4 7
// Highest Element:15
// Lowest Element:0
<|start_filename|>src/components/main/Challenges/phoneCall.java<|end_filename|>
int phoneCall(int min1, int min2_10, int min11, int s) {
double duration = 0;
if(s < min1)
return 0;
duration += 1;
s -= min1;
for(int i = 0; i < 9; i++){
if(s <= 0)
return (int)Math.floor(duration);
else if(s < min2_10){
duration += s / (min2_10 * 1.0);
s = 0;
}
else{
duration += 1;
s -= min2_10;
}
}
duration += s / (min11 * 1.0);
return (int)Math.floor(duration);
}
<|start_filename|>src/components/main/Challenges/companyBotStrategy.java<|end_filename|>
double companyBotStrategy(int[][] trainingData) {
int cnt = 0, sum = 0;
for(int[] values : trainingData){
int score = values[0];
int validity = values[1];
if(validity == 1){
sum += score;
cnt += 1;
}
}
if(sum == 0)
return 0.0;
else
return sum / (cnt * 1.0);
}
<|start_filename|>src/components/main/Challenges/fareEstimator.java<|end_filename|>
double[] fareEstimator(int ride_time, int ride_distance, double[] cost_per_minute, double[] cost_per_mile) {
double[] fares = new double[cost_per_minute.length];
for(int i = 0; i < cost_per_minute.length; i++){
fares[i] = cost_per_minute[i] * ride_time;
}
for(int i = 0; i < cost_per_minute.length; i++){
fares[i] += cost_per_mile[i] * ride_distance;
}
return fares;
}
<|start_filename|>src/components/main/Challenges/lineUp.java<|end_filename|>
int lineUp(String commands) {
char[] arr = commands.toCharArray();
int cnt = 0, same = 0;
for(int i = 0; i < arr.length; i++){
if(arr[i] == 'L' || arr[i] == 'R')
cnt += 1;
if(arr[i] == 'A')
cnt += 2;
if(cnt % 2 == 0)
same += 1;
}
return same;
}
<|start_filename|>src/components/main/Challenges/isAdmissibleOverpayment.java<|end_filename|>
boolean isAdmissibleOverpayment(double[] prices, String[] notes, double x) {
double instore = 0, instacart = 0;
for(int i = 0; i < notes.length; i++){
String[] temp = notes[i].split(" ");
if(temp[1].equals("higher")){
double percentage = Double.parseDouble(temp[0].replaceAll("%", ""));
instacart += prices[i];
// the paid price is 'percentage' percent higher than in-store, so divide by (100 + percentage); also accumulate instead of overwriting
instore += ( prices[i] * 100 / (100 + percentage));
}
if(temp[1].equals("lower")){
double percentage = Double.parseDouble(temp[0].replaceAll("%", ""));
instacart += prices[i];
instore += ( prices[i] * 100 / (100 - percentage));
}
System.out.println(instacart + " "+ instore);
}
return x >= instacart - instore;
}
| riju234/CodeSignal-Practice_Solutions |
<|start_filename|>connectors/jk/jkstatus/src/share/org/apache/jk/status/package.html<|end_filename|>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<body>
<p>This package contains a set of <code>Task</code> implementations for
<em>Ant (version 1.6.x or later)</em> that can be used to interact with the
Apache mod_jk status page to show, update, disable and stop mod_jk workers.
For more information, see
<a href="http://tomcat.apache.org/connectors-doc/index.html"><strong>JK Documenation</strong></a>.</p>
<p>The attributes of each task element correspond
exactly to the request parameters that are included with an HTTP request
sent directly to jk status page. They are summarized as follows:
</p>
<b>General parameter</b><br/>
<table>
<tr>
<th align="center" width="15%">Attribute</th>
<th align="center" width="85%">Description</th>
</tr>
<tr>
<td align="center">url</td>
<td>
The URL of the jk status page you will use to
perform the requested operations. If not specified, defaults to
<code>http://localhost:80/jkstatus</code> (which corresponds
to a standard installation of Apache mod_jk).
</td>
</tr>
<tr>
<td align="center">username</td>
<td>
The username of a mod_jk status user that has been configured with the
<code>Allow user</code> Apache Location constraint. This attribute is optional.
</td>
</tr>
<tr>
<td align="center">password</td>
<td>
The password of a mod_jk status user that has been configured with the
<code>Allow user</code> Apache Location constraint. This attribute is optional.
</td>
</tr>
<tr>
<td align="center">resultProperty</td>
<td>
Bind all show results to property names with this prefix. This attribute is optional.
</td>
</tr>
<tr>
<td align="center">echo</td>
<td>
Show results on the Ant console (default false).
</td>
</tr>
<tr>
<td align="center">errorProperty</td>
<td>
Set this property when a failure is detected. This attribute is optional.
</td>
</tr>
</table>
<b>Command show parameter</b><br/>
<table>
<tr>
<th align="center" width="15%">Attribute</th>
<th align="center" width="85%">Description</th>
</tr>
<tr>
<td align="center">worker</td>
<td>
only bind properties from this balancer tcp worker (node)
</td>
</tr>
<tr>
<td align="center">loadbalancer</td>
<td>
only bind properties from this loadbalancer worker
</td>
</tr>
</table>
<b>Command reset parameter</b><br/>
<table>
<tr>
<th align="center" width="15%">Attribute</th>
<th align="center" width="85%">Description</th>
</tr>
<tr>
<td align="center">workerLb</td>
<td>
name of loadbalancer worker.
</td>
</tr>
</table>
<b>Command update loadbalancer parameter</b><br/>
<table>
<tr>
<th align="center" width="15%">Attribute</th>
<th align="center" width="85%">Description</th>
</tr>
<tr>
<td align="center">workerType=loadbalancer</td>
<td>
type of update
</td>
</tr>
<tr>
<td align="center">workerLb</td>
<td>
name of loadbalancer worker.
</td>
</tr>
<tr>
<td align="center">lbForceSession</td>
<td>
Force Sticky Session. (true/false)
</td>
</tr>
<tr>
<td align="center">lbStickySession</td>
<td>
Sticky Session. (true/false)
</td>
</tr>
<tr>
<td align="center">lbRetries</td>
<td>
loadbalancer retries after worker connection failure (int)
</td>
</tr>
<tr>
<td align="center">lbRecovertime</td>
<td>
Recovery timeout after a worker is set to the "error" state (int seconds)
</td>
</tr>
</table>
<b>Command update worker parameter</b><br/>
<table>
<tr>
<th align="center" width="15%">Attribute</th>
<th align="center" width="85%">Description</th>
</tr>
<tr>
<td align="center">workerType=worker</td>
<td>
type of update
</td>
</tr>
<tr>
<td align="center">worker</td>
<td>
name of tcp worker.
</td>
</tr>
<tr>
<td align="center">workerActivation (>=1.2.19</td>
<td>
set worker activation (1 Active, 2 Disabled, 3 Stopped)
</td>
</tr>
<tr>
<td align="center">workerDisabled (< 1.2.19)</td>
<td>
set disable state. (true/false)
</td>
</tr>
<tr>
<td align="center">workerStoppend (< 1.2.19)</td>
<td>
set stopped state. (true/false)
</td>
</tr>
<tr>
<td align="center">workerJvmRoute</td>
<td>
set jvm route
</td>
</tr>
<tr>
<td align="center">workerLaodFactor</td>
<td>
set load factor (int)
</td>
</tr>
<tr>
<td align="center">workerDistance</td>
<td>
set worker distance (int)
</td>
</tr>
<tr>
<td align="center">workerRedirect</td>
<td>
other worker name to redirect after failure
</td>
</tr>
<tr>
<td align="center">workerClusterDomain</td>
<td>
Cluster domain name, the group of workers in a replication cluster.
</td>
</tr>
</table>
</body>
<|start_filename|>connectors/jk/native/scripts/build/rules.mk<|end_filename|>
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is an extract of what is in APR.
#
# Compile commands
#VPATH=.:../common
COMPILE = $(CC) $(CFLAGS)
LT_COMPILE = $(LIBTOOL) --mode=compile $(COMPILE) -c $< -o $@
# Implicit rules for creating outputs from input files
.SUFFIXES:
.SUFFIXES: .c .lo .o .slo .s
.c.o:
$(COMPILE) -c $<
.s.o:
$(COMPILE) -c $<
.c.lo:
$(LT_COMPILE)
.s.lo:
$(LT_COMPILE)
.c.slo:
$(SH_COMPILE)
<|start_filename|>container/tester/web/Include06.jsp<|end_filename|>
<%@ page contentType="text/plain" %>==========
<jsp:include page="servlet/org.apache.tester.Include06a" flush="false"/>==========
<jsp:include page="/Include06b.jsp"/>==========
<|start_filename|>connectors/jk/native/common/ap_snprintf.h<|end_filename|>
/* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* The ap_vsnprintf/ap_snprintf functions are based on, and used with the
* permission of, the SIO stdio-replacement strx_* functions by Panos
<NAME> <<EMAIL>> for xinetd.
*/
#ifndef APACHE_AP_SNPRINTF_H
#define APACHE_AP_SNPRINTF_H
#include <stdio.h>
#include <stdarg.h>
#include <limits.h>
#ifdef __cplusplus
extern "C" {
#endif
/* stuff marked API_EXPORT is part of the API, and intended for use
* by modules
*/
#ifndef API_EXPORT
#define API_EXPORT(type) type
#endif
/* Stuff marked API_EXPORT_NONSTD is part of the API, and intended for
* use by modules. The difference between API_EXPORT and
* API_EXPORT_NONSTD is that the latter is required for any functions
* which use varargs or are used via indirect function call. This
* is to accommodate the two calling conventions in windows dlls.
*/
#ifndef API_EXPORT_NONSTD
#define API_EXPORT_NONSTD(type) type
#endif
#if !defined(__GNUC__) || __GNUC__ < 2 || \
(__GNUC__ == 2 && __GNUC_MINOR__ < 7) ||\
defined(NEXT)
#define __attribute__(__x)
#endif
/* These macros allow correct support of 8-bit characters on systems which
* support 8-bit characters. Pretty dumb how the cast is required, but
* that's legacy libc for ya. These new macros do not support EOF like
* the standard macros do. Tough.
*/
#define ap_isalpha(c) (isalpha(((unsigned char)(c))))
#define ap_isdigit(c) (isdigit(((unsigned char)(c))))
#define ap_islower(c) (islower(((unsigned char)(c))))
/* ap_vformatter() is a generic printf-style formatting routine
* with some extensions. The extensions are:
*
* %pA takes a struct in_addr *, and prints it as a.b.c.d
* %pI takes a struct sockaddr_in * and prints it as a.b.c.d:port
* %pp takes a void * and outputs it in hex
*
* The %p hacks are to force gcc's printf warning code to skip
* over a pointer argument without complaining. This does
* mean that the ANSI-style %p (output a void * in hex format) won't
* work as expected at all, but that seems to be a fair trade-off
* for the increased robustness of having printf-warnings work.
*
* Additionally, ap_vformatter allows for arbitrary output methods
* using the ap_vformatter_buff and flush_func.
*
* The ap_vformatter_buff has two elements curpos and endpos.
* curpos is where ap_vformatter will write the next byte of output.
* It proceeds writing output to curpos, and updating curpos, until
* either the end of output is reached, or curpos == endpos (i.e. the
* buffer is full).
*
* If the end of output is reached, ap_vformatter returns the
* number of bytes written.
*
* When the buffer is full, the flush_func is called. The flush_func
* can return -1 to indicate that no further output should be attempted,
* and ap_vformatter will return immediately with -1. Otherwise
* the flush_func should flush the buffer in whatever manner is
* appropriate, re-initialize curpos and endpos, and return 0.
*
* Note that flush_func is only invoked as a result of attempting to
* write another byte at curpos when curpos >= endpos. So for
* example, it's possible when the output exactly matches the buffer
* space available that curpos == endpos will be true when
* ap_vformatter returns.
*
* ap_vformatter does not call out to any other code, it is entirely
* self-contained. This allows the callers to do things which are
* otherwise "unsafe". For example, ap_psprintf uses the "scratch"
* space at the unallocated end of a block, and doesn't actually
* complete the allocation until ap_vformatter returns. ap_psprintf
* would be completely broken if ap_vformatter were to call anything
* that used a pool. Similarly http_bprintf() uses the "scratch"
* space at the end of its output buffer, and doesn't actually note
* that the space is in use until it either has to flush the buffer
* or until ap_vformatter returns.
*/
typedef struct {
char *curpos;
char *endpos;
} ap_vformatter_buff;
API_EXPORT(int) ap_vformatter(int (*flush_func)(ap_vformatter_buff *),
ap_vformatter_buff *, const char *fmt, va_list ap);
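/* Illustrative sketch, not part of the original header: one possible way to
 * satisfy the flush_func contract described above.  The wrapper struct, the
 * buffer size and the FILE pointer are assumptions for this example only.
 * The ap_vformatter_buff is placed first so the callback can recover its own
 * state from the pointer it is handed; on write failure it returns -1 (which
 * makes ap_vformatter return -1 immediately), otherwise it drains the buffer,
 * re-initializes curpos and returns 0.
 *
 *     struct file_buff {
 *         ap_vformatter_buff vbuff;
 *         char buf[128];
 *         FILE *fp;
 *     };
 *
 *     static int file_flush(ap_vformatter_buff *vbuff)
 *     {
 *         struct file_buff *fb = (struct file_buff *) vbuff;
 *         size_t n = (size_t) (fb->vbuff.curpos - fb->buf);
 *
 *         if (fwrite(fb->buf, 1, n, fb->fp) != n)
 *             return -1;
 *         fb->vbuff.curpos = fb->buf;
 *         return 0;
 *     }
 *
 * The caller would set curpos = buf and endpos = buf + sizeof(buf) before
 * calling ap_vformatter(file_flush, &fb.vbuff, fmt, ap), and must flush any
 * bytes left between buf and curpos after ap_vformatter returns.
 */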
/* These are snprintf implementations based on ap_vformatter().
*
* Note that various standards and implementations disagree on the return
* value of snprintf, and side-effects due to %n in the formatting string.
* ap_snprintf behaves as follows:
*
* Process the format string until the entire string is exhausted, or
* the buffer fills. If the buffer fills then stop processing immediately
* (so no further %n arguments are processed), and return the buffer
* length. In all cases the buffer is NUL terminated. The return value
* is the number of characters placed in the buffer, excluding the
* terminating NUL. All this implies that, at most, (len-1) characters
* will be copied over; if the return value is >= len, then truncation
 * occurred.
*
* In no event does ap_snprintf return a negative number.
*/
API_EXPORT_NONSTD(int) ap_snprintf(char *buf, size_t len, const char *format,...)
__attribute__((format(printf,3,4)));
API_EXPORT(int) ap_vsnprintf(char *buf, size_t len, const char *format,
va_list ap);
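/* Usage sketch, not part of the original header: detecting truncation with
 * ap_snprintf using the "return value >= len means truncation" rule
 * documented above.  The buffer size and the host/port variables are
 * placeholders for this example only.
 *
 *     char msg[64];
 *     int n = ap_snprintf(msg, sizeof(msg), "connect to %s:%d failed",
 *                         host, port);
 *     int truncated = (n >= (int) sizeof(msg));
 *
 * In either case msg is NUL terminated and n is never negative.
 */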
#ifdef __cplusplus
}
#endif
#endif /* !APACHE_AP_SNPRINTF_H */
<|start_filename|>container/tester/web/Forward04b.jsp<|end_filename|>
<%@ page contentType="text/plain" %>Forward04b.jsp PASSED
<|start_filename|>container/tester/web/Include05.jsp<|end_filename|>
<%@ page contentType="text/plain" %><jsp:include page="/Include05a.jsp" flush="false"/><jsp:forward page="/Include05b.jsp"/>
<|start_filename|>connectors/jk/jkstatus/test/src/share/org/apache/jk/status/JkStatusParserTest.java<|end_filename|>
/* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jk.status;
import java.io.IOException;
import java.io.StringReader;
import junit.framework.TestCase;
import org.apache.tomcat.util.digester.Digester;
import org.xml.sax.SAXException;
/**
* @author <NAME>
*
*/
public class JkStatusParserTest extends TestCase {
public void testDigester() throws IOException, SAXException {
Digester digester = JkStatusParser.createDigester();
String example = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>"
+ "<jk:status xmlns:jk=\"http://tomcat.apache.org\">"
+ "<jk:server name=\"localhost\" port=\"80\" software=\"Apache/2.0.58 (Unix) mod_jk/1.2.19\" version=\"1.2.19\" />"
+ "<jk:balancers>"
+ "<jk:balancer id=\"0\" name=\"loadbalancer\" type=\"lb\" sticky=\"True\" stickyforce=\"False\" retries=\"2\" recover=\"60\" >"
+ "<jk:member id=\"0\" name=\"node1\" type=\"ajp13\" host=\"localhost\" port=\"9012\" address=\"127.0.0.1:9012\" activation=\"ACT\" state=\"OK/IDLE\" distance=\"0\" lbfactor=\"1\" lbmult=\"1\" lbvalue=\"0\" elected=\"0\" errors=\"0\" transferred=\"0\" readed=\"0\" busy=\"0\" maxbusy=\"0\" jvm_route=\"node1\" />"
+ "<jk:member id=\"0\" name=\"node2\" type=\"ajp13\" host=\"localhost\" port=\"9022\" address=\"127.0.0.1:9022\" activation=\"ACT\" state=\"OK/IDLE\" distance=\"0\" lbfactor=\"1\" lbmult=\"1\" lbvalue=\"0\" elected=\"0\" errors=\"0\" transferred=\"0\" readed=\"0\" busy=\"0\" maxbusy=\"0\" jvm_route=\"node2\" />"
+ "<jk:map type=\"Wildchar\" uri=\"/ClusterTest/*\" context=\"/ClusterTest/*\" />"
+ "<jk:map type=\"Exact\" uri=\"/ClusterTest\" context=\"/ClusterTest\" />"
+ "<jk:map type=\"Wildchar\" uri=\"/myapps/*\" context=\"/myapps/*\" />"
+ "<jk:map type=\"Exact\" uri=\"/myapps\" context=\"/myapps\" />"
+ "</jk:balancer>"
+ "</jk:balancers>"
+ "</jk:status>" ;
StringReader reader = new StringReader(example);
JkStatus status = (JkStatus) digester
.parse(reader);
assertNotNull(status);
assertNotNull(status.getServer());
assertEquals(1,status.getBalancers().size());
JkBalancer balancer = (JkBalancer)status.getBalancers().get(0);
assertEquals(2,balancer.getBalancerMembers().size());
assertEquals("node1",((JkBalancerMember)balancer.getBalancerMembers().get(0)).getName());
assertEquals("node2",((JkBalancerMember)balancer.getBalancerMembers().get(1)).getName());
assertEquals(4,balancer.getBalancerMappings().size());
}
}
<|start_filename|>container/tester/web/Encoding02.jsp<|end_filename|>
<html>
<head>
<title>Encoding02.jsp</title>
</head>
<body bgcolor="white">
This is legal in the spec:<br>
<%= "Joe said %\> foo" %>.
</body>
</html>
<|start_filename|>container/tester/web/Encoding03.jsp<|end_filename|>
<html>
<head>
<title>Encoding03.jsp</title>
</head>
<body bgcolor="white">
This is not recognized as a delimiter either:<br>
<%= "Joe said %\\> bar" %>.
</body>
</html>
<|start_filename|>container/tester/web/ErrorPage06.jsp<|end_filename|>
<%@ page contentType="text/plain" %><%
// Accumulate all the reasons this request might fail
StringBuffer sb = new StringBuffer();
Object value = null;
value = request.getAttribute("javax.servlet.error.exception");
if (value == null) {
sb.append(" exception is missing/");
} else if (!(value instanceof java.lang.ArrayIndexOutOfBoundsException)) {
sb.append(" exception class is ");
sb.append(value.getClass().getName());
sb.append("/");
}
value = request.getAttribute("javax.servlet.error.exception_type");
if (value == null)
sb.append(" exception_type is missing/");
else if (!(value instanceof Class)) {
sb.append(" exception_type class is ");
sb.append(value.getClass().getName());
sb.append("/");
} else {
Class clazz = (Class) value;
String name = clazz.getName();
if (!"java.lang.ArrayIndexOutOfBoundsException".equals(name)) {
sb.append(" exception_type is ");
sb.append(name);
sb.append("/");
}
}
value = request.getAttribute("javax.servlet.error.message");
if (value == null)
sb.append(" message is missing/");
else if (!(value instanceof String)) {
sb.append(" message class is ");
sb.append(value.getClass().getName());
sb.append("/");
} else if (!"ErrorPage05 Threw ArrayIndexOutOfBoundsException".equals(value) &&
!"ErrorPage08 Threw ArrayIndexOutOfBoundsException".equals(value)) {
sb.append(" message is not correct");
}
value = request.getAttribute("javax.servlet.error.request_uri");
if (value == null)
sb.append(" request_uri is missing/");
else if (!(value instanceof String)) {
sb.append(" request_uri class is ");
sb.append(value.getClass().getName());
sb.append("/");
} else {
String request_uri = (String) value;
String test1 = request.getContextPath() + "/ErrorPage05";
String test2 = request.getContextPath() + "/WrappedErrorPage05";
String test3 = request.getContextPath() + "/ErrorPage08";
String test4 = request.getContextPath() + "/WrappedErrorPage08";
if (!request_uri.equals(test1) && !request_uri.equals(test2) &&
!request_uri.equals(test3) && !request_uri.equals(test4)) {
sb.append(" request_uri is ");
sb.append(request_uri);
sb.append("/");
}
}
value = request.getAttribute("javax.servlet.error.servlet_name");
if (value == null)
sb.append(" servlet_name is missing/");
else if (!(value instanceof String)) {
sb.append(" servlet_name class is ");
sb.append(value.getClass().getName());
sb.append("/");
} else {
String servlet_name = (String) value;
if (!"ErrorPage05".equals(servlet_name) &&
!"ErrorPage08".equals(servlet_name)) {
sb.append(" servlet_name is ");
sb.append(servlet_name);
sb.append("/");
}
}
// Report ultimate success or failure
if (sb.length() < 1)
out.println("ErrorPage06 PASSED - JSP");
else
out.println("ErrorPage06 FAILED -" + sb.toString());
%>
<%
Exception e = (Exception)
request.getAttribute("javax.servlet.error.exception");
out.println("EXCEPTION: " + e);
Class et = (Class)
request.getAttribute("javax.servlet.error.exception_type");
out.println("EXCEPTION_TYPE: " + et.getName());
String m = (String)
request.getAttribute("javax.servlet.error.message");
out.println("MESSAGE: " + m);
String ru = (String)
request.getAttribute("javax.servlet.error.request_uri");
out.println("REQUEST_URI: " + ru);
String sn = (String)
request.getAttribute("javax.servlet.error.servlet_name");
out.println("SERVLET_NAME: " + sn);
%>
<|start_filename|>container/tester/web/ResponseWrap01b.jsp<|end_filename|>
<%@ page contentType="text/plain"%>ResponseWrap01b PASSED
<|start_filename|>container/tester/web/ErrorPage08.jsp<|end_filename|>
<%@ page contentType="text/plain" %><%
// Write a FAILED message that should get replaced by the error text
out.println("ErrorPage08 FAILED - Original response returned");
// Throw the specified exception
String type = request.getParameter("type");
if ("Arithmetic".equals(type)) {
throw new ArithmeticException
("ErrorPage08 Threw ArithmeticException");
} else if ("Array".equals(type)) {
throw new ArrayIndexOutOfBoundsException
("ErrorPage08 Threw ArrayIndexOutOfBoundsException");
} else if ("Number".equals(type)) {
throw new NumberFormatException
("ErrorPage08 Threw NumberFormatException");
}
%>
<|start_filename|>container/tester/web/ErrorPage10.jsp<|end_filename|>
<%@ page contentType="text/plain" isErrorPage="true" %><%
// Accumulate all the reasons this request might fail
StringBuffer sb = new StringBuffer();
Object value = null;
if (exception == null) {
sb.append(" exception is missing/");
} else {
if (!(exception instanceof java.lang.ArrayIndexOutOfBoundsException)) {
sb.append(" exception class is ");
sb.append(exception.getClass().getName());
sb.append("/");
}
if (!"ErrorPage09 Threw ArrayIndexOutOfBoundsException".equals(exception.getMessage())) {
sb.append(" exception message is ");
sb.append(exception.getMessage());
sb.append("/");
}
}
// Report ultimate success or failure
if (sb.length() < 1)
out.println("ErrorPage10 PASSED");
else
out.println("ErrorPage10 FAILED -" + sb.toString());
%>
<%
out.println("EXCEPTION: " + exception);
%>
<|start_filename|>container/tester/web/Xerces00.jsp<|end_filename|>
<!-- This File is generated automatically by jsp2XML converter tool -->
<!-- Written By <NAME>/<NAME> -->
<jsp:root
xmlns:jsp="http://java.sun.com/JSP/Page" version="1.2"
><jsp:directive.page contentType="text/plain"
/><jsp:text><![CDATA[Xerces00 PASSED]]></jsp:text>
</jsp:root>
<|start_filename|>container/tester/web/JspParams02.jsp<|end_filename|>
<jsp:forward page="JspParams01a.jsp">
<jsp:params>
<jsp:param name="foo" value="bar"/>
</jsp:params>
</jsp:forward>
<|start_filename|>container/tester/web/WrappedFilterResponse02.jsp<|end_filename|>
<%@ page contentType="text/plain" %>FilterResponse02 PASSED
<|start_filename|>container/tester/web/Include03b.jsp<|end_filename|>
<% request.setAttribute("Include03b.jsp", "This is a new attribute"); %>
<|start_filename|>container/tester/web/Xerces02.jsp<|end_filename|>
<%@ page contentType="text/plain"%>Xerces02 PASSED
<|start_filename|>container/tester/web/Forward05.jsp<|end_filename|>
<%@ page contentType="text/plain" %><jsp:forward page="/Forward05a"/>Forward05.jsp FAILED - Content should not be visible
<|start_filename|>connectors/jk/support/get_ver.awk<|end_filename|>
BEGIN {
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# fetch mod_jk version numbers from the input file and write them to STDOUT
while ((getline < ARGV[1]) > 0) {
if (match ($0, /^#define JK_VERMAJOR [^"]+/)) {
jk_ver_major = substr($3, 1, length($3));
}
else if (match ($0, /^#define JK_VERMINOR [^"]+/)) {
jk_ver_minor = substr($3, 1, length($3));
}
else if (match ($0, /^#define JK_VERFIX [^"]+/)) {
jk_ver_fix = substr($3, 1, length($3));
}
else if (match ($0, /^#define JK_VERISRELEASE [^"]+/)) {
jk_ver_isrelease = substr($3, 1, length($3));
}
else if (match ($0, /^#define JK_VERBETA [^"]+/)) {
jk_ver_isbeta = substr($3, 1, length($3));
}
else if (match ($0, /^#define JK_BETASTRING [^"]+/)) {
jk_ver_betastr = substr($3, 2, length($3) - 2);
}
}
jk_ver = jk_ver_major "," jk_ver_minor "," jk_ver_fix;
jk_ver_str = jk_ver_major "." jk_ver_minor "." jk_ver_fix;
if (jk_ver_isrelease != 1) {
jk_ver_str = jk_ver_str "-dev";
}
if (jk_ver_isbeta == 1) {
jk_ver_str = jk_ver_str "-beta-" jk_ver_betastr;
}
# fetch Apache version numbers from the input file and write them to STDOUT
if (ARGV[2]) {
if (match (ARGV[2], /ap_release.h/)) {
while ((getline < ARGV[2]) > 0) {
if (match ($0, /^#define AP_SERVER_MAJORVERSION "[^"]+"/)) {
ap_ver_major = substr($3, 2, length($3) - 2);
}
else if (match ($0, /^#define AP_SERVER_MINORVERSION "[^"]+"/)) {
ap_ver_minor = substr($3, 2, length($3) - 2);
}
else if (match ($0, /^#define AP_SERVER_PATCHLEVEL/)) {
ap_ver_str_patch = substr($3, 2, length($3) - 2);
if (match (ap_ver_str_patch, /[0-9][0-9]*/)) {
ap_ver_patch = substr(ap_ver_str_patch, RSTART, RLENGTH);
}
}
}
ap_ver_str = ap_ver_major "." ap_ver_minor "." ap_ver_str_patch;
}
if (match (ARGV[2], /httpd.h/)) {
while ((getline < ARGV[2]) > 0) {
if (match ($0, /^#define SERVER_BASEREVISION "[^"]+"/)) {
ap_ver_str = substr($3, 2, length($3) - 2);
}
}
}
print "AP_VERSION_STR = " ap_ver_str "";
}
print "JK_VERSION = " jk_ver "";
print "JK_VERSION_STR = " jk_ver_str "";
}
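# Invocation sketch (the jk_version.h name is an assumption for this example;
# ap_release.h matches the ARGV[2] check above):
#   awk -f get_ver.awk jk_version.h ap_release.h
# which prints the JK_VERSION, JK_VERSION_STR and AP_VERSION_STR assignments
# shown above to STDOUT.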
| jinbaizhe/tomcat-4 |
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/model/item/ItemModelProvider.java<|end_filename|>
package net.modificationstation.stationapi.api.client.model.item;
import net.minecraft.item.ItemInstance;
import net.modificationstation.stationapi.api.client.model.Model;
import net.modificationstation.stationapi.impl.client.model.BakedModelRenderer;
public interface ItemModelProvider extends ItemWithRenderer {
Model getModel(ItemInstance itemInstance);
@Override
default void render(ItemInstance itemInstance) {
BakedModelRenderer.renderInventory(getModel(itemInstance).getBaked());
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/model/BakedModel.java<|end_filename|>
package net.modificationstation.stationapi.api.client.model;
import com.google.common.collect.ImmutableList;
import net.minecraft.level.BlockView;
import net.minecraft.util.Vec3i;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.util.math.Direction;
import org.jetbrains.annotations.Nullable;
import java.util.*;
public interface BakedModel {
ImmutableList<Vertex> getVertexes(@Nullable BlockView blockView, @Nullable Vec3i blockPos, @Nullable Direction face, Random random);
boolean useAmbientOcclusion();
boolean hasDepth();
boolean isSideLit();
boolean isBuiltin();
Atlas.Sprite getSprite();
ModelTransformation getTransformation();
ModelOverrideList getOverrides();
}
<|start_filename|>src/testmod/java/net/modificationstation/sltest/block/Blocks.java<|end_filename|>
package net.modificationstation.sltest.block;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.minecraft.block.BlockBase;
import net.minecraft.block.material.Material;
import net.modificationstation.stationapi.api.event.registry.BlockRegistryEvent;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.api.template.block.BlockTemplate;
import net.modificationstation.stationapi.api.template.block.TemplateBlockBase;
import java.util.*;
import java.util.function.*;
import static net.modificationstation.sltest.SLTest.MODID;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
public enum Blocks {
TEST_BLOCK("test_block", "testBlock", id -> new TemplateBlockBase(id, Material.CLAY).setHardness(1)),
TEST_ANIMATED_BLOCK("test_animated_block", "testAnimatedBlock", id -> new ModdedMetaBlock(id, Material.PORTAL)),
CUSTOM_MODEL_BLOCK("farlands_block", "farlands_block", id -> new ModdedModelBlock(id, Material.DIRT).setHardness(1)),
FREEZER("freezer", "freezer", id -> new BlockFreezer(id).setHardness(2.5F).setSounds(TemplateBlockBase.STONE_SOUNDS)),
ALTAR("altar", "altar", id -> new BlockAltar(id, Material.STONE).setHardness(3));
private final Runnable register;
private BlockBase block;
<T extends BlockBase & BlockTemplate<T>> Blocks(String id, String translationKey, Function<Identifier, T> factory) {
this.register = () -> block = factory.apply(of(MODID, id)).setTranslationKey(MODID, translationKey);
}
public <T extends BlockBase & BlockTemplate<? super T>> T get() {
//noinspection unchecked
return (T) block;
}
public <T extends BlockBase & BlockTemplate<? super T>> T get(@SuppressWarnings("unused") Class<T> type) {
//noinspection unchecked
return (T) block;
}
public static class Init {
@EventListener
private static void registerBlocks(BlockRegistryEvent event) {
Arrays.stream(values()).forEach(blocks -> blocks.register.run());
}
}
}
<|start_filename|>station-blocks-v0/src/main/java/net/modificationstation/stationapi/impl/block/BlockRegistryInit.java<|end_filename|>
package net.modificationstation.stationapi.impl.block;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.mine_diver.unsafeevents.listener.ListenerPriority;
import net.modificationstation.stationapi.api.event.registry.BlockRegistryEvent;
import net.modificationstation.stationapi.api.mod.entrypoint.Entrypoint;
import net.modificationstation.stationapi.api.mod.entrypoint.EventBusPolicy;
import net.modificationstation.stationapi.api.registry.BlockRegistry;
import static net.minecraft.block.BlockBase.BED;
import static net.minecraft.block.BlockBase.BEDROCK;
import static net.minecraft.block.BlockBase.BOOKSHELF;
import static net.minecraft.block.BlockBase.BRICKS;
import static net.minecraft.block.BlockBase.BROWN_MUSHROOM;
import static net.minecraft.block.BlockBase.BUTTON;
import static net.minecraft.block.BlockBase.CACTUS;
import static net.minecraft.block.BlockBase.CAKE;
import static net.minecraft.block.BlockBase.CHEST;
import static net.minecraft.block.BlockBase.CLAY;
import static net.minecraft.block.BlockBase.COAL_ORE;
import static net.minecraft.block.BlockBase.COBBLESTONE;
import static net.minecraft.block.BlockBase.COBBLESTONE_STAIRS;
import static net.minecraft.block.BlockBase.COBWEB;
import static net.minecraft.block.BlockBase.CROPS;
import static net.minecraft.block.BlockBase.DANDELION;
import static net.minecraft.block.BlockBase.DEADBUSH;
import static net.minecraft.block.BlockBase.DETECTOR_RAIL;
import static net.minecraft.block.BlockBase.DIAMOND_BLOCK;
import static net.minecraft.block.BlockBase.DIAMOND_ORE;
import static net.minecraft.block.BlockBase.DIRT;
import static net.minecraft.block.BlockBase.DISPENSER;
import static net.minecraft.block.BlockBase.DOUBLE_STONE_SLAB;
import static net.minecraft.block.BlockBase.FARMLAND;
import static net.minecraft.block.BlockBase.FENCE;
import static net.minecraft.block.BlockBase.FIRE;
import static net.minecraft.block.BlockBase.FLOWING_LAVA;
import static net.minecraft.block.BlockBase.FLOWING_WATER;
import static net.minecraft.block.BlockBase.FURNACE;
import static net.minecraft.block.BlockBase.FURNACE_LIT;
import static net.minecraft.block.BlockBase.GLASS;
import static net.minecraft.block.BlockBase.GLOWSTONE;
import static net.minecraft.block.BlockBase.GOLDEN_RAIL;
import static net.minecraft.block.BlockBase.GOLD_BLOCK;
import static net.minecraft.block.BlockBase.GOLD_ORE;
import static net.minecraft.block.BlockBase.GRASS;
import static net.minecraft.block.BlockBase.GRAVEL;
import static net.minecraft.block.BlockBase.ICE;
import static net.minecraft.block.BlockBase.IRON_BLOCK;
import static net.minecraft.block.BlockBase.IRON_DOOR;
import static net.minecraft.block.BlockBase.IRON_ORE;
import static net.minecraft.block.BlockBase.JACK_O_LANTERN;
import static net.minecraft.block.BlockBase.JUKEBOX;
import static net.minecraft.block.BlockBase.LADDER;
import static net.minecraft.block.BlockBase.LAPIS_LAZULI_BLOCK;
import static net.minecraft.block.BlockBase.LAPIS_LAZULI_ORE;
import static net.minecraft.block.BlockBase.LEAVES;
import static net.minecraft.block.BlockBase.LEVER;
import static net.minecraft.block.BlockBase.LOCKED_CHEST;
import static net.minecraft.block.BlockBase.LOG;
import static net.minecraft.block.BlockBase.MOB_SPAWNER;
import static net.minecraft.block.BlockBase.MOSSY_COBBLESTONE;
import static net.minecraft.block.BlockBase.MOVING_PISTON;
import static net.minecraft.block.BlockBase.NETHERRACK;
import static net.minecraft.block.BlockBase.NOTEBLOCK;
import static net.minecraft.block.BlockBase.OBSIDIAN;
import static net.minecraft.block.BlockBase.PISTON;
import static net.minecraft.block.BlockBase.PISTON_HEAD;
import static net.minecraft.block.BlockBase.PORTAL;
import static net.minecraft.block.BlockBase.PUMPKIN;
import static net.minecraft.block.BlockBase.RAIL;
import static net.minecraft.block.BlockBase.REDSTONE_DUST;
import static net.minecraft.block.BlockBase.REDSTONE_ORE;
import static net.minecraft.block.BlockBase.REDSTONE_ORE_LIT;
import static net.minecraft.block.BlockBase.REDSTONE_REPEATER;
import static net.minecraft.block.BlockBase.REDSTONE_REPEATER_LIT;
import static net.minecraft.block.BlockBase.REDSTONE_TORCH;
import static net.minecraft.block.BlockBase.REDSTONE_TORCH_LIT;
import static net.minecraft.block.BlockBase.RED_MUSHROOM;
import static net.minecraft.block.BlockBase.ROSE;
import static net.minecraft.block.BlockBase.SAND;
import static net.minecraft.block.BlockBase.SANDSTONE;
import static net.minecraft.block.BlockBase.SAPLING;
import static net.minecraft.block.BlockBase.SNOW;
import static net.minecraft.block.BlockBase.SNOW_BLOCK;
import static net.minecraft.block.BlockBase.SOUL_SAND;
import static net.minecraft.block.BlockBase.SPONGE;
import static net.minecraft.block.BlockBase.STANDING_SIGN;
import static net.minecraft.block.BlockBase.STICKY_PISTON;
import static net.minecraft.block.BlockBase.STILL_LAVA;
import static net.minecraft.block.BlockBase.STILL_WATER;
import static net.minecraft.block.BlockBase.STONE;
import static net.minecraft.block.BlockBase.STONE_PRESSURE_PLATE;
import static net.minecraft.block.BlockBase.STONE_SLAB;
import static net.minecraft.block.BlockBase.SUGAR_CANES;
import static net.minecraft.block.BlockBase.TALLGRASS;
import static net.minecraft.block.BlockBase.TNT;
import static net.minecraft.block.BlockBase.TORCH;
import static net.minecraft.block.BlockBase.TRAPDOOR;
import static net.minecraft.block.BlockBase.WALL_SIGN;
import static net.minecraft.block.BlockBase.WOOD;
import static net.minecraft.block.BlockBase.WOODEN_PRESSURE_PLATE;
import static net.minecraft.block.BlockBase.WOOD_DOOR;
import static net.minecraft.block.BlockBase.WOOD_STAIRS;
import static net.minecraft.block.BlockBase.WOOL;
import static net.minecraft.block.BlockBase.WORKBENCH;
import static net.modificationstation.stationapi.api.StationAPI.LOGGER;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
/**
* @author mine_diver
*/
@Entrypoint(eventBus = @EventBusPolicy(registerInstance = false))
public class BlockRegistryInit {
@EventListener(priority = ListenerPriority.HIGH)
private static void registerBlocks(BlockRegistryEvent event) {
BlockRegistry r = event.registry;
r.register(of("stone"), STONE);
r.register(of("grass_block"), GRASS);
r.register(of("dirt"), DIRT);
r.register(of("cobblestone"), COBBLESTONE);
r.register(of("planks"), WOOD);
r.register(of("sapling"), SAPLING);
r.register(of("bedrock"), BEDROCK);
r.register(of("flowing_water"), FLOWING_WATER);
r.register(of("water"), STILL_WATER);
r.register(of("flowing_lava"), FLOWING_LAVA);
r.register(of("lava"), STILL_LAVA);
r.register(of("sand"), SAND);
r.register(of("gravel"), GRAVEL);
r.register(of("gold_ore"), GOLD_ORE);
r.register(of("iron_ore"), IRON_ORE);
r.register(of("coal_ore"), COAL_ORE);
r.register(of("log"), LOG);
r.register(of("leaves"), LEAVES);
r.register(of("sponge"), SPONGE);
r.register(of("glass"), GLASS);
r.register(of("lapis_ore"), LAPIS_LAZULI_ORE);
r.register(of("lapis_block"), LAPIS_LAZULI_BLOCK);
r.register(of("dispenser"), DISPENSER);
r.register(of("sandstone"), SANDSTONE);
r.register(of("note_block"), NOTEBLOCK);
r.register(of("bed"), BED);
r.register(of("powered_rail"), GOLDEN_RAIL);
r.register(of("detector_rail"), DETECTOR_RAIL);
r.register(of("sticky_piston"), STICKY_PISTON);
r.register(of("cobweb"), COBWEB);
r.register(of("grass"), TALLGRASS);
r.register(of("dead_bush"), DEADBUSH);
r.register(of("piston"), PISTON);
r.register(of("piston_head"), PISTON_HEAD);
r.register(of("wool"), WOOL);
r.register(of("moving_piston"), MOVING_PISTON);
r.register(of("dandelion"), DANDELION);
r.register(of("rose"), ROSE);
r.register(of("brown_mushroom"), BROWN_MUSHROOM);
r.register(of("red_mushroom"), RED_MUSHROOM);
r.register(of("gold_block"), GOLD_BLOCK);
r.register(of("iron_block"), IRON_BLOCK);
r.register(of("double_slab"), DOUBLE_STONE_SLAB);
r.register(of("slab"), STONE_SLAB);
r.register(of("bricks"), BRICKS);
r.register(of("tnt"), TNT);
r.register(of("bookshelf"), BOOKSHELF);
r.register(of("mossy_cobblestone"), MOSSY_COBBLESTONE);
r.register(of("obsidian"), OBSIDIAN);
r.register(of("torch"), TORCH);
r.register(of("fire"), FIRE);
r.register(of("spawner"), MOB_SPAWNER);
r.register(of("oak_stairs"), WOOD_STAIRS);
r.register(of("chest"), CHEST);
r.register(of("redstone_wire"), REDSTONE_DUST);
r.register(of("diamond_ore"), DIAMOND_ORE);
r.register(of("diamond_block"), DIAMOND_BLOCK);
r.register(of("crafting_table"), WORKBENCH);
r.register(of("wheat"), CROPS);
r.register(of("farmland"), FARMLAND);
r.register(of("furnace"), FURNACE);
r.register(of("furnace_lit"), FURNACE_LIT);
r.register(of("sign"), STANDING_SIGN);
r.register(of("oak_door"), WOOD_DOOR);
r.register(of("ladder"), LADDER);
r.register(of("rail"), RAIL);
r.register(of("cobblestone_stairs"), COBBLESTONE_STAIRS);
r.register(of("wall_sign"), WALL_SIGN);
r.register(of("lever"), LEVER);
r.register(of("oak_pressure_plate"), WOODEN_PRESSURE_PLATE);
r.register(of("iron_door"), IRON_DOOR);
r.register(of("stone_pressure_plate"), STONE_PRESSURE_PLATE);
r.register(of("redstone_ore"), REDSTONE_ORE);
r.register(of("redstone_ore_lit"), REDSTONE_ORE_LIT);
r.register(of("redstone_torch"), REDSTONE_TORCH);
r.register(of("redstone_torch_lit"), REDSTONE_TORCH_LIT);
r.register(of("button"), BUTTON);
r.register(of("snow"), SNOW);
r.register(of("ice"), ICE);
r.register(of("snow_block"), SNOW_BLOCK);
r.register(of("cactus"), CACTUS);
r.register(of("clay"), CLAY);
r.register(of("sugar_cane"), SUGAR_CANES);
r.register(of("jukebox"), JUKEBOX);
r.register(of("fence"), FENCE);
r.register(of("pumpkin"), PUMPKIN);
r.register(of("netherrack"), NETHERRACK);
r.register(of("soul_sand"), SOUL_SAND);
r.register(of("glowstone"), GLOWSTONE);
r.register(of("portal"), PORTAL);
r.register(of("jack_o_lantern"), JACK_O_LANTERN);
r.register(of("cake"), CAKE);
r.register(of("repeater"), REDSTONE_REPEATER);
r.register(of("repeater_lit"), REDSTONE_REPEATER_LIT);
r.register(of("locked_chest"), LOCKED_CHEST);
r.register(of("trapdoor"), TRAPDOOR);
LOGGER.info("Added vanilla blocks to the registry.");
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/texture/binder/AnimationTextureBinder.java<|end_filename|>
package net.modificationstation.stationapi.api.client.texture.binder;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.client.Minecraft;
import net.minecraft.client.resource.TexturePack;
import net.modificationstation.stationapi.api.client.texture.TextureAnimationData;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.util.math.MathHelper;
import net.modificationstation.stationapi.mixin.render.client.TextureManagerAccessor;
import java.awt.image.*;
public class AnimationTextureBinder extends StationTextureBinder {
private final TextureAnimationData animationData;
private final byte[][] frames;
private final byte[][][] interpolatedFrames;
@SuppressWarnings("deprecation")
private final TexturePack currentTexturePack = ((Minecraft) FabricLoader.getInstance().getGameInstance()).texturePackManager.texturePack;
private final boolean customFrameData;
private TextureAnimationData.Frame currentFrame;
private int currentFrameIndex;
private int timer;
public AnimationTextureBinder(BufferedImage image, Atlas.Sprite staticReference, TextureAnimationData animationData) {
super(staticReference);
this.animationData = animationData;
int
targetWidth = staticReference.getWidth(),
targetHeight = staticReference.getHeight(),
images = image.getHeight() / targetHeight;
frames = new byte[images][];
for (int i = 0; i < images; i++) {
int[] temp = new int[targetWidth * targetHeight];
image.getRGB(0, targetHeight * i, targetWidth, targetHeight, temp, 0, targetWidth);
frames[i] = new byte[targetWidth * targetHeight * 4];
for (int j = 0; j < temp.length; j++) {
int
a = temp[j] >> 24 & 0xff,
r = temp[j] >> 16 & 0xff,
g = temp[j] >> 8 & 0xff,
b = temp[j] & 0xff;
frames[i][j * 4] = (byte) r;
frames[i][j * 4 + 1] = (byte) g;
frames[i][j * 4 + 2] = (byte) b;
frames[i][j * 4 + 3] = (byte) a;
}
}
customFrameData = animationData.frames.size() > 0;
if (customFrameData) {
currentFrame = animationData.frames.get(currentFrameIndex);
grid = frames[currentFrame.index];
if (animationData.interpolate) {
interpolatedFrames = new byte[animationData.frames.size()][][];
for (int frameIndex = 0, framesSize = animationData.frames.size(); frameIndex < framesSize; frameIndex++) {
TextureAnimationData.Frame
frame = animationData.frames.get(frameIndex),
nextFrame = animationData.frames.get(frameIndex == framesSize - 1 ? 0 : frameIndex + 1);
byte[]
frameGrid = frames[frame.index],
nextFrameGrid = frames[nextFrame.index];
byte[][] interpolations = new byte[frame.time - 1][];
for (int interpolatedFrame = 0; interpolatedFrame < frame.time - 1; interpolatedFrame++) {
byte[] interpolatedFrameGrid = new byte[frameGrid.length];
for (int i = 0; i < interpolatedFrameGrid.length; i++)
interpolatedFrameGrid[i] = (byte) MathHelper.lerp((double) (interpolatedFrame + 1) / frame.time, Byte.toUnsignedInt(frameGrid[i]), Byte.toUnsignedInt(nextFrameGrid[i]));
interpolations[interpolatedFrame] = interpolatedFrameGrid;
}
interpolatedFrames[frameIndex] = interpolations;
}
} else
interpolatedFrames = null;
} else {
grid = frames[currentFrameIndex];
if (animationData.interpolate) {
interpolatedFrames = new byte[frames.length][][];
for (int frameIndex = 0; frameIndex < frames.length; frameIndex++) {
byte[]
frameGrid = frames[frameIndex],
nextFrameGrid = frames[frameIndex == frames.length - 1 ? 0 : frameIndex + 1];
byte[][] interpolations = new byte[animationData.frametime - 1][];
for (int interpolatedFrame = 0; interpolatedFrame < animationData.frametime - 1; interpolatedFrame++) {
byte[] interpolatedFrameGrid = new byte[frameGrid.length];
for (int i = 0; i < interpolatedFrameGrid.length; i++)
interpolatedFrameGrid[i] = (byte) MathHelper.lerp((double) (interpolatedFrame + 1) / animationData.frametime, Byte.toUnsignedInt(frameGrid[i]), Byte.toUnsignedInt(nextFrameGrid[i]));
interpolations[interpolatedFrame] = interpolatedFrameGrid;
}
interpolatedFrames[frameIndex] = interpolations;
}
} else
interpolatedFrames = null;
}
}
@Override
public void reloadFromTexturePack(TexturePack newTexturePack) {
if (!currentTexturePack.equals(newTexturePack))
//noinspection deprecation
((TextureManagerAccessor) ((Minecraft) FabricLoader.getInstance().getGameInstance()).textureManager).getTextureBinders().remove(this);
}
@Override
public void update() {
if (customFrameData) {
if (++timer >= currentFrame.time) {
timer = 0;
if (++currentFrameIndex >= animationData.frames.size())
currentFrameIndex = 0;
currentFrame = animationData.frames.get(currentFrameIndex);
grid = frames[currentFrame.index];
} else if (animationData.interpolate)
grid = interpolatedFrames[currentFrameIndex][timer - 1];
} else {
if (++timer >= animationData.frametime) {
timer = 0;
if (++currentFrameIndex >= frames.length)
currentFrameIndex = 0;
grid = frames[currentFrameIndex];
} else if (animationData.interpolate)
grid = interpolatedFrames[currentFrameIndex][timer - 1];
}
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/mixin/render/client/MixinDigging.java<|end_filename|>
package net.modificationstation.stationapi.mixin.render.client;
import lombok.Getter;
import net.minecraft.block.BlockBase;
import net.minecraft.client.render.Tessellator;
import net.minecraft.client.render.particle.Digging;
import net.minecraft.level.Level;
import net.modificationstation.stationapi.impl.client.texture.StationDiggingParticle;
import net.modificationstation.stationapi.impl.client.texture.StationDiggingParticleProvider;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
@Mixin(Digging.class)
public class MixinDigging implements StationDiggingParticleProvider {
@Getter
private StationDiggingParticle stationDiggingParticle;
@Inject(
method = "<init>(Lnet/minecraft/level/Level;DDDDDDLnet/minecraft/block/BlockBase;II)V",
at = @At("RETURN")
)
private void onCor(Level arg, double d, double d1, double d2, double d3, double d4, double d5, BlockBase arg1, int i, int j, CallbackInfo ci) {
stationDiggingParticle = new StationDiggingParticle((Digging) (Object) this);
}
@Inject(
method = "method_1856(III)Lnet/minecraft/client/render/particle/Digging;",
at = @At("HEAD")
)
private void checkBlockCoords(int i, int j, int k, CallbackInfoReturnable<Digging> cir) {
stationDiggingParticle.checkBlockCoords(i, j, k);
}
@Inject(
method = "method_2002(Lnet/minecraft/client/render/Tessellator;FFFFFF)V",
at = @At("HEAD"),
cancellable = true
)
private void redirectRender(Tessellator arg, float f, float f1, float f2, float f3, float f4, float f5, CallbackInfo ci) {
stationDiggingParticle.render(f, f1, f2, f3, f4, f5);
ci.cancel();
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/registry/ModelRegistry.java<|end_filename|>
package net.modificationstation.stationapi.api.client.registry;
import net.modificationstation.stationapi.api.client.model.Model;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.api.registry.Registry;
import org.jetbrains.annotations.NotNull;
import static net.modificationstation.stationapi.api.StationAPI.MODID;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
public final class ModelRegistry extends Registry<Model> {
public static final ModelRegistry INSTANCE = new ModelRegistry(of(MODID, "models"));
/**
* Default registry constructor.
*
* @param identifier registry's identifier.
*/
private ModelRegistry(@NotNull Identifier identifier) {
super(identifier);
}
}
<|start_filename|>station-items-v0/src/main/java/net/modificationstation/stationapi/mixin/item/client/MixinContainerBase.java<|end_filename|>
package net.modificationstation.stationapi.mixin.item.client;
import net.minecraft.client.gui.screen.ScreenBase;
import net.minecraft.client.gui.screen.container.ContainerBase;
import net.minecraft.container.slot.Slot;
import net.minecraft.entity.player.PlayerInventory;
import net.modificationstation.stationapi.api.StationAPI;
import net.modificationstation.stationapi.api.client.event.gui.TooltipRenderEvent;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Unique;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
// TODO: make this use translation keys instead and automatically add lines depending on the translated text's width.
@Mixin(ContainerBase.class)
public class MixinContainerBase extends ScreenBase {
@Unique
private boolean cancelTooltipRender;
@Inject(
method = "render(IIF)V",
at = @At(
value = "INVOKE",
target = "Ljava/lang/String;length()I"
),
locals = LocalCapture.CAPTURE_FAILHARD
)
private void renderTooltip(int mouseX, int mouseY, float delta, CallbackInfo ci, int containerX, int containerY, Slot slot, PlayerInventory inventory, String originalTooltip) {
cancelTooltipRender = StationAPI.EVENT_BUS.post(new TooltipRenderEvent(slot.getItem(), (ContainerBase) (Object) this, this.textManager, inventory, containerX, containerY, mouseX, mouseY, delta, originalTooltip)).isCancelled();
}
@Redirect(
method = "render(IIF)V",
at = @At(
value = "INVOKE",
target = "Ljava/lang/String;length()I"
)
)
private int cancelTooltipRender(String s) {
if (cancelTooltipRender) {
cancelTooltipRender = false;
return 0;
} else
return s.length();
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/mixin/render/MixinItemBase.java<|end_filename|>
package net.modificationstation.stationapi.mixin.render;
import net.minecraft.item.ItemBase;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlases;
import net.modificationstation.stationapi.api.client.texture.atlas.CustomAtlasProvider;
import org.spongepowered.asm.mixin.Mixin;
@Mixin(ItemBase.class)
public class MixinItemBase implements CustomAtlasProvider {
@Override
public Atlas getAtlas() {
return Atlases.getGuiItems();
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/texture/StationRenderAPI.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.texture;
import net.fabricmc.loader.api.FabricLoader;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.mine_diver.unsafeevents.listener.ListenerPriority;
import net.minecraft.block.BlockBase;
import net.minecraft.client.Minecraft;
import net.minecraft.client.render.Tessellator;
import net.minecraft.client.render.TextureBinder;
import net.minecraft.client.texture.TextureManager;
import net.modificationstation.stationapi.api.StationAPI;
import net.modificationstation.stationapi.api.client.event.resource.TexturePackLoadedEvent;
import net.modificationstation.stationapi.api.client.event.texture.TextureRegisterEvent;
import net.modificationstation.stationapi.api.client.registry.ModelRegistry;
import net.modificationstation.stationapi.api.client.texture.TexturePackDependent;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.client.texture.atlas.ExpandableAtlas;
import net.modificationstation.stationapi.api.client.texture.atlas.JsonModelAtlas;
import net.modificationstation.stationapi.api.client.texture.atlas.SquareAtlas;
import net.modificationstation.stationapi.api.mod.entrypoint.Entrypoint;
import net.modificationstation.stationapi.api.mod.entrypoint.EventBusPolicy;
import net.modificationstation.stationapi.api.registry.ModID;
import net.modificationstation.stationapi.api.util.Null;
import net.modificationstation.stationapi.mixin.render.client.TessellatorAccessor;
import net.modificationstation.stationapi.mixin.render.client.TextureManagerAccessor;
import org.lwjgl.opengl.GL11;
import java.util.*;
import java.util.stream.*;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
@Entrypoint(eventBus = @EventBusPolicy(registerInstance = false))
public class StationRenderAPI {
@Entrypoint.ModID
public static final ModID MODID = Null.get();
public static SquareAtlas
TERRAIN,
GUI_ITEMS;
public static ExpandableAtlas
STATION_TERRAIN,
STATION_GUI_ITEMS;
public static JsonModelAtlas STATION_JSON_MODELS;
@EventListener(priority = ListenerPriority.HIGH)
private static void init(TextureRegisterEvent event) {
TERRAIN = new SquareAtlas("/terrain.png", 16).setTessellator(Tessellator.INSTANCE);
GUI_ITEMS = new SquareAtlas("/gui/items.png", 16);
STATION_TERRAIN = new ExpandableAtlas(of(StationAPI.MODID, "terrain"), TERRAIN).initTessellator();
STATION_GUI_ITEMS = new ExpandableAtlas(of(StationAPI.MODID, "gui_items"), GUI_ITEMS);
STATION_JSON_MODELS = new JsonModelAtlas(of(StationAPI.MODID, "json_textures")).setTessellator(TessellatorAccessor.newInst(8388608));
//noinspection deprecation
TextureManager textureManager = ((Minecraft) FabricLoader.getInstance().getGameInstance()).textureManager;
TextureBinder textureBinder;
textureManager.addTextureBinder(new StationVanillaTextureBinder(TERRAIN.getTexture(BlockBase.FLOWING_WATER.texture), new StationStillWaterTextureBinder(), "/custom_water_still.png"));
textureBinder = new StationVanillaTextureBinder(TERRAIN.getTexture(BlockBase.FLOWING_WATER.texture + 1), new StationFlowingWaterTextureBinder(), "/custom_water_flowing.png");
textureBinder.textureSize = 2;
textureManager.addTextureBinder(textureBinder);
textureManager.addTextureBinder(new StationVanillaTextureBinder(TERRAIN.getTexture(BlockBase.FLOWING_LAVA.texture), new StationStillLavaTextureBinder(), "/custom_lava_still.png"));
textureBinder = new StationVanillaTextureBinder(TERRAIN.getTexture(BlockBase.FLOWING_LAVA.texture + 1), new StationFlowingLavaTextureBinder(), "/custom_lava_flowing.png");
textureBinder.textureSize = 2;
textureManager.addTextureBinder(textureBinder);
textureManager.addTextureBinder(new StationVanillaTextureBinder(TERRAIN.getTexture(BlockBase.FIRE.texture), new StationFireTextureBinder(0), "/custom_fire_e_w.png"));
textureManager.addTextureBinder(new StationVanillaTextureBinder(TERRAIN.getTexture(BlockBase.FIRE.texture + 16), new StationFireTextureBinder(1), "/custom_fire_n_s.png"));
textureManager.addTextureBinder(new StationVanillaTextureBinder(TERRAIN.getTexture(BlockBase.PORTAL.texture), new StationPortalTextureBinder(), "/custom_portal.png"));
textureManager.addTextureBinder(new StationCompassTextureBinder());
textureManager.addTextureBinder(new StationClockTextureBinder());
}
@EventListener(priority = ListenerPriority.HIGH)
private static void beforeTexturePackApplied(TexturePackLoadedEvent.Before event) {
Map<String, Integer> textureMap = ((TextureManagerAccessor) event.textureManager).getTextures();
textureMap.keySet().stream().filter(s -> event.newTexturePack.getResourceAsStream(s) == null).collect(Collectors.toList()).forEach(s -> GL11.glDeleteTextures(textureMap.remove(s)));
}
@EventListener(priority = ListenerPriority.HIGH)
private static void texturePackApplied(TexturePackLoadedEvent.After event) {
Atlas.getAtlases().forEach(atlas -> atlas.reloadFromTexturePack(event.newTexturePack));
ModelRegistry.INSTANCE.forEach((identifier, model) -> model.reloadFromTexturePack(event.newTexturePack));
new ArrayList<>(((TextureManagerAccessor) event.textureManager).getTextureBinders()).stream().filter(textureBinder -> textureBinder instanceof TexturePackDependent).forEach(textureBinder -> ((TexturePackDependent) textureBinder).reloadFromTexturePack(event.newTexturePack));
}
}
<|start_filename|>src/testmod/resources/fabric.mod.json<|end_filename|>
{
"schemaVersion": 1,
"id": "sltest",
"version": "1.0",
"name": "Test Mod",
"description": "stationapi test mod",
"authors": [
"mine_diver"
],
"contact": {
"discord": "mine_diver#0618"
},
"license": "CC0-1.0",
"icon": "assets/sltest/icon.png",
"environment": "*",
"entrypoints": {
"stationapi:event_bus": [
"net.modificationstation.sltest.block.Blocks$Init",
"net.modificationstation.sltest.item.ItemListener",
"net.modificationstation.sltest.packet.MessageListenerListener",
"net.modificationstation.sltest.SLTest",
"net.modificationstation.sltest.recipe.RecipeListener",
"net.modificationstation.sltest.achievement.AchievementListener",
"net.modificationstation.sltest.entity.player.PlayerHandlerListener",
"net.modificationstation.sltest.level.gen.ChunkListener",
"net.modificationstation.sltest.entity.EntityListener",
"net.modificationstation.sltest.tileentity.TileEntityListener",
"net.modificationstation.sltest.item.tool.ToolListener"
],
"stationapi:event_bus_client": [
"net.modificationstation.sltest.gui.GuiListener",
"net.modificationstation.sltest.option.OptionListener",
"net.modificationstation.sltest.keyboard.KeyboardListener",
"net.modificationstation.sltest.texture.TextureListener",
"net.modificationstation.sltest.render.entity.EntityRendererListener"
],
"main": [
"net.modificationstation.sltest.MainTest"
]
},
"mixins": [
"sltest.mixins.json"
],
"depends": {
"minecraft": "1.0.0-beta.7.3",
"stationapi": "*"
},
"custom": {
"stationapi:verify_client": true
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/texture/TextureAnimationData.java<|end_filename|>
package net.modificationstation.stationapi.api.client.texture;
import com.google.common.collect.ImmutableList;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import lombok.RequiredArgsConstructor;
import net.modificationstation.stationapi.api.client.resource.Resource;
import java.io.*;
import java.util.*;
@RequiredArgsConstructor
public class TextureAnimationData {
public final int frametime;
public final ImmutableList<Frame> frames;
public final boolean interpolate;
public static Optional<TextureAnimationData> parse(Resource resource) {
if (resource.getMeta().isPresent()) {
InputStream inputStream = resource.getMeta().get();
JsonElement tmp = JsonParser.parseReader(new InputStreamReader(inputStream));
if (tmp.isJsonObject()) {
JsonObject meta = tmp.getAsJsonObject();
if (meta.has("animation")) {
JsonObject animation = meta.getAsJsonObject("animation");
int frametime = animation.has("frametime") ? animation.getAsJsonPrimitive("frametime").getAsInt() : 1;
ImmutableList.Builder<Frame> frames = ImmutableList.builder();
if (animation.has("frames")) {
for (JsonElement element : animation.getAsJsonArray("frames")) {
if (element.isJsonPrimitive())
frames.add(new Frame(element.getAsInt(), frametime));
else if (element.isJsonObject()) {
JsonObject frame = element.getAsJsonObject();
frames.add(new Frame(frame.getAsJsonPrimitive("index").getAsInt(), frame.has("time") ? frame.getAsJsonPrimitive("time").getAsInt() : frametime));
} else
throw new RuntimeException("Unknown frame entry: " + element);
}
}
boolean interpolate = animation.has("interpolate") && animation.getAsJsonPrimitive("interpolate").getAsBoolean();
return Optional.of(new TextureAnimationData(frametime, frames.build(), interpolate));
}
}
}
return Optional.empty();
}
@RequiredArgsConstructor
public static class Frame {
public final int index;
public final int time;
}
}
<|start_filename|>station-dimensions-v0/src/main/java/net/modificationstation/stationapi/api/registry/DimensionRegistry.java<|end_filename|>
package net.modificationstation.stationapi.api.registry;
import net.modificationstation.stationapi.api.level.dimension.DimensionContainer;
import org.jetbrains.annotations.NotNull;
import java.util.*;
import static net.modificationstation.stationapi.api.StationAPI.MODID;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
public final class DimensionRegistry extends LevelSerialRegistry<DimensionContainer<?>> {
public static final DimensionRegistry INSTANCE = new DimensionRegistry();
private final TreeMap<Integer, DimensionContainer<?>> values = new TreeMap<>();
public final NavigableMap<Integer, DimensionContainer<?>> serialView = Collections.unmodifiableNavigableMap(values);
private boolean badcode;
private DimensionRegistry() {
super(of(MODID, "dimensions"));
}
@Override
public int getSize() {
return Integer.MAX_VALUE;
}
@Override
public int getSerialID(@NotNull DimensionContainer<?> value) {
return value.serialID;
}
@Override
public @NotNull Optional<DimensionContainer<?>> get(int serialID) {
return Optional.ofNullable(serialView.get(serialID));
}
@Override
public int getSerialIDShift() {
return 0;
}
@Override
protected void remap(int newSerialID, @NotNull DimensionContainer<?> value) {
Identifier id = getIdentifier(value);
unregister(id);
values.remove(getSerialID(value));
if (serialView.containsKey(newSerialID))
remap(getNextSerialID(), serialView.get(newSerialID));
value.serialID = newSerialID;
super.register(id, value);
values.put(newSerialID, value);
}
@Override
public void register(@NotNull Identifier identifier, @NotNull DimensionContainer<?> value) {
if (badcode)
super.register(identifier, value);
else {
badcode = true;
register(identifier, id -> {
value.serialID = id;
values.put(id, value);
return value;
});
badcode = false;
}
}
public void register(@NotNull Identifier identifier, int serialID, @NotNull DimensionContainer<?> value) {
value.serialID = serialID;
values.put(serialID, value);
super.register(identifier, value);
}
}
<|start_filename|>station-lifecycle-events-v0/src/main/resources/station-lifecycle-events-v0.mixins.json<|end_filename|>
{
"required": true,
"minVersion": "0.8",
"package": "net.modificationstation.stationapi.mixin.lifecycle",
"compatibilityLevel": "JAVA_8",
"mixins": [
],
"server": [
"server.MixinPendingConnection"
],
"client": [
"client.MixinClientPlayNetworkHandler",
"client.MixinTextureManager"
],
"injectors": {
"defaultRequire": 1
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/model/ModelOverrideList.java<|end_filename|>
package net.modificationstation.stationapi.api.client.model;
public class ModelOverrideList {
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/texture/atlas/Atlas.java<|end_filename|>
package net.modificationstation.stationapi.api.client.texture.atlas;
import lombok.Getter;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.client.Minecraft;
import net.minecraft.client.render.Tessellator;
import net.minecraft.client.resource.TexturePack;
import net.modificationstation.stationapi.api.client.texture.TextureHelper;
import net.modificationstation.stationapi.api.client.texture.TexturePackDependent;
import net.modificationstation.stationapi.api.client.texture.binder.StationTextureBinder;
import net.modificationstation.stationapi.mixin.render.client.TessellatorAccessor;
import uk.co.benjiweber.expressions.function.ObjIntFunction;
import java.awt.image.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.*;
public abstract class Atlas implements TexturePackDependent {
private static final Set<Atlas> atlases = new HashSet<>();
public static Collection<Atlas> getAtlases() {
return Collections.unmodifiableSet(atlases);
}
public final String spritesheet;
protected final Atlas parent;
protected int size;
public final boolean fixedSize;
protected final List<Sprite> textures = new CopyOnWriteArrayList<>();
private Tessellator tessellator;
protected BufferedImage imageCache;
public Atlas(final String spritesheet, final int size, final boolean fixedSize) {
this(spritesheet, size, fixedSize, null);
}
public Atlas(final String spritesheet, final int size, final boolean fixedSize, final Atlas parent) {
this.spritesheet = spritesheet;
if (parent == null)
this.size = size;
else {
if (parent.fixedSize)
this.size = parent.size + size;
else
throw new UnsupportedOperationException("Parent atlas can't have dynamic size!");
}
this.fixedSize = fixedSize;
this.parent = parent;
atlases.add(this);
init();
}
protected abstract void init();
public InputStream getStream() {
return TextureHelper.getTextureStream(spritesheet);
}
public BufferedImage getImage() {
return imageCache == null ? imageCache = TextureHelper.getTexture(spritesheet) : imageCache;
}
@Override
public void reloadFromTexturePack(TexturePack newTexturePack) {
imageCache = null;
}
public final <E extends Atlas> E setTessellator(Tessellator tessellator) {
if (this.tessellator == null) {
this.tessellator = tessellator;
//noinspection unchecked
return (E) this;
} else
throw new UnsupportedOperationException("Tried setting a new tessellator for " + spritesheet + " texture atlas, but there's already a tessellator set up.");
}
public final <E extends Atlas> E initTessellator() {
return setTessellator(TessellatorAccessor.newInst(2097152));
}
protected final <T> T applyInherited(int textureIndex, IntFunction<T> atlasBounds, ObjIntFunction<Atlas, T> parentBounds) {
if (parent == null) {
if (0 <= textureIndex && textureIndex < size)
return atlasBounds.apply(textureIndex);
} else {
if (textureIndex < parent.size)
return parentBounds.apply(parent, textureIndex);
else if (textureIndex < size)
return atlasBounds.apply(textureIndex - parent.size);
}
throw new IllegalArgumentException("Texture index " + textureIndex + " out of bounds of " + spritesheet + " atlas!");
}
public final <T extends Atlas> T of(int textureIndex) {
//noinspection unchecked
return (T) applyInherited(textureIndex, value -> this, Atlas::of);
}
public final Tessellator getTessellator() {
return tessellator;
}
public final int getAtlasTextureID() {
//noinspection deprecation
return ((Minecraft) FabricLoader.getInstance().getGameInstance()).textureManager.getTextureId(spritesheet);
}
public final void bindAtlas() {
//noinspection deprecation
((Minecraft) FabricLoader.getInstance().getGameInstance()).textureManager.bindTexture(getAtlasTextureID());
}
public final Sprite getTexture(int textureIndex) {
return applyInherited(textureIndex, textures::get, Atlas::getTexture);
}
public final int getUnitSize() {
return parent == null ? size : size - parent.size;
}
public <T extends StationTextureBinder> T addTextureBinder(int staticReferenceTextureIndex, Function<Sprite, T> initializer) {
return addTextureBinder(getTexture(staticReferenceTextureIndex), initializer);
}
public <T extends StationTextureBinder> T addTextureBinder(Sprite staticReference, Function<Sprite, T> initializer) {
T textureBinder = initializer.apply(staticReference);
//noinspection deprecation
((Minecraft) FabricLoader.getInstance().getGameInstance()).textureManager.addTextureBinder(textureBinder);
return textureBinder;
}
public class Sprite {
public final int index;
@Getter
protected int
x, y,
width, height;
@Getter
protected double
startU, endU,
startV, endV;
protected Sprite(int index, int x, int y, int width, int height) {
this.index = index;
this.x = x;
this.y = y;
this.width = width;
this.height = height;
updateUVs();
}
public final Atlas getAtlas() {
return Atlas.this;
}
protected final void updateUVs() {
BufferedImage image = getAtlas().getImage();
int
atlasWidth = image.getWidth(),
atlasHeight = image.getHeight();
this.startU = (double) x / atlasWidth;
this.endU = (double) (x + width) / atlasWidth;
this.startV = (double) y / atlasHeight;
this.endV = (double) (y + height) / atlasHeight;
}
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/model/JsonFaceData.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.model;
import com.google.gson.annotations.SerializedName;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.util.Null;
import net.modificationstation.stationapi.api.util.math.Direction;
import java.awt.image.*;
import java.util.stream.*;
@RequiredArgsConstructor
public class JsonFaceData {
@SerializedName("uv")
double[] localUVs;
@Getter
private transient double[] uv;
@Getter
int rotation = 0;
@SerializedName("texture")
public final String textureId;
@Getter
private transient Atlas.Sprite texture;
public final Direction cullface = Null.get();
public void postprocess(Atlas.Sprite texture) {
this.texture = texture;
IntStream.range(0, localUVs.length).forEach(i -> localUVs[i] /= 16);
}
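    // Recomputes absolute atlas UVs by mapping the normalized local UVs onto the sprite's
    // position and size within its atlas image.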
public void updateUVs() {
if (texture != null) {
BufferedImage atlasImage = texture.getAtlas().getImage();
int
textureX = texture.getX(),
textureY = texture.getY(),
textureWidth = texture.getWidth(),
textureHeight = texture.getHeight(),
atlasWidth = atlasImage.getWidth(),
atlasHeight = atlasImage.getHeight();
double[] uv = new double[localUVs.length];
for (int i = 0; i < localUVs.length; i += 2) {
uv[i] = (textureX + localUVs[i] * textureWidth) / atlasWidth;
uv[i + 1] = (textureY + localUVs[i + 1] * textureHeight) / atlasHeight;
}
this.uv = uv;
}
}
}
<|start_filename|>station-dimensions-v0/src/main/java/net/modificationstation/stationapi/mixin/dimension/MixinMcRegionDimensionFile.java<|end_filename|>
package net.modificationstation.stationapi.mixin.dimension;
import net.minecraft.level.dimension.Dimension;
import net.minecraft.level.dimension.McRegionDimensionFile;
import net.modificationstation.stationapi.api.level.dimension.VanillaDimensions;
import net.modificationstation.stationapi.api.registry.DimensionRegistry;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Constant;
import org.spongepowered.asm.mixin.injection.ModifyConstant;
import org.spongepowered.asm.mixin.injection.Redirect;
@Mixin(McRegionDimensionFile.class)
public class MixinMcRegionDimensionFile {
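    // Vanilla only special-cases the Nether (an instanceof check plus the hardcoded "DIM-1" folder).
    // The redirect below makes every non-overworld dimension use its own folder, and the constant
    // modification renames that folder to "DIM<id>".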
@Redirect(
method = "getChunkIO(Lnet/minecraft/level/dimension/Dimension;)Lnet/minecraft/level/chunk/ChunkIO;",
at = @At(
value = "CONSTANT",
args = "classValue=net/minecraft/level/dimension/Nether"
)
)
private boolean isNotOverworld(Object dimension, Class<?> netherClass) {
return !DimensionRegistry.INSTANCE.getIdentifier(((Dimension) dimension).id).map(VanillaDimensions.OVERWORLD::equals).orElse(true);
}
@ModifyConstant(
method = "getChunkIO(Lnet/minecraft/level/dimension/Dimension;)Lnet/minecraft/level/chunk/ChunkIO;",
constant = @Constant(stringValue = "DIM-1")
)
private String modifyDimensionPath(String constant, Dimension dimension) {
return "DIM" + dimension.id;
}
}
<|start_filename|>station-vanilla-checker-v0/src/main/resources/station-vanilla-checker-v0.mixins.json<|end_filename|>
{
"required": true,
"minVersion": "0.8",
"package": "net.modificationstation.stationapi.mixin.network",
"compatibilityLevel": "JAVA_8",
"mixins": [
"MixinLoginRequest0x1Packet",
"MixinPacketHandler"
],
"server": [
"server.MixinServerPacketHandler"
],
"client": [
],
"injectors": {
"defaultRequire": 1
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/model/JsonCuboidData.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.model;
import com.google.common.primitives.Doubles;
import lombok.Getter;
import net.modificationstation.stationapi.api.util.Null;
import net.modificationstation.stationapi.api.util.math.Direction;
import java.util.*;
import static net.modificationstation.stationapi.api.util.math.Direction.DOWN;
import static net.modificationstation.stationapi.api.util.math.Direction.UP;
public class JsonCuboidData {
public final double[] from = Null.get();
public final double[] to = Null.get();
public final EnumMap<Direction, JsonFaceData> faces = Null.get();
@SuppressWarnings("FieldMayBeFinal")
@Getter
private boolean shade = true;
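    // Converts the deserialized cuboid from JSON model space into the renderer's coordinate space
    // (reversing the coordinate order and mirroring one axis), derives default per-face UVs where
    // the model omits them, normalizes positions to 0..1, and expands the UP/DOWN face UVs into
    // pre-rotated pairs.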
public void postprocess() {
Collections.reverse(Doubles.asList(from));
Collections.reverse(Doubles.asList(to));
double fromTmp = from[2];
from[2] = 16 - to[2];
to[2] = 16 - fromTmp;
faces.forEach((direction, face) -> {
if (face.localUVs == null)
face.localUVs = getRotatedMatrix(direction);
});
from[0] /= 16;
from[1] /= 16;
from[2] /= 16;
to[0] /= 16;
to[1] /= 16;
to[2] /= 16;
faces.forEach((direction, face) -> {
if (direction == DOWN || direction == UP)
face.rotation += 90;
face.rotation = (face.rotation / 90) % 4;
double
tmp,
startU1 = face.localUVs[0],
startV1 = face.localUVs[1],
endU1 = face.localUVs[2],
endV1 = face.localUVs[3];
switch (direction) {
case DOWN:
if (face.rotation % 2 == 0) {
tmp = startV1;
startV1 = endV1;
endV1 = tmp;
} else {
tmp = startU1;
startU1 = endU1;
endU1 = tmp;
}
double
endU2 = endU1,
startU2 = startU1,
startV2 = startV1,
endV2 = endV1;
switch (face.rotation) {
case 1:
startV2 = startV1;
endV2 = endV1;
endU2 = startU1;
startU2 = endU1;
startV1 = endV1;
endV1 = startV2;
break;
case 2:
endU2 = endU1;
startU2 = startU1;
startV2 = startV1;
endV2 = endV1;
break;
case 3:
endU2 = endU1;
startU2 = startU1;
startU1 = endU1;
endU1 = startU2;
startV2 = endV1;
endV2 = startV1;
break;
}
face.localUVs = new double[] {startU1, startV1, endU1, endV1, startU2, startV2, endU2, endV2};
break;
case UP:
startU1 = faces.get(UP).localUVs[0];
startV1 = faces.get(UP).localUVs[1];
endU1 = faces.get(UP).localUVs[2];
endV1 = faces.get(UP).localUVs[3];
endU2 = endU1;
startU2 = startU1;
startV2 = startV1;
endV2 = endV1;
switch (faces.get(UP).rotation) {
case 1:
endU2 = endU1;
startU2 = startU1;
startU1 = endU1;
endU1 = startU2;
startV2 = endV1;
endV2 = startV1;
break;
case 2:
endU2 = endU1;
startU2 = startU1;
startV2 = startV1;
endV2 = endV1;
break;
case 3:
startV2 = startV1;
endV2 = endV1;
endU2 = startU1;
startU2 = endU1;
startV1 = endV1;
endV1 = startV2;
break;
}
face.localUVs = new double[] {startU1, startV1, endU1, endV1, startU2, startV2, endU2, endV2};
break;
}
});
}
private double[] getRotatedMatrix(Direction direction) {
switch(direction) {
case DOWN:
return new double[]{this.from[0], 16 - this.to[2], this.to[0], 16 - this.from[2]};
case UP:
return new double[]{this.from[0], this.from[2], this.to[0], this.to[2]};
case NORTH:
default:
return new double[]{16 - this.to[0], 16 - this.to[1], 16 - this.from[0], 16 - this.from[1]};
case SOUTH:
return new double[]{this.from[0], 16 - this.to[1], this.to[0], 16 - this.from[1]};
case WEST:
return new double[]{this.from[2], 16 - this.to[1], this.to[2], 16 - this.from[1]};
case EAST:
return new double[]{16 - this.to[2], 16 - this.to[1], 16 - this.from[2], 16 - this.from[1]};
}
}
}
<|start_filename|>station-api-base/src/main/java/net/modificationstation/stationapi/api/StationAPI.java<|end_filename|>
package net.modificationstation.stationapi.api;
import net.fabricmc.loader.api.FabricLoader;
import net.fabricmc.loader.api.entrypoint.PreLaunchEntrypoint;
import net.mine_diver.unsafeevents.EventBus;
import net.modificationstation.stationapi.api.event.mod.InitEvent;
import net.modificationstation.stationapi.api.event.mod.PostInitEvent;
import net.modificationstation.stationapi.api.event.mod.PreInitEvent;
import net.modificationstation.stationapi.api.mod.entrypoint.Entrypoint;
import net.modificationstation.stationapi.api.mod.entrypoint.EntrypointManager;
import net.modificationstation.stationapi.api.mod.entrypoint.EventBusPolicy;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.api.registry.ModID;
import net.modificationstation.stationapi.api.util.Null;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.config.Configurator;
/**
 * StationAPI's main class. Performs the API's early, pre-launch initialization.
* @author mine_diver
*/
@Entrypoint(eventBus = @EventBusPolicy(registerInstance = false, registerStatic = false))
public class StationAPI implements PreLaunchEntrypoint {
/**
* StationAPI's instance.
*/
@Entrypoint.Instance
public static final StationAPI INSTANCE = Null.get();
/**
* StationAPI's ModID.
*/
@Entrypoint.ModID
public static final ModID MODID = Null.get();
@Entrypoint.Logger("Station|API")
public static final Logger LOGGER = Null.get();
public static final EventBus EVENT_BUS = new EventBus();
/**
 * Initial setup. Configures the logger and entrypoints, then invokes the rest of the initialization sequence. No Minecraft classes may be referenced here.
*/
@Override
public void onPreLaunch() {
FabricLoader.getInstance().getModContainer("stationapi").ifPresent(modContainer -> EntrypointManager.setup(this, modContainer));
String name = MODID.getName();
LOGGER.info("Initializing " + name + "...");
Configurator.setLevel("mixin", Level.TRACE);
Configurator.setLevel("Fabric|Loader", Level.INFO);
LOGGER.info("Loading entrypoints...");
setupMods();
LOGGER.info("Finished " + name + " setup.");
}
/**
 * Loads the main entrypoints and invokes the preInit, init, and postInit events. No Minecraft classes may be referenced here.
*/
private void setupMods() {
FabricLoader fabricLoader = FabricLoader.getInstance();
fabricLoader.getEntrypointContainers(Identifier.of(MODID, "event_bus").toString(), Object.class).forEach(EntrypointManager::setup);
fabricLoader.getEntrypointContainers(Identifier.of(MODID, "event_bus_" + fabricLoader.getEnvironmentType().name().toLowerCase()).toString(), Object.class).forEach(EntrypointManager::setup);
LOGGER.info("Invoking PreInit event...");
EVENT_BUS.post(new PreInitEvent());
LOGGER.info("Invoking Init event...");
EVENT_BUS.post(new InitEvent());
LOGGER.info("Invoking PostInit event...");
EVENT_BUS.post(new PostInitEvent());
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/texture/StationBlockRenderer.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.texture;
import net.minecraft.block.Bed;
import net.minecraft.block.BlockBase;
import net.minecraft.client.render.Tessellator;
import net.minecraft.client.render.block.BlockRenderer;
import net.minecraft.sortme.GameRenderer;
import net.minecraft.sortme.MagicBedNumbers;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlases;
import net.modificationstation.stationapi.api.client.texture.atlas.CustomAtlasProvider;
import net.modificationstation.stationapi.mixin.render.client.BlockRendererAccessor;
import net.modificationstation.stationapi.mixin.render.client.TessellatorAccessor;
import java.awt.image.*;
import java.util.*;
public class StationBlockRenderer {
public final Set<Atlas> activeAtlases = new HashSet<>();
public final BlockRendererAccessor blockRendererAccessor;
public StationBlockRenderer(BlockRenderer tileRenderer) {
blockRendererAccessor = (BlockRendererAccessor) tileRenderer;
}
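    // Draws and resets the buffered tessellator of every atlas touched during this pass,
    // then rebinds the terrain atlas.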
public void renderActiveAtlases() {
if (!activeAtlases.isEmpty()) {
activeAtlases.forEach(atlas -> {
atlas.bindAtlas();
Tessellator tessellator = atlas.getTessellator();
tessellator.draw();
tessellator.setOffset(0, 0, 0);
});
activeAtlases.clear();
Atlases.getTerrain().bindAtlas();
}
}
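    // When rendering in an inventory, the shared Tessellator is used and the atlas is bound immediately;
    // otherwise the atlas's dedicated tessellator is started if necessary, inherits the vanilla
    // tessellator's offsets and colour, and is queued for a later draw in renderActiveAtlases().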
public Tessellator prepareTessellator(Atlas atlas, boolean renderingInInventory) {
Tessellator tessellator;
if (renderingInInventory) {
tessellator = Tessellator.INSTANCE;
atlas.bindAtlas();
} else {
tessellator = atlas.getTessellator();
TessellatorAccessor originalAccessor = (TessellatorAccessor) Tessellator.INSTANCE;
if (!((TessellatorAccessor) tessellator).getDrawing()) {
activeAtlases.add(atlas);
tessellator.start();
tessellator.setOffset(originalAccessor.getXOffset(), originalAccessor.getYOffset(), originalAccessor.getZOffset());
}
if (originalAccessor.getHasColour())
tessellator.colour(originalAccessor.getColour());
}
return tessellator;
}
public boolean renderBed(BlockBase block, int blockX, int blockY, int blockZ, boolean renderingInInventory) {
Atlas atlas = ((CustomAtlasProvider) block).getAtlas();
Tessellator var5 = prepareTessellator(atlas, renderingInInventory);
Atlas.Sprite texture;
int
var6 = blockRendererAccessor.getBlockView().getTileMeta(blockX, blockY, blockZ),
var7 = Bed.orientationOnly(var6);
boolean var8 = Bed.isFoot(var6);
float
var9 = 0.5F,
var10 = 1.0F,
var11 = 0.8F,
var12 = 0.6F,
var25 = block.getBrightness(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ);
var5.colour(var9 * var25, var9 * var25, var9 * var25);
int var26 = block.getTextureForSide(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ, 0);
texture = atlas.getTexture(var26);
double
var37 = (double)blockX + block.minX,
var39 = (double)blockX + block.maxX,
var41 = (double)blockY + block.minY + 0.1875D,
var43 = (double)blockZ + block.minZ,
var45 = (double)blockZ + block.maxZ;
var5.vertex(var37, var41, var45, texture.getStartU(), texture.getEndV());
var5.vertex(var37, var41, var43, texture.getStartU(), texture.getStartV());
var5.vertex(var39, var41, var43, texture.getEndU(), texture.getStartV());
var5.vertex(var39, var41, var45, texture.getEndU(), texture.getEndV());
float var64 = block.getBrightness(blockRendererAccessor.getBlockView(), blockX, blockY + 1, blockZ);
var5.colour(var10 * var64, var10 * var64, var10 * var64);
var26 = block.getTextureForSide(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ, 1);
texture = atlas.getTexture(var26);
double
var30 = texture.getStartU(),
var32 = texture.getEndU(),
var34 = texture.getStartV(),
var36 = texture.getEndV(),
var38 = var30,
var40 = var32,
var42 = var34,
var44 = var34,
var46 = var30,
var48 = var32,
var50 = var36,
var52 = var36;
if (var7 == 0) {
var40 = var30;
var42 = var36;
var46 = var32;
var52 = var34;
} else if (var7 == 2) {
var38 = var32;
var44 = var36;
var48 = var30;
var50 = var34;
} else if (var7 == 3) {
var38 = var32;
var44 = var36;
var48 = var30;
var50 = var34;
var40 = var30;
var42 = var36;
var46 = var32;
var52 = var34;
}
double
var54 = (double)blockX + block.minX,
var56 = (double)blockX + block.maxX,
var58 = (double)blockY + block.maxY,
var60 = (double)blockZ + block.minZ,
var62 = (double)blockZ + block.maxZ;
var5.vertex(var56, var58, var62, var46, var50);
var5.vertex(var56, var58, var60, var38, var42);
var5.vertex(var54, var58, var60, var40, var44);
var5.vertex(var54, var58, var62, var48, var52);
int var65 = MagicBedNumbers.field_792[var7];
if (var8)
var65 = MagicBedNumbers.field_792[MagicBedNumbers.field_793[var7]];
int rotation;
switch(var7) {
case 0:
rotation = 5;
break;
case 1:
rotation = 3;
break;
case 2:
default:
rotation = 4;
break;
case 3:
rotation = 2;
break;
}
if (var65 != 2 && (blockRendererAccessor.getRenderAllSides() || block.isSideRendered(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ - 1, 2))) {
float var69 = block.getBrightness(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ - 1);
if (block.minZ > 0.0D)
var69 = var25;
var5.colour(var11 * var69, var11 * var69, var11 * var69);
blockRendererAccessor.setMirrorTexture(rotation == 2);
this.renderEastFace(block, blockX, blockY, blockZ, block.getTextureForSide(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ, 2), renderingInInventory);
}
if (var65 != 3 && (blockRendererAccessor.getRenderAllSides() || block.isSideRendered(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ + 1, 3))) {
float var70 = block.getBrightness(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ + 1);
if (block.maxZ < 1.0D)
var70 = var25;
var5.colour(var11 * var70, var11 * var70, var11 * var70);
blockRendererAccessor.setMirrorTexture(rotation == 3);
this.renderWestFace(block, blockX, blockY, blockZ, block.getTextureForSide(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ, 3), renderingInInventory);
}
if (var65 != 4 && (blockRendererAccessor.getRenderAllSides() || block.isSideRendered(blockRendererAccessor.getBlockView(), blockX - 1, blockY, blockZ, 4))) {
float var71 = block.getBrightness(blockRendererAccessor.getBlockView(), blockX - 1, blockY, blockZ);
if (block.minX > 0.0D)
var71 = var25;
var5.colour(var12 * var71, var12 * var71, var12 * var71);
blockRendererAccessor.setMirrorTexture(rotation == 4);
this.renderNorthFace(block, blockX, blockY, blockZ, block.getTextureForSide(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ, 4), renderingInInventory);
}
if (var65 != 5 && (blockRendererAccessor.getRenderAllSides() || block.isSideRendered(blockRendererAccessor.getBlockView(), blockX + 1, blockY, blockZ, 5))) {
float var72 = block.getBrightness(blockRendererAccessor.getBlockView(), blockX + 1, blockY, blockZ);
if (block.maxX < 1.0D)
var72 = var25;
var5.colour(var12 * var72, var12 * var72, var12 * var72);
blockRendererAccessor.setMirrorTexture(rotation == 5);
this.renderSouthFace(block, blockX, blockY, blockZ, block.getTextureForSide(blockRendererAccessor.getBlockView(), blockX, blockY, blockZ, 5), renderingInInventory);
}
blockRendererAccessor.setMirrorTexture(false);
return true;
}
public boolean renderPlant(BlockBase block, int x, int y, int z, boolean renderingInInventory) {
float var6 = block.getBrightness(blockRendererAccessor.getBlockView(), x, y, z);
int var7 = block.getColourMultiplier(blockRendererAccessor.getBlockView(), x, y, z);
int meta = blockRendererAccessor.getBlockView().getTileMeta(x, y, z);
Tessellator var5 = prepareTessellator(((CustomAtlasProvider) block).getAtlas().of(block.getTextureForSide(0, blockRendererAccessor.getBlockView().getTileMeta(x, y, z))), renderingInInventory);
float var8 = (float)(var7 >> 16 & 255) / 255.0F;
float var9 = (float)(var7 >> 8 & 255) / 255.0F;
float var10 = (float)(var7 & 255) / 255.0F;
if (GameRenderer.field_2340) {
float var11 = (var8 * 30.0F + var9 * 59.0F + var10 * 11.0F) / 100.0F;
float var12 = (var8 * 30.0F + var9 * 70.0F) / 100.0F;
float var13 = (var8 * 30.0F + var10 * 70.0F) / 100.0F;
var8 = var11;
var9 = var12;
var10 = var13;
}
var5.colour(var6 * var8, var6 * var9, var6 * var10);
double var19 = x;
double var20 = y;
double var15 = z;
if (block == BlockBase.TALLGRASS) {
long var17 = (x * 3129871L) ^ (long)z * 116129781L ^ (long)y;
var17 = var17 * var17 * 42317861L + var17 * 11L;
var19 += ((double)((float)(var17 >> 16 & 15L) / 15.0F) - 0.5D) * 0.5D;
var20 += ((double)((float)(var17 >> 20 & 15L) / 15.0F) - 1.0D) * 0.2D;
var15 += ((double)((float)(var17 >> 24 & 15L) / 15.0F) - 0.5D) * 0.5D;
}
this.renderCrossed(block, meta, var19, var20, var15, renderingInInventory);
return true;
}
public boolean renderCrops(BlockBase block, int x, int y, int z, boolean renderingInInventory) {
float var6 = block.getBrightness(blockRendererAccessor.getBlockView(), x, y, z);
int meta = blockRendererAccessor.getBlockView().getTileMeta(x, y, z);
Tessellator var5 = prepareTessellator(((CustomAtlasProvider) block).getAtlas().of(block.getTextureForSide(0, blockRendererAccessor.getBlockView().getTileMeta(x, y, z))), renderingInInventory);
var5.colour(var6, var6, var6);
this.renderShiftedColumn(block, meta, x, (float)y - 0.0625F, z, renderingInInventory);
return true;
}
public void renderCrossed(BlockBase block, int meta, double x, double y, double z, boolean renderingInInventory) {
Atlas atlas;
Atlas.Sprite texture;
if (blockRendererAccessor.getTextureOverride() >= 0) {
atlas = Atlases.getTerrain();
texture = atlas.getTexture(blockRendererAccessor.getTextureOverride());
} else {
int textureIndex = block.getTextureForSide(0, meta);
atlas = ((CustomAtlasProvider) block).getAtlas().of(textureIndex);
texture = atlas.getTexture(textureIndex);
}
Tessellator t = prepareTessellator(atlas, renderingInInventory);
double
var13 = texture.getStartU(),
var15 = texture.getEndU(),
var17 = texture.getStartV(),
var19 = texture.getEndV(),
var21 = x + 0.5D - (double)0.45F,
var23 = x + 0.5D + (double)0.45F,
var25 = z + 0.5D - (double)0.45F,
var27 = z + 0.5D + (double)0.45F;
t.vertex(var21, y + 1.0D, var25, var13, var17);
t.vertex(var21, y + 0.0D, var25, var13, var19);
t.vertex(var23, y + 0.0D, var27, var15, var19);
t.vertex(var23, y + 1.0D, var27, var15, var17);
t.vertex(var23, y + 1.0D, var27, var13, var17);
t.vertex(var23, y + 0.0D, var27, var13, var19);
t.vertex(var21, y + 0.0D, var25, var15, var19);
t.vertex(var21, y + 1.0D, var25, var15, var17);
t.vertex(var21, y + 1.0D, var27, var13, var17);
t.vertex(var21, y + 0.0D, var27, var13, var19);
t.vertex(var23, y + 0.0D, var25, var15, var19);
t.vertex(var23, y + 1.0D, var25, var15, var17);
t.vertex(var23, y + 1.0D, var25, var13, var17);
t.vertex(var23, y + 0.0D, var25, var13, var19);
t.vertex(var21, y + 0.0D, var27, var15, var19);
t.vertex(var21, y + 1.0D, var27, var15, var17);
}
public void renderShiftedColumn(BlockBase block, int meta, double x, double y, double z, boolean renderingInInventory) {
Atlas atlas;
Atlas.Sprite texture;
if (blockRendererAccessor.getTextureOverride() >= 0) {
atlas = Atlases.getTerrain();
texture = atlas.getTexture(blockRendererAccessor.getTextureOverride());
} else {
int textureIndex = block.getTextureForSide(0, meta);
atlas = ((CustomAtlasProvider) block).getAtlas().of(textureIndex);
texture = atlas.getTexture(textureIndex);
}
Tessellator t = prepareTessellator(atlas, renderingInInventory);
double var13 = texture.getStartU();
double var15 = texture.getEndU();
double var17 = texture.getStartV();
double var19 = texture.getEndV();
double var21 = x + 0.5D - 0.25D;
double var23 = x + 0.5D + 0.25D;
double var25 = z + 0.5D - 0.5D;
double var27 = z + 0.5D + 0.5D;
t.vertex(var21, y + 1.0D, var25, var13, var17);
t.vertex(var21, y + 0.0D, var25, var13, var19);
t.vertex(var21, y + 0.0D, var27, var15, var19);
t.vertex(var21, y + 1.0D, var27, var15, var17);
t.vertex(var21, y + 1.0D, var27, var13, var17);
t.vertex(var21, y + 0.0D, var27, var13, var19);
t.vertex(var21, y + 0.0D, var25, var15, var19);
t.vertex(var21, y + 1.0D, var25, var15, var17);
t.vertex(var23, y + 1.0D, var27, var13, var17);
t.vertex(var23, y + 0.0D, var27, var13, var19);
t.vertex(var23, y + 0.0D, var25, var15, var19);
t.vertex(var23, y + 1.0D, var25, var15, var17);
t.vertex(var23, y + 1.0D, var25, var13, var17);
t.vertex(var23, y + 0.0D, var25, var13, var19);
t.vertex(var23, y + 0.0D, var27, var15, var19);
t.vertex(var23, y + 1.0D, var27, var15, var17);
var21 = x + 0.5D - 0.5D;
var23 = x + 0.5D + 0.5D;
var25 = z + 0.5D - 0.25D;
var27 = z + 0.5D + 0.25D;
t.vertex(var21, y + 1.0D, var25, var13, var17);
t.vertex(var21, y + 0.0D, var25, var13, var19);
t.vertex(var23, y + 0.0D, var25, var15, var19);
t.vertex(var23, y + 1.0D, var25, var15, var17);
t.vertex(var23, y + 1.0D, var25, var13, var17);
t.vertex(var23, y + 0.0D, var25, var13, var19);
t.vertex(var21, y + 0.0D, var25, var15, var19);
t.vertex(var21, y + 1.0D, var25, var15, var17);
t.vertex(var23, y + 1.0D, var27, var13, var17);
t.vertex(var23, y + 0.0D, var27, var13, var19);
t.vertex(var21, y + 0.0D, var27, var15, var19);
t.vertex(var21, y + 1.0D, var27, var15, var17);
t.vertex(var21, y + 1.0D, var27, var13, var17);
t.vertex(var21, y + 0.0D, var27, var13, var19);
t.vertex(var23, y + 0.0D, var27, var15, var19);
t.vertex(var23, y + 1.0D, var27, var15, var17);
}
public void renderBottomFace(BlockBase block, double renderX, double renderY, double renderZ, int textureIndex, boolean renderingInInventory) {
Atlas atlas;
if (blockRendererAccessor.getTextureOverride() >= 0) {
textureIndex = blockRendererAccessor.getTextureOverride();
atlas = Atlases.getTerrain();
} else
atlas = ((CustomAtlasProvider) block).getAtlas().of(textureIndex);
Atlas.Sprite texture = atlas.getTexture(textureIndex);
BufferedImage atlasImage = atlas.getImage();
Tessellator t = prepareTessellator(atlas, renderingInInventory);
int
texX = texture.getX(),
texY = texture.getY(),
textureWidth = texture.getWidth(),
textureHeight = texture.getHeight(),
atlasWidth = atlasImage.getWidth(),
atlasHeight = atlasImage.getHeight();
double
startU1 = (texX + block.minX * textureWidth) / atlasWidth,
endU1 = (texX + block.maxX * textureWidth) / atlasWidth,
startV1 = (texY + block.minZ * textureHeight) / atlasHeight,
endV1 = (texY + block.maxZ * textureHeight) / atlasHeight;
if (block.minX < 0.0D || block.maxX > 1.0D) {
startU1 = texture.getStartU();
endU1 = texture.getEndU();
}
if (block.minZ < 0.0D || block.maxZ > 1.0D) {
startV1 = texture.getStartV();
endV1 = texture.getEndV();
}
double
endU2 = endU1,
startU2 = startU1,
startV2 = startV1,
endV2 = endV1;
switch (blockRendererAccessor.getBottomFaceRotation()) {
case 1:
startU1 = (texX + textureWidth - block.maxZ * textureWidth) / atlasWidth;
startV1 = (texY + block.minX * textureHeight) / atlasHeight;
endU1 = (texX + textureWidth - block.minZ * textureWidth) / atlasWidth;
endV1 = (texY + block.maxX * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startU1 = endU1;
endU1 = startU2;
startV2 = endV1;
endV2 = startV1;
break;
case 2:
startU1 = (texX + block.minZ * textureWidth) / atlasWidth;
startV1 = (texY + textureHeight - block.maxX * textureHeight) / atlasHeight;
endU1 = (texX + block.maxZ * textureWidth) / atlasWidth;
endV1 = (texY + textureHeight - block.minX * textureHeight) / atlasHeight;
startV2 = startV1;
endV2 = endV1;
endU2 = startU1;
startU2 = endU1;
startV1 = endV1;
endV1 = startV2;
break;
case 3:
startU1 = (texX + textureWidth - block.minX * textureWidth) / atlasWidth;
endU1 = (texX + textureWidth - block.maxX * textureWidth) / atlasWidth;
startV1 = (texY + textureHeight - block.minZ * textureHeight) / atlasHeight;
endV1 = (texY + textureHeight - block.maxZ * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startV2 = startV1;
endV2 = endV1;
break;
}
double
startRenderX = renderX + block.minX,
endRenderX = renderX + block.maxX,
adjustedRenderY = renderY + block.minY,
startRenderZ = renderZ + block.minZ,
endRenderZ = renderZ + block.maxZ;
if (blockRendererAccessor.getField_92()) {
t.colour(
blockRendererAccessor.getField_56(), blockRendererAccessor.getField_60(), blockRendererAccessor.getField_64()
);
t.vertex(startRenderX, adjustedRenderY, endRenderZ, startU2, endV2);
t.colour(
blockRendererAccessor.getField_57(), blockRendererAccessor.getField_61(), blockRendererAccessor.getField_65()
);
t.vertex(startRenderX, adjustedRenderY, startRenderZ, startU1, startV1);
t.colour(
blockRendererAccessor.getField_58(), blockRendererAccessor.getField_62(), blockRendererAccessor.getField_66()
);
t.vertex(endRenderX, adjustedRenderY, startRenderZ, endU2, startV2);
t.colour(
blockRendererAccessor.getField_59(), blockRendererAccessor.getField_63(), blockRendererAccessor.getField_68()
);
} else {
t.vertex(startRenderX, adjustedRenderY, endRenderZ, startU2, endV2);
t.vertex(startRenderX, adjustedRenderY, startRenderZ, startU1, startV1);
t.vertex(endRenderX, adjustedRenderY, startRenderZ, endU2, startV2);
}
t.vertex(endRenderX, adjustedRenderY, endRenderZ, endU1, endV1);
}
public void renderTopFace(BlockBase block, double renderX, double renderY, double renderZ, int textureIndex, boolean renderingInInventory) {
Atlas atlas;
if (blockRendererAccessor.getTextureOverride() >= 0) {
textureIndex = blockRendererAccessor.getTextureOverride();
atlas = Atlases.getTerrain();
} else
atlas = ((CustomAtlasProvider) block).getAtlas().of(textureIndex);
Atlas.Sprite texture = atlas.getTexture(textureIndex);
BufferedImage atlasImage = atlas.getImage();
Tessellator t = prepareTessellator(atlas, renderingInInventory);
int
texX = texture.getX(),
texY = texture.getY(),
textureWidth = texture.getWidth(),
textureHeight = texture.getHeight(),
atlasWidth = atlasImage.getWidth(),
atlasHeight = atlasImage.getHeight();
double
startU1 = (texX + block.minX * textureWidth) / atlasWidth,
endU1 = (texX + block.maxX * textureWidth) / atlasWidth,
startV1 = (texY + block.minZ * textureHeight) / atlasHeight,
endV1 = (texY + block.maxZ * textureHeight) / atlasHeight;
if (block.minX < 0.0D || block.maxX > 1.0D) {
startU1 = texture.getStartU();
endU1 = texture.getEndU();
}
if (block.minZ < 0.0D || block.maxZ > 1.0D) {
startV1 = texture.getStartV();
endV1 = texture.getEndV();
}
double
endU2 = endU1,
startU2 = startU1,
startV2 = startV1,
endV2 = endV1;
switch (blockRendererAccessor.getTopFaceRotation()) {
case 1:
startU1 = (texX + block.minZ * textureWidth) / atlasWidth;
startV1 = (texY + textureHeight - block.maxX * textureHeight) / atlasHeight;
endU1 = (texX + block.maxZ * textureWidth) / atlasWidth;
endV1 = (texY + textureHeight - block.minX * textureHeight) / atlasHeight;
startV2 = startV1;
endV2 = endV1;
endU2 = startU1;
startU2 = endU1;
startV1 = endV1;
endV1 = startV2;
break;
case 2:
startU1 = (texX + textureWidth - block.maxZ * textureWidth) / atlasWidth;
startV1 = (texY + block.minX * textureHeight) / atlasHeight;
endU1 = (texX + textureWidth - block.minZ * textureWidth) / atlasWidth;
endV1 = (texY + block.maxX * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startU1 = endU1;
endU1 = startU2;
startV2 = endV1;
endV2 = startV1;
break;
case 3:
startU1 = (texX + textureWidth - block.minX * textureWidth) / atlasWidth;
endU1 = (texX + textureWidth - block.maxX * textureWidth) / atlasWidth;
startV1 = (texY + textureHeight - block.minZ * textureHeight) / atlasHeight;
endV1 = (texY + textureHeight - block.maxZ * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startV2 = startV1;
endV2 = endV1;
break;
}
double
startRenderX = renderX + block.minX,
endRenderX = renderX + block.maxX,
adjustedRenderY = renderY + block.maxY,
startRenderZ = renderZ + block.minZ,
endRenderZ = renderZ + block.maxZ;
if (blockRendererAccessor.getField_92()) {
t.colour(
blockRendererAccessor.getField_56(), blockRendererAccessor.getField_60(), blockRendererAccessor.getField_64()
);
t.vertex(endRenderX, adjustedRenderY, endRenderZ, endU1, endV1);
t.colour(
blockRendererAccessor.getField_57(), blockRendererAccessor.getField_61(), blockRendererAccessor.getField_65()
);
t.vertex(endRenderX, adjustedRenderY, startRenderZ, endU2, startV2);
t.colour(
blockRendererAccessor.getField_58(), blockRendererAccessor.getField_62(), blockRendererAccessor.getField_66()
);
t.vertex(startRenderX, adjustedRenderY, startRenderZ, startU1, startV1);
t.colour(
blockRendererAccessor.getField_59(), blockRendererAccessor.getField_63(), blockRendererAccessor.getField_68()
);
} else {
t.vertex(endRenderX, adjustedRenderY, endRenderZ, endU1, endV1);
t.vertex(endRenderX, adjustedRenderY, startRenderZ, endU2, startV2);
t.vertex(startRenderX, adjustedRenderY, startRenderZ, startU1, startV1);
}
t.vertex(startRenderX, adjustedRenderY, endRenderZ, startU2, endV2);
}
public void renderEastFace(BlockBase block, double renderX, double renderY, double renderZ, int textureIndex, boolean renderingInInventory) {
Atlas atlas;
if (blockRendererAccessor.getTextureOverride() >= 0) {
textureIndex = blockRendererAccessor.getTextureOverride();
atlas = Atlases.getTerrain();
} else
atlas = ((CustomAtlasProvider) block).getAtlas().of(textureIndex);
Atlas.Sprite texture = atlas.getTexture(textureIndex);
BufferedImage atlasImage = atlas.getImage();
Tessellator t = prepareTessellator(atlas, renderingInInventory);
int
texX = texture.getX(),
texY = texture.getY(),
textureWidth = texture.getWidth(),
textureHeight = texture.getHeight(),
atlasWidth = atlasImage.getWidth(),
atlasHeight = atlasImage.getHeight();
double
startU1 = (texX + block.minX * textureWidth) / atlasWidth,
endU1 = (texX + block.maxX * textureWidth) / atlasWidth,
startV1 = (texY + textureHeight - block.maxY * textureHeight) / atlasHeight,
endV1 = (texY + textureHeight - block.minY * textureHeight) / atlasHeight;
if (blockRendererAccessor.getMirrorTexture()) {
double temp = startU1;
startU1 = endU1;
endU1 = temp;
}
if (block.minX < 0.0D || block.maxX > 1.0D) {
startU1 = texture.getStartU();
endU1 = texture.getEndU();
}
if (block.minY < 0.0D || block.maxY > 1.0D) {
startV1 = texture.getStartV();
endV1 = texture.getEndV();
}
double
endU2 = endU1,
startU2 = startU1,
startV2 = startV1,
endV2 = endV1;
switch (blockRendererAccessor.getEastFaceRotation()) {
case 1:
startU1 = (texX + textureWidth - block.maxY * textureWidth) / atlasWidth;
startV1 = (texY + block.maxX * textureHeight) / atlasHeight;
endU1 = (texX + textureWidth - block.minY * textureWidth) / atlasWidth;
endV1 = (texY + block.minX * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startU1 = endU1;
endU1 = startU2;
startV2 = endV1;
endV2 = startV1;
break;
case 2:
startU1 = (texX + block.minY * textureWidth) / atlasWidth;
startV1 = (texY + textureHeight - block.minX * textureHeight) / atlasHeight;
endU1 = (texX + block.maxY * textureWidth) / atlasWidth;
endV1 = (texY + textureHeight - block.maxX * textureHeight) / atlasHeight;
startV2 = startV1;
endV2 = endV1;
endU2 = startU1;
startU2 = endU1;
startV1 = endV1;
endV1 = startV2;
break;
case 3:
startU1 = (texX + textureWidth - block.minX * textureWidth) / atlasWidth;
endU1 = (texX + textureWidth - block.maxX * textureWidth) / atlasWidth;
startV1 = (texY + block.maxY * textureHeight) / atlasHeight;
endV1 = (texY + block.minY * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startV2 = startV1;
endV2 = endV1;
break;
}
double
startRenderX = renderX + block.minX,
endRenderX = renderX + block.maxX,
startRenderY = renderY + block.minY,
endRenderY = renderY + block.maxY,
adjustedRenderZ = renderZ + block.minZ;
if (blockRendererAccessor.getField_92()) {
t.colour(
blockRendererAccessor.getField_56(), blockRendererAccessor.getField_60(), blockRendererAccessor.getField_64()
);
t.vertex(startRenderX, endRenderY, adjustedRenderZ, endU2, startV2);
t.colour(
blockRendererAccessor.getField_57(), blockRendererAccessor.getField_61(), blockRendererAccessor.getField_65()
);
t.vertex(endRenderX, endRenderY, adjustedRenderZ, startU1, startV1);
t.colour(
blockRendererAccessor.getField_58(), blockRendererAccessor.getField_62(), blockRendererAccessor.getField_66()
);
t.vertex(endRenderX, startRenderY, adjustedRenderZ, startU2, endV2);
t.colour(
blockRendererAccessor.getField_59(), blockRendererAccessor.getField_63(), blockRendererAccessor.getField_68()
);
} else {
t.vertex(startRenderX, endRenderY, adjustedRenderZ, endU2, startV2);
t.vertex(endRenderX, endRenderY, adjustedRenderZ, startU1, startV1);
t.vertex(endRenderX, startRenderY, adjustedRenderZ, startU2, endV2);
}
t.vertex(startRenderX, startRenderY, adjustedRenderZ, endU1, endV1);
}
public void renderWestFace(BlockBase block, double renderX, double renderY, double renderZ, int textureIndex, boolean renderingInInventory) {
Atlas atlas;
if (blockRendererAccessor.getTextureOverride() >= 0) {
textureIndex = blockRendererAccessor.getTextureOverride();
atlas = Atlases.getTerrain();
} else
atlas = ((CustomAtlasProvider) block).getAtlas().of(textureIndex);
Atlas.Sprite texture = atlas.getTexture(textureIndex);
BufferedImage atlasImage = atlas.getImage();
Tessellator t = prepareTessellator(atlas, renderingInInventory);
int
texX = texture.getX(),
texY = texture.getY(),
textureWidth = texture.getWidth(),
textureHeight = texture.getHeight(),
atlasWidth = atlasImage.getWidth(),
atlasHeight = atlasImage.getHeight();
double
startU1 = (texX + block.minX * textureWidth) / atlasWidth,
endU1 = (texX + block.maxX * textureWidth) / atlasWidth,
startV1 = (texY + textureHeight - block.maxY * textureHeight) / atlasHeight,
endV1 = (texY + textureHeight - block.minY * textureHeight) / atlasHeight;
if (blockRendererAccessor.getMirrorTexture()) {
double temp = startU1;
startU1 = endU1;
endU1 = temp;
}
if (block.minX < 0.0D || block.maxX > 1.0D) {
startU1 = texture.getStartU();
endU1 = texture.getEndU();
}
if (block.minY < 0.0D || block.maxY > 1.0D) {
startV1 = texture.getStartV();
endV1 = texture.getEndV();
}
double
endU2 = endU1,
startU2 = startU1,
startV2 = startV1,
endV2 = endV1;
switch (blockRendererAccessor.getWestFaceRotation()) {
case 1:
startU1 = (texX + block.minY * textureWidth) / atlasWidth;
endV1 = (texY + textureHeight - block.minX * textureHeight) / atlasHeight;
endU1 = (texX + block.maxY * textureWidth) / atlasWidth;
startV1 = (texY + textureHeight - block.maxX * textureHeight) / atlasHeight;
startV2 = startV1;
endV2 = endV1;
endU2 = startU1;
startU2 = endU1;
startV1 = endV1;
endV1 = startV2;
break;
case 2:
startU1 = (texX + textureWidth - block.maxY * textureWidth) / atlasWidth;
startV1 = (texY + block.minX * textureHeight) / atlasHeight;
endU1 = (texX + textureWidth - block.minY * textureWidth) / atlasWidth;
endV1 = (texY + block.maxX * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startU1 = endU1;
endU1 = startU2;
startV2 = endV1;
endV2 = startV1;
break;
case 3:
startU1 = (texX + textureWidth - block.minX * textureWidth) / atlasWidth;
endU1 = (texX + textureWidth - block.maxX * textureWidth) / atlasWidth;
startV1 = (texY + block.maxY * textureHeight) / atlasHeight;
endV1 = (texY + block.minY * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startV2 = startV1;
endV2 = endV1;
break;
}
double
startRenderX = renderX + block.minX,
endRenderX = renderX + block.maxX,
startRenderY = renderY + block.minY,
endRenderY = renderY + block.maxY,
adjustedRenderZ = renderZ + block.maxZ;
if (blockRendererAccessor.getField_92()) {
t.colour(
blockRendererAccessor.getField_56(), blockRendererAccessor.getField_60(), blockRendererAccessor.getField_64()
);
t.vertex(startRenderX, endRenderY, adjustedRenderZ, startU1, startV1);
t.colour(
blockRendererAccessor.getField_57(), blockRendererAccessor.getField_61(), blockRendererAccessor.getField_65()
);
t.vertex(startRenderX, startRenderY, adjustedRenderZ, startU2, endV2);
t.colour(
blockRendererAccessor.getField_58(), blockRendererAccessor.getField_62(), blockRendererAccessor.getField_66()
);
t.vertex(endRenderX, startRenderY, adjustedRenderZ, endU1, endV1);
t.colour(
blockRendererAccessor.getField_59(), blockRendererAccessor.getField_63(), blockRendererAccessor.getField_68()
);
} else {
t.vertex(startRenderX, endRenderY, adjustedRenderZ, startU1, startV1);
t.vertex(startRenderX, startRenderY, adjustedRenderZ, startU2, endV2);
t.vertex(endRenderX, startRenderY, adjustedRenderZ, endU1, endV1);
}
t.vertex(endRenderX, endRenderY, adjustedRenderZ, endU2, startV2);
}
public void renderNorthFace(BlockBase block, double renderX, double renderY, double renderZ, int textureIndex, boolean renderingInInventory) {
Atlas atlas;
if (blockRendererAccessor.getTextureOverride() >= 0) {
textureIndex = blockRendererAccessor.getTextureOverride();
atlas = Atlases.getTerrain();
} else
atlas = ((CustomAtlasProvider) block).getAtlas().of(textureIndex);
Atlas.Sprite texture = atlas.getTexture(textureIndex);
BufferedImage atlasImage = atlas.getImage();
Tessellator t = prepareTessellator(atlas, renderingInInventory);
int
texX = texture.getX(),
texY = texture.getY(),
textureWidth = texture.getWidth(),
textureHeight = texture.getHeight(),
atlasWidth = atlasImage.getWidth(),
atlasHeight = atlasImage.getHeight();
double
startU1 = (texX + block.minZ * textureWidth) / atlasWidth,
endU1 = (texX + block.maxZ * textureWidth) / atlasWidth,
startV1 = (texY + textureHeight - block.maxY * textureHeight) / atlasHeight,
endV1 = (texY + textureHeight - block.minY * textureHeight) / atlasHeight;
if (blockRendererAccessor.getMirrorTexture()) {
double temp = startU1;
startU1 = endU1;
endU1 = temp;
}
if (block.minZ < 0.0D || block.maxZ > 1.0D) {
startU1 = texture.getStartU();
endU1 = texture.getEndU();
}
if (block.minY < 0.0D || block.maxY > 1.0D) {
startV1 = texture.getStartV();
endV1 = texture.getEndV();
}
double
endU2 = endU1,
startU2 = startU1,
startV2 = startV1,
endV2 = endV1;
switch (blockRendererAccessor.getNorthFaceRotation()) {
case 1:
startU1 = (texX + block.minY * textureWidth) / atlasWidth;
startV1 = (texY + textureHeight - block.maxZ * textureHeight) / atlasHeight;
endU1 = (texX + block.maxY * textureWidth) / atlasWidth;
endV1 = (texY + textureHeight - block.minZ * textureHeight) / atlasHeight;
startV2 = startV1;
endV2 = endV1;
endU2 = startU1;
startU2 = endU1;
startV1 = endV1;
endV1 = startV2;
break;
case 2:
startU1 = (texX + textureWidth - block.maxY * textureWidth) / atlasWidth;
startV1 = (texY + block.minZ * textureHeight) / atlasHeight;
endU1 = (texX + textureWidth - block.minY * textureWidth) / atlasWidth;
endV1 = (texY + block.maxZ * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startU1 = endU1;
endU1 = startU2;
startV2 = endV1;
endV2 = startV1;
break;
case 3:
startU1 = (texX + textureWidth - block.minZ * textureWidth) / atlasWidth;
endU1 = (texX + textureWidth - block.maxZ * textureWidth) / atlasWidth;
startV1 = (texY + block.maxY * textureHeight) / atlasHeight;
endV1 = (texY + block.minY * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startV2 = startV1;
endV2 = endV1;
break;
}
double
adjustedRenderX = renderX + block.minX,
startRenderY = renderY + block.minY,
endRenderY = renderY + block.maxY,
startRenderZ = renderZ + block.minZ,
endRenderZ = renderZ + block.maxZ;
if (blockRendererAccessor.getField_92()) {
t.colour(
blockRendererAccessor.getField_56(), blockRendererAccessor.getField_60(), blockRendererAccessor.getField_64()
);
t.vertex(adjustedRenderX, endRenderY, endRenderZ, endU2, startV2);
t.colour(
blockRendererAccessor.getField_57(), blockRendererAccessor.getField_61(), blockRendererAccessor.getField_65()
);
t.vertex(adjustedRenderX, endRenderY, startRenderZ, startU1, startV1);
t.colour(
blockRendererAccessor.getField_58(), blockRendererAccessor.getField_62(), blockRendererAccessor.getField_66()
);
t.vertex(adjustedRenderX, startRenderY, startRenderZ, startU2, endV2);
t.colour(
blockRendererAccessor.getField_59(), blockRendererAccessor.getField_63(), blockRendererAccessor.getField_68()
);
} else {
t.vertex(adjustedRenderX, endRenderY, endRenderZ, endU2, startV2);
t.vertex(adjustedRenderX, endRenderY, startRenderZ, startU1, startV1);
t.vertex(adjustedRenderX, startRenderY, startRenderZ, startU2, endV2);
}
t.vertex(adjustedRenderX, startRenderY, endRenderZ, endU1, endV1);
}
public void renderSouthFace(BlockBase block, double renderX, double renderY, double renderZ, int textureIndex, boolean renderingInInventory) {
Atlas atlas;
if (blockRendererAccessor.getTextureOverride() >= 0) {
textureIndex = blockRendererAccessor.getTextureOverride();
atlas = Atlases.getTerrain();
} else
atlas = ((CustomAtlasProvider) block).getAtlas().of(textureIndex);
Atlas.Sprite texture = atlas.getTexture(textureIndex);
BufferedImage atlasImage = atlas.getImage();
Tessellator t = prepareTessellator(atlas, renderingInInventory);
int
texX = texture.getX(),
texY = texture.getY(),
textureWidth = texture.getWidth(),
textureHeight = texture.getHeight(),
atlasWidth = atlasImage.getWidth(),
atlasHeight = atlasImage.getHeight();
double
startU1 = (texX + block.minZ * textureWidth) / atlasWidth,
endU1 = (texX + block.maxZ * textureWidth) / atlasWidth,
startV1 = (texY + textureHeight - block.maxY * textureHeight) / atlasHeight,
endV1 = (texY + textureHeight - block.minY * textureHeight) / atlasHeight;
if (blockRendererAccessor.getMirrorTexture()) {
double temp = startU1;
startU1 = endU1;
endU1 = temp;
}
if (block.minZ < 0.0D || block.maxZ > 1.0D) {
startU1 = texture.getStartU();
endU1 = texture.getEndU();
}
if (block.minY < 0.0D || block.maxY > 1.0D) {
startV1 = texture.getStartV();
endV1 = texture.getEndV();
}
double
endU2 = endU1,
startU2 = startU1,
startV2 = startV1,
endV2 = endV1;
switch (blockRendererAccessor.getSouthFaceRotation()) {
case 1:
startU1 = (texX + textureWidth - block.maxY * textureWidth) / atlasWidth;
startV1 = (texY + block.maxZ * textureHeight) / atlasHeight;
endU1 = (texX + textureWidth - block.minY * textureWidth) / atlasWidth;
endV1 = (texY + block.minZ * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startU1 = endU1;
endU1 = startU2;
startV2 = endV1;
endV2 = startV1;
break;
case 2:
startU1 = (texX + block.minY * textureWidth) / atlasWidth;
startV1 = (texY + textureHeight - block.minZ * textureHeight) / atlasHeight;
endU1 = (texX + block.maxY * textureWidth) / atlasWidth;
endV1 = (texY + textureHeight - block.maxZ * textureHeight) / atlasHeight;
startV2 = startV1;
endV2 = endV1;
endU2 = startU1;
startU2 = endU1;
startV1 = endV1;
endV1 = startV2;
break;
case 3:
startU1 = (texX + textureWidth - block.minZ * textureWidth) / atlasWidth;
endU1 = (texX + textureWidth - block.maxZ * textureWidth) / atlasWidth;
startV1 = (texY + block.maxY * textureHeight) / atlasHeight;
endV1 = (texY + block.minY * textureHeight) / atlasHeight;
endU2 = endU1;
startU2 = startU1;
startV2 = startV1;
endV2 = endV1;
break;
}
double
adjustedRenderX = renderX + block.maxX,
startRenderY = renderY + block.minY,
endRenderY = renderY + block.maxY,
startRenderZ = renderZ + block.minZ,
endRenderZ = renderZ + block.maxZ;
if (blockRendererAccessor.getField_92()) {
t.colour(
blockRendererAccessor.getField_56(), blockRendererAccessor.getField_60(), blockRendererAccessor.getField_64()
);
t.vertex(adjustedRenderX, startRenderY, endRenderZ, startU2, endV2);
t.colour(
blockRendererAccessor.getField_57(), blockRendererAccessor.getField_61(), blockRendererAccessor.getField_65()
);
t.vertex(adjustedRenderX, startRenderY, startRenderZ, endU1, endV1);
t.colour(
blockRendererAccessor.getField_58(), blockRendererAccessor.getField_62(), blockRendererAccessor.getField_66()
);
t.vertex(adjustedRenderX, endRenderY, startRenderZ, endU2, startV2);
t.colour(
blockRendererAccessor.getField_59(), blockRendererAccessor.getField_63(), blockRendererAccessor.getField_68()
);
} else {
t.vertex(adjustedRenderX, startRenderY, endRenderZ, startU2, endV2);
t.vertex(adjustedRenderX, startRenderY, startRenderZ, endU1, endV1);
t.vertex(adjustedRenderX, endRenderY, startRenderZ, endU2, startV2);
}
t.vertex(adjustedRenderX, endRenderY, endRenderZ, startU1, startV1);
}
}
<|start_filename|>station-dimensions-v0/src/main/java/net/modificationstation/stationapi/impl/client/level/dimension/DimensionHelperClientImpl.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.level.dimension;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.class_467;
import net.minecraft.client.Minecraft;
import net.minecraft.entity.player.PlayerBase;
import net.minecraft.level.Level;
import net.minecraft.level.dimension.Dimension;
import net.modificationstation.stationapi.api.registry.DimensionRegistry;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.impl.level.dimension.DimensionHelperImpl;
import static net.modificationstation.stationapi.api.level.dimension.VanillaDimensions.OVERWORLD;
public class DimensionHelperClientImpl extends DimensionHelperImpl {
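    // Client-side dimension switch: toggles the player between the destination dimension and the
    // overworld, scales the X/Z position by the given factor, and rebuilds the client level around
    // the player before handing off to the travel agent.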
@Override
public void switchDimension(PlayerBase player, Identifier destination, double scale, class_467 travelAgent, String enteringMessage, String leavingMessage) {
DimensionRegistry dimensions = DimensionRegistry.INSTANCE;
//noinspection deprecation
Minecraft game = (Minecraft) FabricLoader.getInstance().getGameInstance();
int overworldSerial = dimensions.getSerialID(OVERWORLD).orElseThrow(() -> new IllegalStateException("Couldn't find overworld dimension in the registry!"));
int destinationSerial = dimensions.getSerialID(destination).orElseThrow(() -> new IllegalArgumentException("Unknown dimension: " + destination + "!"));
player.dimensionId = player.dimensionId == destinationSerial ? overworldSerial : destinationSerial;
game.level.removeEntity(player);
player.removed = false;
double var1 = player.x;
double var3 = player.z;
if (player.dimensionId == destinationSerial) {
var1 = var1 * scale;
var3 = var3 * scale;
player.setPositionAndAngles(var1, player.y, var3, player.yaw, player.pitch);
if (player.isAlive()) {
game.level.method_193(player, false);
}
Level var10 = new Level(game.level, Dimension.getByID(destinationSerial));
game.showLevelProgress(var10, enteringMessage, player);
} else {
var1 = var1 / scale;
var3 = var3 / scale;
player.setPositionAndAngles(var1, player.y, var3, player.yaw, player.pitch);
if (player.isAlive()) {
game.level.method_193(player, false);
}
Level var12 = new Level(game.level, Dimension.getByID(overworldSerial));
game.showLevelProgress(var12, leavingMessage, player);
}
player.level = game.level;
if (player.isAlive()) {
player.setPositionAndAngles(var1, player.y, var3, player.yaw, player.pitch);
game.level.method_193(player, false);
travelAgent.method_1530(game.level, player);
}
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/model/Vertex.java<|end_filename|>
package net.modificationstation.stationapi.api.client.model;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import lombok.AccessLevel;
import lombok.EqualsAndHashCode;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import net.modificationstation.stationapi.api.util.math.Direction;
import java.util.*;
import java.util.concurrent.*;
@EqualsAndHashCode
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
@FieldDefaults(makeFinal = true, level = AccessLevel.PUBLIC)
public class Vertex {
private static final Cache<String, Vertex> CACHE = CacheBuilder.newBuilder().softValues().build();
double
x, y, z,
u, v;
Direction lightingFace;
float normalX, normalY, normalZ;
boolean shade;
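    // Vertices are interned in a soft-valued cache keyed by the string form of all fields,
    // so identical vertices share a single instance.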
public static Vertex get(double x, double y, double z, double u, double v, Direction face, boolean shade) {
return get(x, y, z, u, v, face, face, shade);
}
public static Vertex get(double x, double y, double z, double u, double v, Direction lightingFace, Direction normal, boolean shade) {
return get(x, y, z, u, v, lightingFace, normal.vector.x, normal.vector.y, normal.vector.z, shade);
}
public static Vertex get(double x, double y, double z, double u, double v, Direction face, float normalX, float normalY, float normalZ, boolean shade) {
try {
return CACHE.get(Arrays.deepToString(new Object[] {x, y, z, u, v, face, normalX, normalY, normalZ, shade}), () -> new Vertex(x, y, z, u, v, face, normalX, normalY, normalZ, shade));
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
}
}
<|start_filename|>station-dimensions-v0/src/main/java/net/modificationstation/stationapi/impl/level/dimension/DimensionHelperImpl.java<|end_filename|>
package net.modificationstation.stationapi.impl.level.dimension;
import net.minecraft.class_467;
import net.minecraft.entity.player.PlayerBase;
import net.modificationstation.stationapi.api.registry.Identifier;
public abstract class DimensionHelperImpl {
public abstract void switchDimension(PlayerBase player, Identifier destination, double scale, class_467 travelAgent, String enteringMessage, String leavingMessage);
}
<|start_filename|>station-items-v0/src/main/java/net/modificationstation/stationapi/impl/client/gui/CustomTooltipRendererImpl.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.gui;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.mine_diver.unsafeevents.listener.ListenerPriority;
import net.minecraft.item.ItemBase;
import net.modificationstation.stationapi.api.client.event.gui.TooltipRenderEvent;
import net.modificationstation.stationapi.api.client.gui.CustomTooltipProvider;
import net.modificationstation.stationapi.api.mod.entrypoint.Entrypoint;
import net.modificationstation.stationapi.api.mod.entrypoint.EventBusPolicy;
import net.modificationstation.stationapi.mixin.item.client.DrawableHelperInvoker;
import java.util.*;
import java.util.stream.*;
@Entrypoint(eventBus = @EventBusPolicy(registerInstance = false))
public class CustomTooltipRendererImpl {
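    // Replaces the vanilla tooltip for items implementing CustomTooltipProvider: measures the widest
    // supplied line, draws a gradient background sized to fit all lines, renders each line with a
    // shadow, and cancels the original tooltip render.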
@EventListener(priority = ListenerPriority.HIGH)
private static void renderCustomTooltips(TooltipRenderEvent event) {
ItemBase item = event.itemInstance.getType();
if (!event.isCancelled() && item instanceof CustomTooltipProvider) {
String[] newTooltip = ((CustomTooltipProvider) item).getTooltip(event.itemInstance, event.originalTooltip);
if (newTooltip != null) Arrays.stream(newTooltip).mapToInt(event.textManager::getTextWidth).max().ifPresent(tooltipWidth -> {
int tooltipX = event.mouseX - event.containerX + 12;
int tooltipY = event.mouseY - event.containerY - 12;
((DrawableHelperInvoker) event.container).invokeFillGradient(tooltipX - 3, tooltipY - 3, tooltipX + tooltipWidth + 3, tooltipY + (8 * newTooltip.length) + (3 * newTooltip.length), -1073741824, -1073741824);
IntStream.range(0, newTooltip.length).forEach(currentTooltip -> event.textManager.drawTextWithShadow(newTooltip[currentTooltip], tooltipX, tooltipY + (8 * currentTooltip) + (3 * currentTooltip), -1));
});
event.cancel();
}
}
}
<|start_filename|>src/testmod/java/net/modificationstation/sltest/texture/TextureListener.java<|end_filename|>
package net.modificationstation.sltest.texture;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.modificationstation.sltest.SLTest;
import net.modificationstation.sltest.block.BlockFreezer;
import net.modificationstation.sltest.item.ItemListener;
import net.modificationstation.stationapi.api.client.event.texture.TextureRegisterEvent;
import net.modificationstation.stationapi.api.client.model.json.JsonModel;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlases;
import net.modificationstation.stationapi.api.client.texture.atlas.ExpandableAtlas;
import net.modificationstation.stationapi.api.util.math.Direction;
import static net.modificationstation.sltest.SLTest.MODID;
import static net.modificationstation.sltest.block.Blocks.FREEZER;
import static net.modificationstation.sltest.block.Blocks.TEST_ANIMATED_BLOCK;
import static net.modificationstation.sltest.block.Blocks.TEST_BLOCK;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
public class TextureListener {
@EventListener
public void registerTextures(TextureRegisterEvent event) {
ExpandableAtlas terrain = Atlases.getStationTerrain();
TEST_BLOCK.get().texture = terrain.addTexture(of(MODID, "blocks/testBlock")).index;
TEST_ANIMATED_BLOCK.get().texture = terrain.addTexture(of(MODID, "blocks/testAnimatedBlock")).index;
FREEZER.get().texture = terrain.addTexture(of(MODID, "blocks/FreezerTop")).index;
FREEZER.get(BlockFreezer.class).sideTexture = terrain.addTexture(of(MODID, "blocks/FreezerSide")).index;
altarTextures[Direction.DOWN.ordinal()] = terrain.addTexture(of(MODID, "blocks/altar_bottom")).index;
altarTextures[Direction.UP.ordinal()] = terrain.addTexture(of(MODID, "blocks/altar_top")).index;
altarTextures[Direction.EAST.ordinal()] = terrain.addTexture(of(MODID, "blocks/altar_east")).index;
altarTextures[Direction.WEST.ordinal()] = terrain.addTexture(of(MODID, "blocks/altar_west")).index;
altarTextures[Direction.NORTH.ordinal()] = terrain.addTexture(of(MODID, "blocks/altar_north")).index;
altarTextures[Direction.SOUTH.ordinal()] = terrain.addTexture(of(MODID, "blocks/altar_south")).index;
ItemListener.testNBTItem.setTexture(of(MODID, "items/nbtItem"));
ItemListener.testItem.setTexture(of(MODID, "items/highres"));
// ItemListener.testPickaxe.setAnimationBinder("/assets/sltest/stationapi/textures/items/testPickaxe.png", 1, of(MODID, "items/testItem"));
ItemListener.testPickaxe.setTexture(of(MODID, "items/testPickaxe"));
// SquareAtlas.GUI_ITEMS.addAnimationBinder("/assets/sltest/textures/items/testPickaxe.png", 1, 0);
TEST_ATLAS = new ExpandableAtlas(of(SLTest.MODID, "test_atlas"));
TEST_ATLAS.addTexture(of(MODID, "items/testItem"));
TEST_ATLAS.addTexture(of(MODID, "blocks/testBlock"));
TEST_ATLAS.addTexture(of(MODID, "blocks/testAnimatedBlock"));
TEST_ATLAS.addTexture(of(MODID, "items/testPickaxe"));
TEST_ATLAS.addTexture(of(MODID, "items/nbtItem"));
TEST_ATLAS.addTexture(of(MODID, "blocks/FreezerTop"));
TEST_ATLAS.addTexture(of(MODID, "blocks/FreezerSide"));
farlandsBlockModel = JsonModel.get(of(MODID, "farlandsBlock"));
}
public static final int[] altarTextures = new int[6];
public static ExpandableAtlas TEST_ATLAS;
public static JsonModel farlandsBlockModel;
}
<|start_filename|>station-entities-v0/src/main/java/net/modificationstation/stationapi/mixin/entity/MixinEntityRegistry.java<|end_filename|>
package net.modificationstation.stationapi.mixin.entity;
import net.minecraft.entity.EntityBase;
import net.minecraft.entity.EntityRegistry;
import net.modificationstation.stationapi.api.StationAPI;
import net.modificationstation.stationapi.api.event.entity.EntityRegister;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import java.util.*;
@Mixin(EntityRegistry.class)
public class MixinEntityRegistry {
@Shadow
private static void register(Class<? extends EntityBase> arg, String string, int i) { }
@Shadow private static Map<String, Class<? extends EntityBase>> STRING_ID_TO_CLASS;
@Shadow private static Map<Class<? extends EntityBase>, String> CLASS_TO_STRING_ID;
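    // Posts the EntityRegister event once the vanilla registry's static initializer has run,
    // exposing the private register method and a callback that fills both string<->class lookup maps.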
@SuppressWarnings("UnresolvedMixinReference")
@Inject(method = "<clinit>", at = @At("RETURN"))
private static void onEntityRegister(CallbackInfo ci) {
StationAPI.EVENT_BUS.post(new EntityRegister(MixinEntityRegistry::register, (aClass, s) -> {
STRING_ID_TO_CLASS.put(s, aClass);
CLASS_TO_STRING_ID.put(aClass, s);
}));
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/resource/Resource.java<|end_filename|>
package net.modificationstation.stationapi.api.client.resource;
import java.io.*;
import java.util.*;
public interface Resource {
InputStream getResource();
default Optional<InputStream> getMeta() {
return Optional.empty();
}
static Resource of(InputStream stream) {
return stream instanceof Resource ? (Resource) stream : () -> stream;
}
}
<|start_filename|>station-registry-sync-v0/src/main/java/net/modificationstation/stationapi/impl/client/registry/ClientServerRegistryRemapper.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.registry;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.mine_diver.unsafeevents.listener.ListenerPriority;
import net.minecraft.entity.player.PlayerBase;
import net.minecraft.util.io.NBTIO;
import net.modificationstation.stationapi.api.event.registry.MessageListenerRegistryEvent;
import net.modificationstation.stationapi.api.mod.entrypoint.Entrypoint;
import net.modificationstation.stationapi.api.mod.entrypoint.EventBusPolicy;
import net.modificationstation.stationapi.api.packet.Message;
import net.modificationstation.stationapi.api.registry.LevelSerialRegistry;
import java.io.*;
import static net.modificationstation.stationapi.api.StationAPI.LOGGER;
import static net.modificationstation.stationapi.api.StationAPI.MODID;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
@Entrypoint(eventBus = @EventBusPolicy(registerInstance = false))
public class ClientServerRegistryRemapper {
@EventListener(priority = ListenerPriority.HIGH)
private static void registerListeners(MessageListenerRegistryEvent event) {
event.registry.register(of(MODID, "server_registry_sync"), ClientServerRegistryRemapper::remapRegistries);
}
private static void remapRegistries(PlayerBase player, Message message) {
LOGGER.info("Received level registries from server. Remapping...");
LevelSerialRegistry.loadAll(NBTIO.readGzipped(new ByteArrayInputStream(message.bytes)));
LOGGER.info("Successfully synchronized registries with the server.");
}
}
<|start_filename|>station-templates-v0/src/main/java/net/modificationstation/stationapi/api/template/item/ItemTemplate.java<|end_filename|>
package net.modificationstation.stationapi.api.template.item;
import net.minecraft.item.ItemBase;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlases;
import net.modificationstation.stationapi.api.client.texture.atlas.CustomAtlasProvider;
import net.modificationstation.stationapi.api.client.texture.atlas.ExpandableAtlas;
import net.modificationstation.stationapi.api.client.texture.binder.StationTextureBinder;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.api.registry.ModID;
import java.util.function.*;
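/**
 * Implemented by template item classes (which also extend {@link ItemBase}, since the default methods cast
 * {@code this} to it); provides translation-key and texture helpers that, by default, route through the
 * station GUI items atlas. A typical call, as seen in the test mod (names here are illustrative):
 * {@code myItem.setTexture(Identifier.of(MODID, "items/myItem"))}.
 */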
public interface ItemTemplate<T extends ItemBase> extends CustomAtlasProvider {
default T setTranslationKey(ModID modID, String translationKey) {
//noinspection unchecked
return (T) ((ItemBase) this).setTranslationKey(Identifier.of(modID, translationKey).toString());
}
@Override
default Atlas getAtlas() {
return Atlases.getStationGuiItems();
}
default Atlas.Sprite setTexture(Identifier textureIdentifier) {
Atlas.Sprite texture = ((ExpandableAtlas) getAtlas()).addTexture(textureIdentifier);
((ItemBase) this).setTexturePosition(texture.index);
return texture;
}
default Atlas.Sprite setTexture(String texturePath) {
Atlas.Sprite texture = ((ExpandableAtlas) getAtlas()).addTexture(texturePath);
((ItemBase) this).setTexturePosition(texture.index);
return texture;
}
default <E extends StationTextureBinder> E setTextureBinder(Identifier staticReference, Function<Atlas.Sprite, E> initializer) {
E textureBinder = ((ExpandableAtlas) getAtlas()).addTextureBinder(staticReference, initializer);
((ItemBase) this).setTexturePosition(textureBinder.index);
return textureBinder;
}
}
<|start_filename|>station-registry-api-v0/src/main/java/net/modificationstation/stationapi/api/registry/AbstractSerialRegistry.java<|end_filename|>
package net.modificationstation.stationapi.api.registry;
import org.jetbrains.annotations.NotNull;
import java.util.*;
import java.util.function.*;
/**
* Abstract extension of {@link Registry} which allows interaction with objects that also have serial IDs assigned to them.
*
* <p>For example, "minecraft:dirt" -> {@link net.minecraft.block.BlockBase#DIRT}; "minecraft:dirt" -> 3 (serial ID)
*
* <p>Serial IDs act like identifiers. Every object must have a serial ID, but not all serial IDs must have an object.
* Serial IDs are integers.
*
* <p>Unlike identifiers though, serial IDs are limited and can be iterated through, as they're just integer numbers.
*
* <p>This registry has a lot of abstract methods to allow direct interaction with already existing methods of
* serial ID lookup, for example {@link net.minecraft.block.BlockBase#id} and {@link net.minecraft.block.BlockBase#BY_ID}.
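*
* <p>As a quick illustration of how identifiers and serial IDs relate (a hypothetical lookup;
* {@code BlockRegistry.INSTANCE} and the identifier are only assumed here for the sketch, not guaranteed by this class):
* <pre>{@code
* OptionalInt serialID = BlockRegistry.INSTANCE.getSerialID(Identifier.of(MODID, "my_cool_block"));
* Optional<BlockBase> block = serialID.isPresent() ? BlockRegistry.INSTANCE.get(serialID.getAsInt()) : Optional.empty();
* }</pre>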
*
* @param <T> the object's type that's stored in the registry.
* @author mine_diver
* @see Registry
* @see LevelSerialRegistry
*/
public abstract class AbstractSerialRegistry<T> extends Registry<T> {
/**
* This flag defines whether the next free serial ID should be shifted into the 0->(size - shift) range
* during object initialization in the {@link AbstractSerialRegistry#register(Identifier, IntFunction)} method.
*
* <p>This is set to true in ItemRegistry due to the quirkiness of the item constructor and the serial IDs reserved for block items.
*/
private final boolean shiftSerialIDOnRegister;
/**
* Default registry constructor.
* @param identifier registry's identifier.
*/
public AbstractSerialRegistry(@NotNull Identifier identifier) {
this(identifier, false);
}
/**
* Constructor that allows to change the {@link AbstractSerialRegistry#shiftSerialIDOnRegister} flag.
*
* <p>Used by ItemRegistry.
*
* @param identifier registry's identifier.
* @param shiftSerialIDOnRegister whether the next free serial ID should be shifted
* to 0->size-shift range during object initialization.
*/
public AbstractSerialRegistry(@NotNull Identifier identifier, boolean shiftSerialIDOnRegister) {
super(identifier);
this.shiftSerialIDOnRegister = shiftSerialIDOnRegister;
}
/**
* Defines registry's serial IDs limit.
*
* <p>For example, the length of {@link net.minecraft.block.BlockBase#BY_ID} array.
*
* @return the maximum serial ID value (exclusive).
*/
public abstract int getSize();
/**
* Returns object's serial ID.
*
* <p>Since every object is supposed to have a serial ID, {@link OptionalInt} isn't required here.
*
* @param value the object associated to the requested serial ID.
* @return the serial ID of the given object.
*/
public abstract int getSerialID(@NotNull T value);
/**
* Returns the serial ID of the object associated with the given identifier.
*
* <p>Note, since not every identifier is supposed to have an object associated with it,
* not every identifier is supposed to have a corresponding serial ID, so {@link OptionalInt} is required here.
*
* @param identifier the identifier of the object associated with the requested serial ID.
* @return the serial ID of the object associated with the given identifier.
*/
public @NotNull OptionalInt getSerialID(@NotNull Identifier identifier) {
return get(Objects.requireNonNull(identifier)).map(t -> OptionalInt.of(getSerialID(t))).orElse(OptionalInt.empty());
}
/**
* Returns the identifier of the object associated with the given serial ID.
*
* <p>Note, since not every serial ID is supposed to have an object associated with it,
* not every serial ID is supposed to have a corresponding identifier, so {@link Optional} is required here.
*
* @param serialID the serial ID of the object associated with the requested identifier.
* @return the identifier of the object associated with the given serial ID.
*/
public @NotNull Optional<Identifier> getIdentifier(int serialID) {
return get(serialID).map(this::getIdentifier);
}
/**
* Returns the object associated to the given serial ID.
*
* <p>Note, since not every serial ID is supposed to have an object associated with it,
* an {@link Optional} is returned instead of the object itself.
*
* @param serialID the serial ID of the requested object.
* @return an {@link Optional} containing the object associated with the given serial ID,
* or an empty optional if there's no object associated with it.
*/
public abstract @NotNull Optional<T> get(int serialID);
/**
* Defines the first serial ID (inclusive).
*
* <p>This is useful if the registry in question has serial IDs reserved for some internal behavior,
* or if the serial IDs can be negative.
*
* <p>For example, the block with serial ID 0 is null because that's how Minecraft represents air,
* but a naive search for a free serial ID would consider 0 free, which would cause
* a lot of unpredictable behavior and crashes. Thus, shifting the first serial ID to 1 allows us to
* avoid such scenarios.
*
* @return the serial ID shift (inclusive).
*/
public abstract int getSerialIDShift();
/**
* Searches for a free serial ID starting from {@link AbstractSerialRegistry#getSerialIDShift()} (inclusive)
* to {@link AbstractSerialRegistry#getSize()} (exclusive).
*
* <p>If a serial ID doesn't have a value associated to it (the returned {@link Optional} is empty),
* then the serial ID is considered free.
*
* @return the found free serial ID.
* @throws IndexOutOfBoundsException if there are no free serial IDs left in the range.
*/
public int getNextSerialID() {
for (int i = getSerialIDShift(); i < getSize(); i++)
    if (!get(i).isPresent()) return i;
throw new IndexOutOfBoundsException("No more free serial IDs left for " + id + " registry!");
}
/**
* Returns the next serial ID but shifted to 0->size-shift range by subtracting the {@link AbstractSerialRegistry#getSerialIDShift()}.
*
* <p>This is useful for the ItemRegistry, in which usual items take IDs from 0->size-shift range
* (shift being 256, the default size of BlockRegistry),
* but {@link net.minecraft.item.ItemBase#id} has the true ID that's shifted back to shift->size range.
*
* @return the next serial ID but shifted to 0.
*/
public int getNextSerialIDShifted() {
return getNextSerialID() - getSerialIDShift();
}
/**
* This register method acts like a shortcut for initializing an object by giving it a free serial ID
* and adding it to the registry with the given {@code identifier}.
*
* <p>A practical use case would be:
* <p><code>myCoolBlock = registry.register(Identifier.of(MODID, "my_cool_block"), MyCoolBlock::new).setTranslationKey(MODID, "myCoolBlock");</code>
*
* @param identifier the identifier that should be associated to the object.
* @param initializer the function that initializes the object with the serial ID (for example, {@code MyCoolBlock::new}).
* @param <E> a subtype of object's type. Useful so that you get for example {@code MyCoolBlock} on the return
* instead of {@code BlockBase}.
* @return the initialized object.
* @throws IndexOutOfBoundsException if there are no free serial IDs left.
*/
public <E extends T> @NotNull E register(@NotNull Identifier identifier, IntFunction<@NotNull E> initializer) {
E value = initializer.apply(shiftSerialIDOnRegister ? getNextSerialIDShifted() : getNextSerialID());
register(identifier, value);
return value;
}
}
<|start_filename|>station-api-base/src/main/java/net/modificationstation/stationapi/api/util/math/MathHelper.java<|end_filename|>
package net.modificationstation.stationapi.api.util.math;
public class MathHelper {
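// Linearly interpolates between start and end by delta (expected to be in [0, 1]), rounding to the nearest int.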
public static int lerp(double delta, int start, int end) {
return (int) Math.round(start + (end - start) * delta);
}
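// Mixes the three block coordinates into a single long hash value, usable as a position-based hash or seed.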
public static long hashCode(int x, int y, int z) {
long l = (x * 3129871L) ^ (long)z * 116129781L ^ (long)y;
l = l * l * 42317861L + l * 11L;
return l >> 16;
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/mixin/render/client/MixinZippedTexturePack.java<|end_filename|>
package net.modificationstation.stationapi.mixin.render.client;
import net.minecraft.client.resource.TexturePack;
import net.minecraft.client.resource.ZippedTexturePack;
import net.modificationstation.stationapi.api.client.texture.atlas.ExpandableAtlas;
import net.modificationstation.stationapi.impl.client.resource.ResourceImpl;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import java.io.*;
import java.util.zip.*;
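/**
 * Lets zipped texture packs serve {@link ExpandableAtlas} spritesheets as virtual resources, and wraps the
 * streams they return in {@link ResourceImpl} together with the matching ".mcmeta" sidecar
 * (taken from the zip when present, otherwise from the classpath fallback path).
 */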
@Mixin(ZippedTexturePack.class)
public class MixinZippedTexturePack {
@Shadow private ZipFile zipFile;
@Inject(method = "getResourceAsStream(Ljava/lang/String;)Ljava/io/InputStream;", at = @At("HEAD"), cancellable = true)
private void getExpandableAtlas(String name, CallbackInfoReturnable<InputStream> cir) {
ExpandableAtlas atlas = ExpandableAtlas.getByPath(name);
if (atlas != null)
cir.setReturnValue(atlas.getStream());
}
@Inject(method = "getResourceAsStream(Ljava/lang/String;)Ljava/io/InputStream;", at = @At(value = "RETURN", ordinal = 0), locals = LocalCapture.CAPTURE_FAILHARD, cancellable = true)
private void retrieveMeta(String name, CallbackInfoReturnable<InputStream> cir, ZipEntry entry) {
InputStream resource = cir.getReturnValue();
if (resource != null) {
InputStream meta = null;
try {
ZipEntry metaEntry = zipFile.getEntry(name.substring(1) + ".mcmeta");
meta = metaEntry == null ? null : zipFile.getInputStream(metaEntry);
} catch (IOException ignored) {}
cir.setReturnValue(new ResourceImpl(resource, meta));
}
}
@Inject(method = "getResourceAsStream(Ljava/lang/String;)Ljava/io/InputStream;", at = @At(value = "RETURN", ordinal = 1), cancellable = true)
private void retrieveMeta(String name, CallbackInfoReturnable<InputStream> cir) {
InputStream resource = cir.getReturnValue();
if (resource != null)
cir.setReturnValue(new ResourceImpl(resource, TexturePack.class.getResourceAsStream(name + ".mcmeta")));
}
}
<|start_filename|>station-entities-v0/src/main/java/net/modificationstation/stationapi/api/registry/MobHandlerRegistry.java<|end_filename|>
package net.modificationstation.stationapi.api.registry;
import net.minecraft.entity.Living;
import net.minecraft.level.Level;
import org.jetbrains.annotations.NotNull;
import java.util.function.*;
import static net.modificationstation.stationapi.api.StationAPI.MODID;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
public final class MobHandlerRegistry extends Registry<Function<Level, Living>> {
public static final MobHandlerRegistry INSTANCE = new MobHandlerRegistry(of(MODID, "mob_handlers"));
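// A registry of Level -> Living factories for custom mobs. A minimal registration sketch (only an illustration,
// mirroring the PoorGuy test entity's handler identifier; substitute your own mod's MODID and entity):
// MobHandlerRegistry.INSTANCE.register(of(SLTest.MODID, "gpoor"), PoorGuy::new);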
/**
* Default registry constructor.
*
* @param identifier registry's identifier.
*/
private MobHandlerRegistry(@NotNull Identifier identifier) {
super(identifier);
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/model/json/JsonModel.java<|end_filename|>
package net.modificationstation.stationapi.api.client.model.json;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.InstanceCreator;
import net.minecraft.client.resource.TexturePack;
import net.modificationstation.stationapi.api.client.model.BasicBakedModel;
import net.modificationstation.stationapi.api.client.model.Model;
import net.modificationstation.stationapi.api.client.model.Vertex;
import net.modificationstation.stationapi.api.client.registry.ModelRegistry;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlases;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.api.resource.ResourceManager;
import net.modificationstation.stationapi.api.util.math.Direction;
import net.modificationstation.stationapi.impl.client.model.GuiLightType;
import net.modificationstation.stationapi.impl.client.model.JsonCuboidData;
import net.modificationstation.stationapi.impl.client.model.JsonFaceData;
import net.modificationstation.stationapi.impl.client.model.JsonModelData;
import java.io.*;
import java.lang.reflect.*;
import java.nio.charset.*;
import java.util.*;
import java.util.stream.*;
import static net.modificationstation.stationapi.api.StationAPI.MODID;
import static net.modificationstation.stationapi.api.client.texture.atlas.JsonModelAtlas.MISSING;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
import static net.modificationstation.stationapi.api.util.math.Direction.values;
public final class JsonModel extends Model {
@SuppressWarnings({"rawtypes", "unchecked"})
private static final Gson GSON = new GsonBuilder().registerTypeAdapter(EnumMap.class, (InstanceCreator<EnumMap>) type -> new EnumMap((Class) ((ParameterizedType) type).getActualTypeArguments()[0])).create();
private JsonModelData data;
private ImmutableMap<String, Atlas.Sprite> textures;
public static JsonModel get(final Identifier identifier) {
return get(identifier, JsonModel::new);
}
private JsonModel(final Identifier identifier) {
super(identifier, "json");
}
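// Re-resolves the model whenever the texture pack changes: walks the parent-model chain, merges inherited
// textures (later models override earlier ones) and elements (the most-derived model that defines elements wins),
// resolves "#" texture references against the station JSON-model atlas, and recomputes UVs.
// If the model file is missing, the model is unregistered and its data cleared.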
@Override
public void reloadFromTexturePack(final TexturePack newTexturePack) {
invalidated = true;
InputStream stream = newTexturePack.getResourceAsStream(modelPath);
if (stream == null) {
ModelRegistry.INSTANCE.unregister(id);
data = null;
textures = null;
} else {
data = GSON.fromJson(new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)).lines().collect(Collectors.joining("\n")), JsonModelData.class);
List<JsonModelData> inheritance = new ArrayList<>();
{
JsonModelData parentData = data;
inheritance.add(parentData);
while (parentData.parent != null)
inheritance.add(
parentData = GSON.fromJson(
new BufferedReader(
new InputStreamReader(
newTexturePack.getResourceAsStream(
ResourceManager.parsePath(
Identifier.of(parentData.parent),
"/" + MODID + "/models", "json")
),
StandardCharsets.UTF_8
)
).lines().collect(Collectors.joining("\n")), JsonModelData.class)
);
Collections.reverse(inheritance);
}
Map<String, String> textures = new HashMap<>();
List<JsonCuboidData> elements = new ArrayList<>();
inheritance.forEach(parentData -> {
if (parentData.textures != null)
textures.putAll(parentData.textures);
if (parentData.elements != null) {
elements.clear();
elements.addAll(parentData.elements);
}
});
data.textures = textures;
data.elements = elements;
ImmutableMap.Builder<String, Atlas.Sprite> texturesBuilder = ImmutableMap.builder();
data.textures.forEach((textureId, texturePath) -> {
while (texturePath.startsWith("#")) texturePath = data.textures.get(texturePath.substring(1));
texturesBuilder.put("#" + textureId, Atlases.getStationJsonModels().addTexture(of(texturePath)));
});
this.textures = texturesBuilder.build();
data.elements.forEach(cuboid -> {
cuboid.postprocess();
cuboid.faces.values().forEach(face -> face.postprocess(this.textures.getOrDefault(face.textureId, Atlases.getStationJsonModels().addTexture(MISSING))));
});
updateUVs();
}
}
public void updateUVs() {
invalidated = true;
if (data != null)
data.elements.forEach(cuboid -> cuboid.faces.values().forEach(JsonFaceData::updateUVs));
}
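// Bakes the merged element data into immutable vertex lists: faces that declare a cullface are grouped per
// Direction under that cullface, while faces without one go into the general vertex list.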
@Override
protected BasicBakedModel bake() {
Map<Direction, ImmutableList.Builder<Vertex>> faceVertexesBuilder = new EnumMap<>(Direction.class);
Arrays.stream(values()).forEach(direction -> faceVertexesBuilder.put(direction, ImmutableList.builder()));
ImmutableList.Builder<Vertex> vertexes = ImmutableList.builder();
data.elements.forEach(cuboid -> {
double[]
from = cuboid.from,
to = cuboid.to;
double
xFrom = from[0],
yFrom = from[1],
zFrom = from[2],
xTo = to[0],
yTo = to[1],
zTo = to[2];
Map<Direction, JsonFaceData> faces = cuboid.faces;
boolean shade = cuboid.isShade();
faces.forEach((direction, face) -> {
boolean absentCullface = face.cullface == null;
Direction lightingFace = absentCullface ? direction : face.cullface;
ImmutableList.Builder<Vertex> v = absentCullface ? vertexes : faceVertexesBuilder.get(face.cullface);
face.updateUVs();
double[] uv = face.getUv();
switch (direction) {
case DOWN:
v.add(Vertex.get(xFrom, yFrom, zTo, uv[4], uv[7], lightingFace, shade));
v.add(Vertex.get(xFrom, yFrom, zFrom, uv[0], uv[1], lightingFace, shade));
v.add(Vertex.get(xTo, yFrom, zFrom, uv[6], uv[5], lightingFace, shade));
v.add(Vertex.get(xTo, yFrom, zTo, uv[2], uv[3], lightingFace, shade));
break;
case UP:
v.add(Vertex.get(xTo, yTo, zTo, uv[2], uv[3], lightingFace, shade));
v.add(Vertex.get(xTo, yTo, zFrom, uv[6], uv[5], lightingFace, shade));
v.add(Vertex.get(xFrom, yTo, zFrom, uv[0], uv[1], lightingFace, shade));
v.add(Vertex.get(xFrom, yTo, zTo, uv[4], uv[7], lightingFace, shade));
break;
case EAST:
v.add(Vertex.get(xFrom, yTo, zFrom, uv[2], uv[1], lightingFace, shade));
v.add(Vertex.get(xTo, yTo, zFrom, uv[0], uv[1], lightingFace, shade));
v.add(Vertex.get(xTo, yFrom, zFrom, uv[0], uv[3], lightingFace, shade));
v.add(Vertex.get(xFrom, yFrom, zFrom, uv[2], uv[3], lightingFace, shade));
break;
case WEST:
v.add(Vertex.get(xFrom, yTo, zTo, uv[0], uv[1], lightingFace, shade));
v.add(Vertex.get(xFrom, yFrom, zTo, uv[0], uv[3], lightingFace, shade));
v.add(Vertex.get(xTo, yFrom, zTo, uv[2], uv[3], lightingFace, shade));
v.add(Vertex.get(xTo, yTo, zTo, uv[2], uv[1], lightingFace, shade));
break;
case NORTH:
v.add(Vertex.get(xFrom, yTo, zTo, uv[2], uv[1], lightingFace, shade));
v.add(Vertex.get(xFrom, yTo, zFrom, uv[0], uv[1], lightingFace, shade));
v.add(Vertex.get(xFrom, yFrom, zFrom, uv[0], uv[3], lightingFace, shade));
v.add(Vertex.get(xFrom, yFrom, zTo, uv[2], uv[3], lightingFace, shade));
break;
case SOUTH:
v.add(Vertex.get(xTo, yFrom, zTo, uv[0], uv[3], lightingFace, shade));
v.add(Vertex.get(xTo, yFrom, zFrom, uv[2], uv[3], lightingFace, shade));
v.add(Vertex.get(xTo, yTo, zFrom, uv[2], uv[1], lightingFace, shade));
v.add(Vertex.get(xTo, yTo, zTo, uv[0], uv[1], lightingFace, shade));
break;
}
});
});
ImmutableMap.Builder<Direction, ImmutableList<Vertex>> faceVertexes = ImmutableMap.builder();
faceVertexesBuilder.forEach((direction, faceQuadPointBuilder) -> faceVertexes.put(direction, faceQuadPointBuilder.build()));
return new BasicBakedModel.Builder()
.faceVertexes(Maps.immutableEnumMap(faceVertexes.build()))
.vertexes(vertexes.build())
.useAO(data.isAmbientocclusion())
.isSideLit(data.gui_light == GuiLightType.SIDE)
.sprite(textures.get("#particle"))
.build();
}
}
<|start_filename|>station-api-base/src/main/java/net/modificationstation/stationapi/api/util/math/Direction.java<|end_filename|>
package net.modificationstation.stationapi.api.util.math;
import com.google.gson.annotations.SerializedName;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import net.minecraft.util.maths.Vec3i;
import static net.modificationstation.stationapi.api.util.math.Axis.X;
import static net.modificationstation.stationapi.api.util.math.Axis.Y;
import static net.modificationstation.stationapi.api.util.math.Axis.Z;
@RequiredArgsConstructor
public enum Direction {
@SerializedName("down")
DOWN(new Vec3i(0, -1, 0), Y),
@SerializedName("up")
UP(new Vec3i(0, 1, 0), Y),
@SerializedName("east")
EAST(new Vec3i(0, 0, -1), Z),
@SerializedName("west")
WEST(new Vec3i(0, 0, 1), Z),
@SerializedName("north")
NORTH(new Vec3i(-1, 0, 0), X),
@SerializedName("south")
SOUTH(new Vec3i(1, 0, 0), X);
static {
DOWN.opposite = UP;
UP.opposite = DOWN;
EAST.opposite = WEST;
WEST.opposite = EAST;
NORTH.opposite = SOUTH;
SOUTH.opposite = NORTH;
}
public final Vec3i vector;
@Getter
private Direction opposite;
public final Axis axis;
}
<|start_filename|>src/testmod/java/net/modificationstation/sltest/entity/PoorGuy.java<|end_filename|>
package net.modificationstation.sltest.entity;
import lombok.Getter;
import net.minecraft.entity.animal.AnimalBase;
import net.minecraft.item.ItemBase;
import net.minecraft.level.Level;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.api.server.entity.HasTrackingParameters;
import net.modificationstation.stationapi.api.server.entity.MobSpawnDataProvider;
import static net.modificationstation.sltest.SLTest.MODID;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
@HasTrackingParameters(trackingDistance = 5, updatePeriod = 2)
public class PoorGuy extends AnimalBase implements MobSpawnDataProvider {
public PoorGuy(Level arg) {
super(arg);
System.out.println("well guess im dead");
texture = "/assets/sltest/textures/entities/geisterspoor.png";
}
// public PoorGuy(Level level, double x, double y, double z) {
// this(level);
// System.out.println("yoooooooooooooooooooooooo");
// setPosition(x, y, z);
// field_1026 = true;
// }
@Override
protected int getMobDrops() {
return ItemBase.wheat.id;
}
@Getter
private final Identifier handlerIdentifier = of(MODID, "gpoor");
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/mixin/render/client/MixinBlockRenderer.java<|end_filename|>
package net.modificationstation.stationapi.mixin.render.client;
import lombok.Getter;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
import net.minecraft.block.BlockBase;
import net.minecraft.client.render.block.BlockRenderer;
import net.minecraft.level.BlockView;
import net.minecraft.level.Level;
import net.modificationstation.stationapi.api.client.model.block.BlockWithInventoryRenderer;
import net.modificationstation.stationapi.api.client.model.block.BlockWithWorldRenderer;
import net.modificationstation.stationapi.impl.client.texture.StationBlockRenderer;
import net.modificationstation.stationapi.impl.client.texture.StationBlockRendererProvider;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.Unique;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
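/**
 * Redirects the vanilla bed/plant/crops/column and per-face rendering methods into {@link StationBlockRenderer},
 * and lets blocks implementing {@link BlockWithWorldRenderer} or {@link BlockWithInventoryRenderer} take over
 * world/inventory rendering entirely. The renderingInInventory flag tracks which rendering context is active.
 */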
@Environment(EnvType.CLIENT)
@Mixin(BlockRenderer.class)
public class MixinBlockRenderer implements StationBlockRendererProvider {
@Shadow private BlockView blockView;
@Unique @Getter
private final StationBlockRenderer stationBlockRenderer = new StationBlockRenderer((BlockRenderer) (Object) this);
@Unique
private boolean renderingInInventory;
@Inject(
method = "renderBed(Lnet/minecraft/block/BlockBase;III)Z",
at = @At("HEAD"),
cancellable = true
)
private void renderBed_redirect(BlockBase block, int blockX, int blockY, int blockZ, CallbackInfoReturnable<Boolean> cir) {
cir.setReturnValue(stationBlockRenderer.renderBed(block, blockX, blockY, blockZ, renderingInInventory));
}
@Inject(
method = "renderCrossed(Lnet/minecraft/block/BlockBase;III)Z",
at = @At("HEAD"),
cancellable = true
)
private void renderPlant_redirect(BlockBase arg, int i, int j, int k, CallbackInfoReturnable<Boolean> cir) {
cir.setReturnValue(stationBlockRenderer.renderPlant(arg, i, j, k, renderingInInventory));
}
@Inject(
method = "renderCrops(Lnet/minecraft/block/BlockBase;III)Z",
at = @At("HEAD"),
cancellable = true
)
private void renderCrops_redirect(BlockBase arg, int i, int j, int k, CallbackInfoReturnable<Boolean> cir) {
cir.setReturnValue(stationBlockRenderer.renderCrops(arg, i, j, k, renderingInInventory));
}
@Inject(
method = "method_47(Lnet/minecraft/block/BlockBase;IDDD)V",
at = @At("HEAD"),
cancellable = true
)
private void renderCrossed_redirect(BlockBase arg, int i, double d, double d1, double d2, CallbackInfo ci) {
stationBlockRenderer.renderCrossed(arg, i, d, d1, d2, renderingInInventory);
ci.cancel();
}
@Inject(
method = "method_56(Lnet/minecraft/block/BlockBase;IDDD)V",
at = @At("HEAD"),
cancellable = true
)
private void renderShiftedColumn_redirect(BlockBase arg, int i, double d, double d1, double d2, CallbackInfo ci) {
stationBlockRenderer.renderShiftedColumn(arg, i, d, d1, d2, renderingInInventory);
ci.cancel();
}
@Inject(
method = "renderBottomFace(Lnet/minecraft/block/BlockBase;DDDI)V",
at = @At("HEAD"),
cancellable = true
)
private void renderBottomFace_redirect(BlockBase arg, double d, double d1, double d2, int i, CallbackInfo ci) {
stationBlockRenderer.renderBottomFace(arg, d, d1, d2, i, renderingInInventory);
ci.cancel();
}
@Inject(
method = "renderTopFace(Lnet/minecraft/block/BlockBase;DDDI)V",
at = @At("HEAD"),
cancellable = true
)
private void renderTopFace_redirect(BlockBase arg, double d, double d1, double d2, int i, CallbackInfo ci) {
stationBlockRenderer.renderTopFace(arg, d, d1, d2, i, renderingInInventory);
ci.cancel();
}
@Inject(
method = "renderEastFace(Lnet/minecraft/block/BlockBase;DDDI)V",
at = @At("HEAD"),
cancellable = true
)
private void renderEastFace_redirect(BlockBase arg, double d, double d1, double d2, int i, CallbackInfo ci) {
stationBlockRenderer.renderEastFace(arg, d, d1, d2, i, renderingInInventory);
ci.cancel();
}
@Inject(
method = "renderWestFace(Lnet/minecraft/block/BlockBase;DDDI)V",
at = @At("HEAD"),
cancellable = true
)
private void renderWestFace_redirect(BlockBase arg, double d, double d1, double d2, int i, CallbackInfo ci) {
stationBlockRenderer.renderWestFace(arg, d, d1, d2, i, renderingInInventory);
ci.cancel();
}
@Inject(
method = "renderNorthFace(Lnet/minecraft/block/BlockBase;DDDI)V",
at = @At("HEAD"),
cancellable = true
)
private void renderNorthFace_redirect(BlockBase arg, double d, double d1, double d2, int i, CallbackInfo ci) {
stationBlockRenderer.renderNorthFace(arg, d, d1, d2, i, renderingInInventory);
ci.cancel();
}
@Inject(
method = "renderSouthFace(Lnet/minecraft/block/BlockBase;DDDI)V",
at = @At("HEAD"),
cancellable = true
)
private void renderSouthFace_redirect(BlockBase arg, double d, double d1, double d2, int i, CallbackInfo ci) {
stationBlockRenderer.renderSouthFace(arg, d, d1, d2, i, renderingInInventory);
ci.cancel();
}
@Inject(
method = "method_48(Lnet/minecraft/block/BlockBase;IF)V",
at = @At("HEAD")
)
private void setRenderingInInventory1(BlockBase arg, int i, float f, CallbackInfo ci) {
renderingInInventory = true;
}
@Inject(
method = "method_48(Lnet/minecraft/block/BlockBase;IF)V",
at = @At("RETURN")
)
private void setRenderingInInventory2(BlockBase arg, int i, float f, CallbackInfo ci) {
renderingInInventory = false;
}
@Inject(
method = "method_53(Lnet/minecraft/block/BlockBase;Lnet/minecraft/level/Level;III)V",
at = @At("RETURN")
)
private void renderFallingBlockAtlases(BlockBase arg, Level arg1, int i, int j, int k, CallbackInfo ci) {
stationBlockRenderer.renderActiveAtlases();
}
@Inject(
method = "render(Lnet/minecraft/block/BlockBase;III)Z",
at = @At(
value = "INVOKE",
target = "Lnet/minecraft/block/BlockBase;updateBoundingBox(Lnet/minecraft/level/BlockView;III)V",
shift = At.Shift.AFTER
),
cancellable = true
)
private void onRenderInWorld(BlockBase block, int blockX, int blockY, int blockZ, CallbackInfoReturnable<Boolean> cir) {
if (block instanceof BlockWithWorldRenderer)
cir.setReturnValue(((BlockWithWorldRenderer) block).renderWorld((BlockRenderer) (Object) this, blockView, blockX, blockY, blockZ));
}
@Inject(
method = "method_48(Lnet/minecraft/block/BlockBase;IF)V",
at = @At(
value = "INVOKE",
target = "Lnet/minecraft/block/BlockBase;getRenderType()I"
),
cancellable = true
)
private void onRenderInInventory(BlockBase arg, int i, float f, CallbackInfo ci) {
if (arg instanceof BlockWithInventoryRenderer) {
((BlockWithInventoryRenderer) arg).renderInventory((BlockRenderer) (Object) this, i);
renderingInInventory = false;
ci.cancel();
}
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/model/item/ItemWithRenderer.java<|end_filename|>
package net.modificationstation.stationapi.api.client.model.item;
import net.minecraft.item.ItemInstance;
public interface ItemWithRenderer {
void render(ItemInstance itemInstance);
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/model/JsonModelData.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.model;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import net.modificationstation.stationapi.api.util.Null;
import java.util.*;
@RequiredArgsConstructor
public class JsonModelData {
public String parent = null;
@SuppressWarnings("FieldMayBeFinal") // had to make it non-final with a getter because javac is retarded
@Getter
private boolean ambientocclusion = true;
public Map<String, String> textures = Null.get();
public List<JsonCuboidData> elements = Null.get();
public GuiLightType gui_light = GuiLightType.SIDE;
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/texture/StationParticleManager.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.texture;
import net.minecraft.client.render.Tessellator;
import net.minecraft.entity.ParticleBase;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.mixin.render.client.TessellatorAccessor;
import java.util.*;
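/**
 * Batches digging particles that draw from custom atlases: checkParticle starts the owning atlas's tessellator
 * the first time such a particle is encountered, and renderAtlases binds each active atlas and flushes its tessellator.
 */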
public class StationParticleManager {
private final Set<Atlas> activeAtlases = new HashSet<>();
public void checkParticle(ParticleBase particle) {
if (particle instanceof StationDiggingParticleProvider) {
Atlas atlas = ((StationDiggingParticleProvider) particle).getStationDiggingParticle().getTexture().getAtlas();
Tessellator tessellator = atlas.getTessellator();
if (!((TessellatorAccessor) tessellator).getDrawing()) {
activeAtlases.add(atlas);
tessellator.start();
}
}
}
public void renderAtlases() {
if (!activeAtlases.isEmpty()) {
activeAtlases.forEach(atlas -> {
atlas.bindAtlas();
atlas.getTessellator().draw();
});
activeAtlases.clear();
}
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/model/LightingHelper.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.model;
import net.minecraft.block.BlockBase;
import net.minecraft.level.BlockView;
import net.minecraft.sortme.GameRenderer;
import net.modificationstation.stationapi.api.client.model.Vertex;
import net.modificationstation.stationapi.api.util.math.Direction;
public class LightingHelper {
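/**
 * "Fast" (non-smooth) lighting: applies the classic per-face colour shading (0.5 for DOWN, 0.8 for EAST/WEST,
 * 0.6 for NORTH/SOUTH, unchanged for UP), picks either the middle or the neighbouring brightness depending on
 * which side of the block the vertex lies on, and packs the result as a 0xRRGGBB colour.
 */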
public static int getFastForVertex(
Vertex vertex,
float colourMultiplierRed, float colourMultiplierGreen, float colourMultiplierBlue,
float brightnessMiddle, float brightnessBottom, float brightnessTop, float brightnessEast, float brightnessWest, float brightnessNorth, float brightnessSouth
) {
Direction face = vertex.lightingFace;
switch (face) {
case DOWN:
colourMultiplierRed *= 0.5;
colourMultiplierGreen *= 0.5;
colourMultiplierBlue *= 0.5;
break;
case UP:
break;
case EAST:
case WEST:
colourMultiplierRed *= 0.8;
colourMultiplierGreen *= 0.8;
colourMultiplierBlue *= 0.8;
break;
case NORTH:
case SOUTH:
colourMultiplierRed *= 0.6;
colourMultiplierGreen *= 0.6;
colourMultiplierBlue *= 0.6;
break;
default:
throw new IllegalStateException("Unexpected value: " + face);
}
float brightnessMultiplier;
switch (face) {
case DOWN:
if (vertex.y > -1)
brightnessMultiplier = brightnessMiddle;
else
brightnessMultiplier = brightnessBottom;
break;
case UP:
if (vertex.y < 1)
brightnessMultiplier = brightnessMiddle;
else
brightnessMultiplier = brightnessTop;
break;
case EAST:
if (vertex.z > -1)
brightnessMultiplier = brightnessMiddle;
else
brightnessMultiplier = brightnessEast;
break;
case WEST:
if (vertex.z < 1)
brightnessMultiplier = brightnessMiddle;
else
brightnessMultiplier = brightnessWest;
break;
case NORTH:
if (vertex.x > -1)
brightnessMultiplier = brightnessMiddle;
else
brightnessMultiplier = brightnessNorth;
break;
case SOUTH:
if (vertex.x < 1)
brightnessMultiplier = brightnessMiddle;
else
brightnessMultiplier = brightnessSouth;
break;
default:
throw new IllegalStateException("Unexpected face: " + face);
}
return (((int) (colourMultiplierRed * brightnessMultiplier * 255) & 255) << 16) +
(((int) (colourMultiplierGreen * brightnessMultiplier * 255) & 255) << 8) +
((int) (colourMultiplierBlue * brightnessMultiplier * 255) & 255);
}
// TODO: use this for inner model faces
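/**
 * Smooth-lighting variant for inner model faces: samples the brightness of the surrounding blocks (corner samples
 * fall back to an adjacent edge sample when both blocks around the corner have ALLOWS_GRASS_UNDER == false),
 * interpolates the corner values by the vertex position, averages them with the block's own brightness,
 * and returns per-channel RGB multipliers.
 */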
public static float[] getSmoothForVertex(BlockBase block, BlockView blockView, int x, int y, int z, Vertex faceQuadPoint) {
Direction face = faceQuadPoint.lightingFace;
int colourMultiplier = block.getColourMultiplier(blockView, x, y, z);
float colourMultiplierRed = (float)(colourMultiplier >> 16 & 255) / 255.0F;
float colourMultiplierGreen = (float)(colourMultiplier >> 8 & 255) / 255.0F;
float colourMultiplierBlue = (float)(colourMultiplier & 255) / 255.0F;
if (GameRenderer.field_2340) {
float colourMultiplierRedTmp = (colourMultiplierRed * 30.0F + colourMultiplierGreen * 59.0F + colourMultiplierBlue * 11.0F) / 100.0F;
float colourMultiplierGreenTmp = (colourMultiplierRed * 30.0F + colourMultiplierGreen * 70.0F) / 100.0F;
float colourMultiplierBlueTmp = (colourMultiplierRed * 30.0F + colourMultiplierBlue * 70.0F) / 100.0F;
colourMultiplierRed = colourMultiplierRedTmp;
colourMultiplierGreen = colourMultiplierGreenTmp;
colourMultiplierBlue = colourMultiplierBlueTmp;
}
switch (face) {
case DOWN:
colourMultiplierRed *= 0.5;
colourMultiplierGreen *= 0.5;
colourMultiplierBlue *= 0.5;
break;
case UP:
break;
case EAST:
case WEST:
colourMultiplierRed *= 0.8;
colourMultiplierGreen *= 0.8;
colourMultiplierBlue *= 0.8;
break;
case NORTH:
case SOUTH:
colourMultiplierRed *= 0.6;
colourMultiplierGreen *= 0.6;
colourMultiplierBlue *= 0.6;
break;
default:
throw new IllegalStateException("Unexpected value: " + face);
}
float brightnessSelf = block.getBrightness(blockView, x, y, z);
y--;
float
brightnessBottomEast = block.getBrightness(blockView, x, y, z - 1),
brightnessBottomWest = block.getBrightness(blockView, x, y, z + 1),
brightnessBottomNorth = block.getBrightness(blockView, x - 1, y, z),
brightnessBottomSouth = block.getBrightness(blockView, x + 1, y, z);
boolean
allowsGrassUnderBottomEast = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y, z - 1)],
allowsGrassUnderBottomWest = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y, z + 1)],
allowsGrassUnderBottomNorth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId( x - 1, y, z)],
allowsGrassUnderBottomSouth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId( x + 1, y, z)];
y++;
float
brightnessNorthEast = block.getBrightness(blockView, x - 1, y, z - 1),
brightnessNorthWest = block.getBrightness(blockView, x - 1, y, z + 1),
brightnessSouthEast = block.getBrightness(blockView, x + 1, y, z - 1),
brightnessSouthWest = block.getBrightness(blockView, x + 1, y, z + 1);
boolean
allowsGrassUnderNorthEast = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x - 1, y, z - 1)],
allowsGrassUnderNorthWest = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x - 1, y, z + 1)],
allowsGrassUnderSouthEast = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x + 1, y, z - 1)],
allowsGrassUnderSouthWest = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x + 1, y, z + 1)];
y++;
float
brightnessTopEast = block.getBrightness(blockView, x, y, z - 1),
brightnessTopWest = block.getBrightness(blockView, x, y, z + 1),
brightnessTopNorth = block.getBrightness(blockView, x - 1, y, z),
brightnessTopSouth = block.getBrightness(blockView, x + 1, y, z);
boolean
allowsGrassUnderTopEast = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y, z - 1)],
allowsGrassUnderTopWest = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y, z + 1)],
allowsGrassUnderTopNorth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x - 1, y, z)],
allowsGrassUnderTopSouth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x + 1, y, z)];
y--;
float
brightnessBottomNorthEast,
brightnessBottomSouthEast,
brightnessBottomNorthWest,
brightnessBottomSouthWest,
brightnessTopNorthEast,
brightnessTopSouthEast,
brightnessTopNorthWest,
brightnessTopSouthWest;
if (!allowsGrassUnderBottomEast && !allowsGrassUnderBottomNorth)
brightnessBottomNorthEast = brightnessBottomNorth;
else
brightnessBottomNorthEast = block.getBrightness(blockView, x - 1, y - 1, z - 1);
if (!allowsGrassUnderBottomEast && !allowsGrassUnderBottomSouth)
brightnessBottomSouthEast = brightnessBottomSouth;
else
brightnessBottomSouthEast = block.getBrightness(blockView, x + 1, y - 1, z - 1);
if (!allowsGrassUnderBottomWest && !allowsGrassUnderBottomNorth)
brightnessBottomNorthWest = brightnessBottomNorth;
else
brightnessBottomNorthWest = block.getBrightness(blockView, x - 1, y - 1, z + 1);
if (!allowsGrassUnderBottomWest && !allowsGrassUnderBottomSouth)
brightnessBottomSouthWest = brightnessBottomSouth;
else
brightnessBottomSouthWest = block.getBrightness(blockView, x + 1, y - 1, z + 1);
if (!allowsGrassUnderTopEast && !allowsGrassUnderTopNorth)
brightnessTopNorthEast = brightnessTopNorth;
else
brightnessTopNorthEast = block.getBrightness(blockView, x - 1, y + 1, z - 1);
if (!allowsGrassUnderTopEast && !allowsGrassUnderTopSouth)
brightnessTopSouthEast = brightnessTopSouth;
else
brightnessTopSouthEast = block.getBrightness(blockView, x + 1, y + 1, z - 1);
if (!allowsGrassUnderTopWest && !allowsGrassUnderTopNorth)
brightnessTopNorthWest = brightnessTopNorth;
else
brightnessTopNorthWest = block.getBrightness(blockView, x - 1, y + 1, z + 1);
if (!allowsGrassUnderTopWest && !allowsGrassUnderTopSouth)
brightnessTopSouthWest = brightnessTopSouth;
else
brightnessTopSouthWest = block.getBrightness(blockView, x + 1, y + 1, z + 1);
float brightnessX =
(float)(brightnessBottomNorthEast + (brightnessBottomSouthEast - brightnessBottomNorthEast) * faceQuadPoint.x +
brightnessBottomNorthWest + (brightnessBottomSouthWest - brightnessBottomNorthWest) * faceQuadPoint.x +
brightnessTopNorthEast + (brightnessTopSouthEast - brightnessTopNorthEast) * faceQuadPoint.x +
brightnessTopNorthWest + (brightnessTopSouthWest - brightnessTopNorthWest) * faceQuadPoint.x
) / 4;
float brightnessY =
(float)(brightnessBottomNorthEast + (brightnessTopNorthEast - brightnessBottomNorthEast) * faceQuadPoint.y +
brightnessBottomNorthWest + (brightnessTopNorthWest - brightnessBottomNorthWest) * faceQuadPoint.y +
brightnessBottomSouthEast + (brightnessTopSouthEast - brightnessBottomSouthEast) * faceQuadPoint.y +
brightnessBottomSouthWest + (brightnessTopSouthWest - brightnessBottomSouthWest) * faceQuadPoint.y
) / 4;
float brightnessZ =
(float)(brightnessBottomNorthEast + (brightnessBottomNorthWest - brightnessBottomNorthEast) * faceQuadPoint.z +
brightnessBottomSouthEast + (brightnessBottomSouthWest - brightnessBottomSouthEast) * faceQuadPoint.z +
brightnessTopNorthEast + (brightnessTopNorthWest - brightnessTopNorthEast) * faceQuadPoint.z +
brightnessTopSouthEast + (brightnessTopSouthWest - brightnessTopSouthEast) * faceQuadPoint.z
) / 4;
float brightnessMultiplier = (brightnessX + brightnessY + brightnessZ + brightnessSelf) / 4;
return new float[] {
colourMultiplierRed * brightnessMultiplier,
colourMultiplierGreen * brightnessMultiplier,
colourMultiplierBlue * brightnessMultiplier
};
}
// TODO: use this for outer model faces, interpolate the light and optimize by using pre-calculated light levels
public static int getSmoothForVertex(BlockBase block, BlockView blockView, int x, int y, int z, Vertex faceQuadPoint, int quadPointOrder, float colourMultiplierRed, float colourMultiplierGreen, float colourMultiplierBlue) {
Direction face = faceQuadPoint.lightingFace;
switch (face) {
case DOWN:
colourMultiplierRed *= 0.5;
colourMultiplierGreen *= 0.5;
colourMultiplierBlue *= 0.5;
--y;
break;
case UP:
++y;
break;
case EAST:
colourMultiplierRed *= 0.8;
colourMultiplierGreen *= 0.8;
colourMultiplierBlue *= 0.8;
--z;
break;
case WEST:
colourMultiplierRed *= 0.8;
colourMultiplierGreen *= 0.8;
colourMultiplierBlue *= 0.8;
++z;
break;
case NORTH:
colourMultiplierRed *= 0.6;
colourMultiplierGreen *= 0.6;
colourMultiplierBlue *= 0.6;
--x;
break;
case SOUTH:
colourMultiplierRed *= 0.6;
colourMultiplierGreen *= 0.6;
colourMultiplierBlue *= 0.6;
++x;
break;
default:
throw new IllegalStateException("Unexpected value: " + face);
}
boolean
allowsGrassUnderSouth,
allowsGrassUnderNorth,
allowsGrassUnderWest,
allowsGrassUnderEast;
float
brightnessMultiplier,
brightnessMiddle = block.getBrightness(blockView, x, y, z),
brightnessNorth,
brightnessSouth,
brightnessEast,
brightnessWest,
brightnessNorthEast,
brightnessSouthEast,
brightnessNorthWest,
brightnessSouthWest;
switch (face) {
case DOWN:
case UP:
allowsGrassUnderSouth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x + 1, y, z)];
allowsGrassUnderNorth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x - 1, y, z)];
allowsGrassUnderWest = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y, z + 1)];
allowsGrassUnderEast = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y, z - 1)];
brightnessNorth = block.getBrightness(blockView, x - 1, y, z);
brightnessSouth = block.getBrightness(blockView, x + 1, y, z);
brightnessEast = block.getBrightness(blockView, x, y, z - 1);
brightnessWest = block.getBrightness(blockView, x, y, z + 1);
brightnessNorthEast = !allowsGrassUnderEast && !allowsGrassUnderNorth ? brightnessNorth : block.getBrightness(blockView, x - 1, y, z - 1);
brightnessSouthEast = !allowsGrassUnderEast && !allowsGrassUnderSouth ? brightnessSouth : block.getBrightness(blockView, x + 1, y, z - 1);
brightnessNorthWest = !allowsGrassUnderWest && !allowsGrassUnderNorth ? brightnessNorth : block.getBrightness(blockView, x - 1, y, z + 1);
brightnessSouthWest = !allowsGrassUnderWest && !allowsGrassUnderSouth ? brightnessSouth : block.getBrightness(blockView, x + 1, y, z + 1);
break;
case EAST:
case WEST:
allowsGrassUnderSouth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x + 1, y, z)];
allowsGrassUnderNorth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x - 1, y, z)];
allowsGrassUnderWest = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y - 1, z)];
allowsGrassUnderEast = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y + 1, z)];
brightnessNorth = block.getBrightness(blockView, x - 1, y, z);
brightnessSouth = block.getBrightness(blockView, x + 1, y, z);
brightnessEast = block.getBrightness(blockView, x, y + 1, z);
brightnessWest = block.getBrightness(blockView, x, y - 1, z);
brightnessNorthEast = !allowsGrassUnderEast && !allowsGrassUnderNorth ? brightnessNorth : block.getBrightness(blockView, x - 1, y + 1, z);
brightnessSouthEast = !allowsGrassUnderEast && !allowsGrassUnderSouth ? brightnessSouth : block.getBrightness(blockView, x + 1, y + 1, z);
brightnessNorthWest = !allowsGrassUnderWest && !allowsGrassUnderNorth ? brightnessNorth : block.getBrightness(blockView, x - 1, y - 1, z);
brightnessSouthWest = !allowsGrassUnderWest && !allowsGrassUnderSouth ? brightnessSouth : block.getBrightness(blockView, x + 1, y - 1, z);
break;
case NORTH:
case SOUTH:
allowsGrassUnderSouth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y + 1, z)];
allowsGrassUnderNorth = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y - 1, z)];
allowsGrassUnderWest = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y, z - 1)];
allowsGrassUnderEast = BlockBase.ALLOWS_GRASS_UNDER[blockView.getTileId(x, y, z + 1)];
brightnessNorth = block.getBrightness(blockView, x, y - 1, z);
brightnessSouth = block.getBrightness(blockView, x, y + 1, z);
brightnessEast = block.getBrightness(blockView, x, y, z + 1);
brightnessWest = block.getBrightness(blockView, x, y, z - 1);
brightnessNorthEast = !allowsGrassUnderEast && !allowsGrassUnderNorth ? brightnessNorth : block.getBrightness(blockView, x, y - 1, z + 1);
brightnessSouthEast = !allowsGrassUnderEast && !allowsGrassUnderSouth ? brightnessSouth : block.getBrightness(blockView, x, y + 1, z + 1);
brightnessNorthWest = !allowsGrassUnderWest && !allowsGrassUnderNorth ? brightnessNorth : block.getBrightness(blockView, x, y - 1, z - 1);
brightnessSouthWest = !allowsGrassUnderWest && !allowsGrassUnderSouth ? brightnessSouth : block.getBrightness(blockView, x, y + 1, z - 1);
break;
default:
throw new IllegalStateException("Unexpected block face: " + face);
}
switch (face) {
case DOWN:
switch (quadPointOrder) {
case 0:
brightnessMultiplier = (brightnessNorthWest + brightnessNorth + brightnessWest + brightnessMiddle) / 4;
break;
case 1:
brightnessMultiplier = (brightnessNorth + brightnessNorthEast + brightnessMiddle + brightnessEast) / 4;
break;
case 2:
brightnessMultiplier = (brightnessMiddle + brightnessEast + brightnessSouth + brightnessSouthEast) / 4;
break;
case 3:
brightnessMultiplier = (brightnessWest + brightnessMiddle + brightnessSouthWest + brightnessSouth) / 4;
break;
default:
throw new IllegalStateException("Unexpected quad point order: " + quadPointOrder);
}
break;
case UP:
switch (quadPointOrder) {
case 0:
brightnessMultiplier = (brightnessWest + brightnessMiddle + brightnessSouthWest + brightnessSouth) / 4;
break;
case 1:
brightnessMultiplier = (brightnessMiddle + brightnessEast + brightnessSouth + brightnessSouthEast) / 4;
break;
case 2:
brightnessMultiplier = (brightnessNorth + brightnessNorthEast + brightnessMiddle + brightnessEast) / 4;
break;
case 3:
brightnessMultiplier = (brightnessNorthWest + brightnessNorth + brightnessWest + brightnessMiddle) / 4;
break;
default:
throw new IllegalStateException("Unexpected quad point order: " + quadPointOrder);
}
break;
case EAST:
switch (quadPointOrder) {
case 0:
brightnessMultiplier = (brightnessNorth + brightnessNorthEast + brightnessMiddle + brightnessEast) / 4;
break;
case 1:
brightnessMultiplier = (brightnessMiddle + brightnessEast + brightnessSouth + brightnessSouthEast) / 4;
break;
case 2:
brightnessMultiplier = (brightnessWest + brightnessMiddle + brightnessSouthWest + brightnessSouth) / 4;
break;
case 3:
brightnessMultiplier = (brightnessNorthWest + brightnessNorth + brightnessWest + brightnessMiddle) / 4;
break;
default:
throw new IllegalStateException("Unexpected quad point order: " + quadPointOrder);
}
break;
case WEST:
case SOUTH:
switch (quadPointOrder) {
case 0:
brightnessMultiplier = (brightnessNorth + brightnessNorthEast + brightnessMiddle + brightnessEast) / 4;
break;
case 1:
brightnessMultiplier = (brightnessNorthWest + brightnessNorth + brightnessWest + brightnessMiddle) / 4;
break;
case 2:
brightnessMultiplier = (brightnessWest + brightnessMiddle + brightnessSouthWest + brightnessSouth) / 4;
break;
case 3:
brightnessMultiplier = (brightnessMiddle + brightnessEast + brightnessSouth + brightnessSouthEast) / 4;
break;
default:
throw new IllegalStateException("Unexpected quad point order: " + quadPointOrder);
}
break;
case NORTH:
switch (quadPointOrder) {
case 0:
brightnessMultiplier = (brightnessMiddle + brightnessEast + brightnessSouth + brightnessSouthEast) / 4;
break;
case 1:
brightnessMultiplier = (brightnessWest + brightnessMiddle + brightnessSouthWest + brightnessSouth) / 4;
break;
case 2:
brightnessMultiplier = (brightnessNorthWest + brightnessNorth + brightnessWest + brightnessMiddle) / 4;
break;
case 3:
brightnessMultiplier = (brightnessNorth + brightnessNorthEast + brightnessMiddle + brightnessEast) / 4;
break;
default:
throw new IllegalStateException("Unexpected quad point order: " + quadPointOrder);
}
break;
default:
throw new IllegalStateException("Unexpected block face: " + face);
}
return (((int) (colourMultiplierRed * brightnessMultiplier * 255) & 255) << 16) +
(((int) (colourMultiplierGreen * brightnessMultiplier * 255) & 255) << 8) +
((int) (colourMultiplierBlue * brightnessMultiplier * 255) & 255);
}
}
<|start_filename|>station-dimensions-v0/src/main/java/net/modificationstation/stationapi/impl/server/level/dimension/DimensionHelperServerImpl.java<|end_filename|>
package net.modificationstation.stationapi.impl.server.level.dimension;
import net.minecraft.class_467;
import net.minecraft.entity.player.PlayerBase;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.impl.level.dimension.DimensionHelperImpl;
public class DimensionHelperServerImpl extends DimensionHelperImpl {
@Override
public void switchDimension(PlayerBase player, Identifier destination, double scale, class_467 travelAgent, String enteringMessage, String leavingMessage) {
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/texture/StationPortalTextureBinder.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.texture;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.block.BlockBase;
import net.minecraft.client.Minecraft;
import net.minecraft.client.resource.TexturePack;
import net.minecraft.util.maths.MathHelper;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlases;
import net.modificationstation.stationapi.api.client.texture.binder.StationTextureBinder;
import java.util.*;
public class StationPortalTextureBinder extends StationTextureBinder {
private int updatesRan = 0;
private final byte[][] texture = new byte[32][];
public StationPortalTextureBinder() {
super(Atlases.getTerrain().getTexture(BlockBase.PORTAL.texture));
//noinspection deprecation
reloadFromTexturePack(((Minecraft) FabricLoader.getInstance().getGameInstance()).texturePackManager.texturePack);
}
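// Precomputes 32 frames of the swirling portal animation at the static sprite's resolution;
// update() below then copies the current frame into the binder's grid, converting the colours
// to anaglyph values when render3d is enabled.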
@Override
public void reloadFromTexturePack(TexturePack newTexturePack) {
int
textureWidth = getStaticReference().getWidth(),
textureHeight = getStaticReference().getHeight();
Random var1 = new Random(100L);
grid = new byte[textureWidth * textureHeight * 4];
for(int var2 = 0; var2 < 32; ++var2) {
texture[var2] = new byte[textureWidth * textureHeight * 4];
for(int var3 = 0; var3 < textureWidth; ++var3) {
for(int var4 = 0; var4 < textureHeight; ++var4) {
float var5 = 0.0F;
for(int var6 = 0; var6 < 2; ++var6) {
float var7 = (float)(var6 * textureWidth / 2);
float var8 = (float)(var6 * textureHeight / 2);
float var9 = ((float)var3 - var7) / textureWidth * 2.0F;
float var10 = ((float)var4 - var8) / textureHeight * 2.0F;
if (var9 < -1.0F) {
var9 += 2.0F;
}
if (var9 >= 1.0F) {
var9 -= 2.0F;
}
if (var10 < -1.0F) {
var10 += 2.0F;
}
if (var10 >= 1.0F) {
var10 -= 2.0F;
}
float var11 = var9 * var9 + var10 * var10;
float var12 = (float)Math.atan2(var10, var9) + ((float)var2 / 32.0F * (float)Math.PI * 2.0F - var11 * 10.0F + (float)(var6 * 2)) * (float)(var6 * 2 - 1);
var12 = (MathHelper.sin(var12) + 1.0F) / 2.0F;
var12 = var12 / (var11 + 1.0F);
var5 += var12 * 0.5F;
}
var5 = var5 + var1.nextFloat() * 0.1F;
int var14 = (int)(var5 * 100.0F + 155.0F);
int var15 = (int)(var5 * var5 * 200.0F + 55.0F);
int var16 = (int)(var5 * var5 * var5 * var5 * 255.0F);
int var17 = (int)(var5 * 100.0F + 155.0F);
int var18 = var4 * textureWidth + var3;
this.texture[var2][var18 * 4] = (byte)var15;
this.texture[var2][var18 * 4 + 1] = (byte)var16;
this.texture[var2][var18 * 4 + 2] = (byte)var14;
this.texture[var2][var18 * 4 + 3] = (byte)var17;
}
}
}
}
@Override
public void update() {
++this.updatesRan;
byte[] var1 = this.texture[this.updatesRan & 31];
for(int var2 = 0; var2 < getStaticReference().getWidth() * getStaticReference().getHeight(); ++var2) {
int var3 = var1[var2 * 4] & 255;
int var4 = var1[var2 * 4 + 1] & 255;
int var5 = var1[var2 * 4 + 2] & 255;
int var6 = var1[var2 * 4 + 3] & 255;
if (this.render3d) {
int var7 = (var3 * 30 + var4 * 59 + var5 * 11) / 100;
int var8 = (var3 * 30 + var4 * 70) / 100;
int var9 = (var3 * 30 + var5 * 70) / 100;
var3 = var7;
var4 = var8;
var5 = var9;
}
this.grid[var2 * 4] = (byte)var3;
this.grid[var2 * 4 + 1] = (byte)var4;
this.grid[var2 * 4 + 2] = (byte)var5;
this.grid[var2 * 4 + 3] = (byte)var6;
}
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/texture/atlas/ExpandableAtlas.java<|end_filename|>
package net.modificationstation.stationapi.api.client.texture.atlas;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.client.Minecraft;
import net.minecraft.client.resource.TexturePack;
import net.minecraft.client.texture.TextureManager;
import net.modificationstation.stationapi.api.client.resource.Resource;
import net.modificationstation.stationapi.api.client.texture.TextureAnimationData;
import net.modificationstation.stationapi.api.client.texture.TextureHelper;
import net.modificationstation.stationapi.api.client.texture.binder.AnimationTextureBinder;
import net.modificationstation.stationapi.api.client.texture.binder.StationTextureBinder;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.api.resource.ResourceManager;
import net.modificationstation.stationapi.mixin.render.client.TextureManagerAccessor;
import javax.imageio.*;
import java.awt.*;
import java.awt.image.*;
import java.io.*;
import java.util.*;
import java.util.function.*;
import static net.modificationstation.stationapi.api.StationAPI.MODID;
public class ExpandableAtlas extends Atlas {
private static final Map<String, ExpandableAtlas> PATH_TO_ATLAS = new HashMap<>();
protected final Map<String, Sprite> textureCache = new HashMap<>();
public ExpandableAtlas(final Identifier identifier) {
super("/assets/stationapi/atlases/" + identifier, 0, false);
}
public ExpandableAtlas(final Identifier identifier, final Atlas parent) {
super("/assets/stationapi/atlases/" + identifier, 0, false, parent);
}
@Override
protected void init() {
PATH_TO_ATLAS.put(spritesheet, this);
}
@Override
public BufferedImage getImage() {
return imageCache;
}
@Override
public InputStream getStream() {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
try {
//noinspection deprecation
ImageIO.write(imageCache == null ? ((TextureManagerAccessor) ((Minecraft) FabricLoader.getInstance().getGameInstance()).textureManager).getMissingTexImage() : imageCache, "png", outputStream);
} catch (IOException e) {
throw new RuntimeException(e);
}
return new ByteArrayInputStream(outputStream.toByteArray());
}
public Sprite addTexture(Identifier texture) {
return addTexture(ResourceManager.parsePath(texture, "/" + MODID + "/textures", "png"));
}
public Sprite addTexture(String texturePath) {
if (textureCache.containsKey(texturePath))
return textureCache.get(texturePath);
else {
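            // Not cached yet: read the texture, append it to the right edge of the spritesheet, refresh the GL texture and sprite UVs, and register an animation binder if animation data is present.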
Resource textureResource = Resource.of(TextureHelper.getTextureStream(texturePath));
BufferedImage image = TextureHelper.readTextureStream(textureResource.getResource());
int width = image.getWidth();
int height = image.getHeight();
int previousAtlasWidth = imageCache == null ? 0 : imageCache.getWidth();
Optional<TextureAnimationData> animationDataOptional = TextureAnimationData.parse(textureResource);
boolean animationPresent = animationDataOptional.isPresent();
BufferedImage frames = null;
if (animationPresent) {
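                // Animated textures are vertical frame strips: only the first square frame is placed on the atlas, while the full strip drives an AnimationTextureBinder.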
//noinspection SuspiciousNameCombination
height = width;
frames = image;
image = image.getSubimage(0, 0, width, height);
}
drawTextureOnSpritesheet(image);
refreshTextureID();
textures.forEach(Sprite::updateUVs);
FileSprite texture = new FileSprite(
texturePath, size++,
previousAtlasWidth, 0,
width, height
);
textureCache.put(texturePath, texture);
textures.add(texture);
if (animationPresent) {
BufferedImage finalFrames = frames;
addTextureBinder(texture, texture1 -> new AnimationTextureBinder(finalFrames, texture1, animationDataOptional.get()));
}
return texture;
}
}
public <T extends StationTextureBinder> T addTextureBinder(Identifier staticReference, Function<Sprite, T> initializer) {
return addTextureBinder(addTexture(staticReference), initializer);
}
private void drawTextureOnSpritesheet(BufferedImage image) {
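        // The first texture becomes a deep copy of its image; subsequent textures grow the sheet horizontally and are drawn at the previous right edge.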
if (imageCache == null) {
ColorModel cm = image.getColorModel();
boolean isAlphaPremultiplied = cm.isAlphaPremultiplied();
WritableRaster raster = image.copyData(null);
imageCache = new BufferedImage(cm, raster, isAlphaPremultiplied, null);
} else {
int previousAtlasWidth = imageCache.getWidth();
resizeSpritesheet(imageCache.getWidth() + image.getWidth(), Math.max(image.getHeight(), imageCache.getHeight()));
Graphics2D graphics = imageCache.createGraphics();
graphics.drawImage(image, previousAtlasWidth, 0, null);
graphics.dispose();
}
}
private void resizeSpritesheet(int targetWidth, int targetHeight) {
BufferedImage previousSpriteSheet = imageCache;
imageCache = new BufferedImage(targetWidth, targetHeight, BufferedImage.TYPE_INT_ARGB);
Graphics2D graphics = imageCache.createGraphics();
graphics.drawImage(previousSpriteSheet, 0, 0, null);
graphics.dispose();
}
protected void refreshTextureID() {
if (imageCache != null) {
//noinspection deprecation
Minecraft minecraft = (Minecraft) FabricLoader.getInstance().getGameInstance();
TextureManagerAccessor textureManager = (TextureManagerAccessor) minecraft.textureManager;
textureManager.invokeBindImageToId(imageCache, minecraft.textureManager.getTextureId(spritesheet));
}
}
@Override
public void reloadFromTexturePack(TexturePack newTexturePack) {
super.reloadFromTexturePack(newTexturePack);
textures.forEach(texture -> {
texture.x = imageCache == null ? 0 : imageCache.getWidth();
texture.y = 0;
Resource textureResource = Resource.of(newTexturePack.getResourceAsStream(((FileSprite) texture).path));
BufferedImage image = TextureHelper.readTextureStream(textureResource.getResource());
int
width = image.getWidth(),
height = image.getHeight();
Optional<TextureAnimationData> animationDataOptional = TextureAnimationData.parse(textureResource);
BufferedImage frames = null;
boolean animationPresent = animationDataOptional.isPresent();
if (animationPresent) {
//noinspection SuspiciousNameCombination
height = width;
frames = image;
image = image.getSubimage(0, 0, width, height);
}
texture.width = width;
texture.height = height;
drawTextureOnSpritesheet(image);
if (animationPresent) {
TextureAnimationData animationData = animationDataOptional.get();
//noinspection deprecation
TextureManager textureManager = ((Minecraft) FabricLoader.getInstance().getGameInstance()).textureManager;
textureManager.addTextureBinder(new AnimationTextureBinder(frames, texture, animationData));
}
});
textures.forEach(Sprite::updateUVs);
refreshTextureID();
}
public static ExpandableAtlas getByPath(String spritesheet) {
return PATH_TO_ATLAS.get(spritesheet);
}
public class FileSprite extends Sprite {
public final String path;
protected FileSprite(String path, int index, int x, int y, int width, int height) {
super(index, x, y, width, height);
this.path = path;
}
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/texture/atlas/Atlases.java<|end_filename|>
package net.modificationstation.stationapi.api.client.texture.atlas;
import net.modificationstation.stationapi.impl.client.texture.StationRenderAPI;
public final class Atlases {
public static SquareAtlas getTerrain() {
return StationRenderAPI.TERRAIN;
}
public static SquareAtlas getGuiItems() {
return StationRenderAPI.GUI_ITEMS;
}
public static ExpandableAtlas getStationTerrain() {
return StationRenderAPI.STATION_TERRAIN;
}
public static ExpandableAtlas getStationGuiItems() {
return StationRenderAPI.STATION_GUI_ITEMS;
}
public static JsonModelAtlas getStationJsonModels() {
return StationRenderAPI.STATION_JSON_MODELS;
}
}
<|start_filename|>station-tools-api-v0/src/main/java/net/modificationstation/stationapi/impl/item/ToolEffectivenessImpl.java<|end_filename|>
package net.modificationstation.stationapi.impl.item;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.mine_diver.unsafeevents.listener.ListenerPriority;
import net.minecraft.item.ItemBase;
import net.modificationstation.stationapi.api.block.BlockMiningLevel;
import net.modificationstation.stationapi.api.event.item.IsItemEffectiveOnBlockEvent;
import net.modificationstation.stationapi.api.event.item.ItemStrengthOnBlockEvent;
import net.modificationstation.stationapi.api.item.tool.OverrideIsEffectiveOn;
import net.modificationstation.stationapi.api.item.tool.ToolLevel;
import net.modificationstation.stationapi.api.mod.entrypoint.Entrypoint;
import net.modificationstation.stationapi.api.mod.entrypoint.EventBusPolicy;
@Entrypoint(eventBus = @EventBusPolicy(registerInstance = false))
public class ToolEffectivenessImpl {
@EventListener(priority = ListenerPriority.HIGH)
private static void isEffective(IsItemEffectiveOnBlockEvent event) {
ItemBase item = event.itemInstance.getType();
if (item instanceof ToolLevel) {
event.effective =
((BlockMiningLevel) event.block).getToolTypes(event.meta, event.itemInstance) != null &&
((BlockMiningLevel) event.block).getToolTypes(event.meta, event.itemInstance).stream().anyMatch(entry -> entry != null && entry.isInstance(event.itemInstance.getType())) &&
((ToolLevel) item).getToolLevel() >= ((BlockMiningLevel) event.block).getBlockLevel(event.meta, event.itemInstance);
if (item instanceof OverrideIsEffectiveOn)
event.effective = ((OverrideIsEffectiveOn) item).overrideIsEffectiveOn((ToolLevel) item, event.block, event.meta, event.effective);
}
}
@EventListener(priority = ListenerPriority.HIGH)
private static void getStrength(ItemStrengthOnBlockEvent event) {
if (
event.itemInstance.getType() instanceof ToolLevel &&
((BlockMiningLevel) event.block).getBlockLevel(event.meta, event.itemInstance) <= ((ToolLevel) event.itemInstance.getType()).getToolLevel() &&
((BlockMiningLevel) event.block).getBlockLevel(event.meta, event.itemInstance) != -1 &&
((BlockMiningLevel) event.block).getToolTypes(event.meta, event.itemInstance) != null &&
((BlockMiningLevel) event.block).getToolTypes(event.meta, event.itemInstance).stream().anyMatch((toolLevel) -> toolLevel != null && toolLevel.isInstance(event.itemInstance.getType()))
)
event.strength = ((BlockMiningLevel) event.block).getEffectiveMiningSpeed(event.itemInstance, event.meta, ((ToolLevel) event.itemInstance.getType()).getMaterial().getMiningSpeed());
}
}
<|start_filename|>station-items-v0/src/main/java/net/modificationstation/stationapi/impl/item/ItemRegistryInit.java<|end_filename|>
package net.modificationstation.stationapi.impl.item;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.mine_diver.unsafeevents.listener.ListenerPriority;
import net.modificationstation.stationapi.api.event.registry.ItemRegistryEvent;
import net.modificationstation.stationapi.api.mod.entrypoint.Entrypoint;
import net.modificationstation.stationapi.api.mod.entrypoint.EventBusPolicy;
import net.modificationstation.stationapi.api.registry.ItemRegistry;
import static net.minecraft.item.ItemBase.apple;
import static net.minecraft.item.ItemBase.arrow;
import static net.minecraft.item.ItemBase.bed;
import static net.minecraft.item.ItemBase.boat;
import static net.minecraft.item.ItemBase.bone;
import static net.minecraft.item.ItemBase.book;
import static net.minecraft.item.ItemBase.bow;
import static net.minecraft.item.ItemBase.bowl;
import static net.minecraft.item.ItemBase.bread;
import static net.minecraft.item.ItemBase.brick;
import static net.minecraft.item.ItemBase.bucket;
import static net.minecraft.item.ItemBase.cake;
import static net.minecraft.item.ItemBase.chainBoots;
import static net.minecraft.item.ItemBase.chainChestplate;
import static net.minecraft.item.ItemBase.chainHelmet;
import static net.minecraft.item.ItemBase.chainLeggings;
import static net.minecraft.item.ItemBase.clay;
import static net.minecraft.item.ItemBase.clock;
import static net.minecraft.item.ItemBase.coal;
import static net.minecraft.item.ItemBase.compass;
import static net.minecraft.item.ItemBase.cookedFish;
import static net.minecraft.item.ItemBase.cookedPorkchop;
import static net.minecraft.item.ItemBase.cookie;
import static net.minecraft.item.ItemBase.diamond;
import static net.minecraft.item.ItemBase.diamondAxe;
import static net.minecraft.item.ItemBase.diamondBoots;
import static net.minecraft.item.ItemBase.diamondChestplate;
import static net.minecraft.item.ItemBase.diamondHelmet;
import static net.minecraft.item.ItemBase.diamondHoe;
import static net.minecraft.item.ItemBase.diamondLeggings;
import static net.minecraft.item.ItemBase.diamondPickaxe;
import static net.minecraft.item.ItemBase.diamondShovel;
import static net.minecraft.item.ItemBase.diamondSword;
import static net.minecraft.item.ItemBase.dyePowder;
import static net.minecraft.item.ItemBase.egg;
import static net.minecraft.item.ItemBase.feather;
import static net.minecraft.item.ItemBase.fishingRod;
import static net.minecraft.item.ItemBase.flint;
import static net.minecraft.item.ItemBase.flintAndSteel;
import static net.minecraft.item.ItemBase.glowstoneDust;
import static net.minecraft.item.ItemBase.goldAxe;
import static net.minecraft.item.ItemBase.goldBoots;
import static net.minecraft.item.ItemBase.goldChestplate;
import static net.minecraft.item.ItemBase.goldHelmet;
import static net.minecraft.item.ItemBase.goldHoe;
import static net.minecraft.item.ItemBase.goldIngot;
import static net.minecraft.item.ItemBase.goldLeggings;
import static net.minecraft.item.ItemBase.goldPickaxe;
import static net.minecraft.item.ItemBase.goldShovel;
import static net.minecraft.item.ItemBase.goldSword;
import static net.minecraft.item.ItemBase.goldenApple;
import static net.minecraft.item.ItemBase.gunpowder;
import static net.minecraft.item.ItemBase.ironAxe;
import static net.minecraft.item.ItemBase.ironBoots;
import static net.minecraft.item.ItemBase.ironChestplate;
import static net.minecraft.item.ItemBase.ironDoor;
import static net.minecraft.item.ItemBase.ironHelmet;
import static net.minecraft.item.ItemBase.ironHoe;
import static net.minecraft.item.ItemBase.ironIngot;
import static net.minecraft.item.ItemBase.ironLeggings;
import static net.minecraft.item.ItemBase.ironPickaxe;
import static net.minecraft.item.ItemBase.ironShovel;
import static net.minecraft.item.ItemBase.ironSword;
import static net.minecraft.item.ItemBase.lavaBucket;
import static net.minecraft.item.ItemBase.leather;
import static net.minecraft.item.ItemBase.leatherBoots;
import static net.minecraft.item.ItemBase.leatherChestplate;
import static net.minecraft.item.ItemBase.leatherHelmet;
import static net.minecraft.item.ItemBase.leatherLeggings;
import static net.minecraft.item.ItemBase.map;
import static net.minecraft.item.ItemBase.milk;
import static net.minecraft.item.ItemBase.minecart;
import static net.minecraft.item.ItemBase.minecartChest;
import static net.minecraft.item.ItemBase.minecartFurnace;
import static net.minecraft.item.ItemBase.mushroomStew;
import static net.minecraft.item.ItemBase.painting;
import static net.minecraft.item.ItemBase.paper;
import static net.minecraft.item.ItemBase.rawFish;
import static net.minecraft.item.ItemBase.rawPorkchop;
import static net.minecraft.item.ItemBase.record13;
import static net.minecraft.item.ItemBase.recordCat;
import static net.minecraft.item.ItemBase.redstoneDust;
import static net.minecraft.item.ItemBase.redstoneRepeater;
import static net.minecraft.item.ItemBase.saddle;
import static net.minecraft.item.ItemBase.seeds;
import static net.minecraft.item.ItemBase.shears;
import static net.minecraft.item.ItemBase.sign;
import static net.minecraft.item.ItemBase.slimeball;
import static net.minecraft.item.ItemBase.snowball;
import static net.minecraft.item.ItemBase.stick;
import static net.minecraft.item.ItemBase.stoneAxe;
import static net.minecraft.item.ItemBase.stoneHoe;
import static net.minecraft.item.ItemBase.stonePickaxe;
import static net.minecraft.item.ItemBase.stoneShovel;
import static net.minecraft.item.ItemBase.stoneSword;
import static net.minecraft.item.ItemBase.string;
import static net.minecraft.item.ItemBase.sugar;
import static net.minecraft.item.ItemBase.sugarCanes;
import static net.minecraft.item.ItemBase.waterBucket;
import static net.minecraft.item.ItemBase.wheat;
import static net.minecraft.item.ItemBase.woodAxe;
import static net.minecraft.item.ItemBase.woodDoor;
import static net.minecraft.item.ItemBase.woodHoe;
import static net.minecraft.item.ItemBase.woodPickaxe;
import static net.minecraft.item.ItemBase.woodShovel;
import static net.minecraft.item.ItemBase.woodSword;
import static net.modificationstation.stationapi.api.StationAPI.LOGGER;
import static net.modificationstation.stationapi.api.registry.Identifier.of;
/**
* @author mine_diver
*/
@Entrypoint(eventBus = @EventBusPolicy(registerInstance = false))
public class ItemRegistryInit {
@EventListener(priority = ListenerPriority.HIGH)
private static void registerItems(ItemRegistryEvent event) {
ItemRegistry r = event.registry;
r.register(of("iron_shovel"), ironShovel);
r.register(of("iron_pickaxe"), ironPickaxe);
r.register(of("iron_axe"), ironAxe);
r.register(of("flint_and_steel"), flintAndSteel);
r.register(of("apple"), apple);
r.register(of("bow"), bow);
r.register(of("arrow"), arrow);
r.register(of("coal"), coal);
r.register(of("diamond"), diamond);
r.register(of("iron_ingot"), ironIngot);
r.register(of("gold_ingot"), goldIngot);
r.register(of("iron_sword"), ironSword);
r.register(of("wooden_sword"), woodSword);
r.register(of("wooden_shovel"), woodShovel);
r.register(of("wooden_pickaxe"), woodPickaxe);
r.register(of("wooden_axe"), woodAxe);
r.register(of("stone_sword"), stoneSword);
r.register(of("stone_shovel"), stoneShovel);
r.register(of("stone_pickaxe"), stonePickaxe);
r.register(of("stone_axe"), stoneAxe);
r.register(of("diamond_sword"), diamondSword);
r.register(of("diamond_shovel"), diamondShovel);
r.register(of("diamond_pickaxe"), diamondPickaxe);
r.register(of("diamond_axe"), diamondAxe);
r.register(of("stick"), stick);
r.register(of("bowl"), bowl);
r.register(of("mushroom_stew"), mushroomStew);
r.register(of("golden_sword"), goldSword);
r.register(of("golden_shovel"), goldShovel);
r.register(of("golden_pickaxe"), goldPickaxe);
r.register(of("golden_axe"), goldAxe);
r.register(of("string"), string);
r.register(of("feather"), feather);
r.register(of("gunpowder"), gunpowder);
r.register(of("wooden_hoe"), woodHoe);
r.register(of("stone_hoe"), stoneHoe);
r.register(of("iron_hoe"), ironHoe);
r.register(of("diamond_hoe"), diamondHoe);
r.register(of("golden_hoe"), goldHoe);
r.register(of("wheat_seeds"), seeds);
r.register(of("wheat"), wheat);
r.register(of("bread"), bread);
r.register(of("leather_helmet"), leatherHelmet);
r.register(of("leather_chestplate"), leatherChestplate);
r.register(of("leather_leggings"), leatherLeggings);
r.register(of("leather_boots"), leatherBoots);
r.register(of("chainmail_helmet"), chainHelmet);
r.register(of("chainmail_chestplate"), chainChestplate);
r.register(of("chainmail_leggings"), chainLeggings);
r.register(of("chainmail_boots"), chainBoots);
r.register(of("iron_helmet"), ironHelmet);
r.register(of("iron_chestplate"), ironChestplate);
r.register(of("iron_leggings"), ironLeggings);
r.register(of("iron_boots"), ironBoots);
r.register(of("diamond_helmet"), diamondHelmet);
r.register(of("diamond_chestplate"), diamondChestplate);
r.register(of("diamond_leggings"), diamondLeggings);
r.register(of("diamond_boots"), diamondBoots);
r.register(of("golden_helmet"), goldHelmet);
r.register(of("golden_chestplate"), goldChestplate);
r.register(of("golden_leggings"), goldLeggings);
r.register(of("golden_boots"), goldBoots);
r.register(of("flint"), flint);
r.register(of("porkchop"), rawPorkchop);
r.register(of("cooked_porkchop"), cookedPorkchop);
r.register(of("painting"), painting);
r.register(of("golden_apple"), goldenApple);
r.register(of("sign"), sign);
r.register(of("oak_door"), woodDoor);
r.register(of("bucket"), bucket);
r.register(of("water_bucket"), waterBucket);
r.register(of("lava_bucket"), lavaBucket);
r.register(of("minecart"), minecart);
r.register(of("saddle"), saddle);
r.register(of("iron_door"), ironDoor);
r.register(of("redstone"), redstoneDust);
r.register(of("snowball"), snowball);
r.register(of("oak_boat"), boat);
r.register(of("leather"), leather);
r.register(of("milk_bucket"), milk);
r.register(of("brick"), brick);
r.register(of("clay_ball"), clay);
r.register(of("sugar_cane"), sugarCanes);
r.register(of("paper"), paper);
r.register(of("book"), book);
r.register(of("slime_ball"), slimeball);
r.register(of("chest_minecart"), minecartChest);
r.register(of("furnace_minecart"), minecartFurnace);
r.register(of("egg"), egg);
r.register(of("compass"), compass);
r.register(of("fishing_rod"), fishingRod);
r.register(of("clock"), clock);
r.register(of("glowstone_dust"), glowstoneDust);
r.register(of("fish"), rawFish);
r.register(of("cooked_fish"), cookedFish);
r.register(of("dye"), dyePowder);
r.register(of("bone"), bone);
r.register(of("sugar"), sugar);
r.register(of("cake"), cake);
r.register(of("bed"), bed);
r.register(of("repeater"), redstoneRepeater);
r.register(of("cookie"), cookie);
r.register(of("map"), map);
r.register(of("shears"), shears);
r.register(of("music_disc_13"), record13);
r.register(of("music_disc_cat"), recordCat);
LOGGER.info("Added vanilla items to the registry.");
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/texture/binder/StationTextureBinder.java<|end_filename|>
package net.modificationstation.stationapi.api.client.texture.binder;
import lombok.Getter;
import net.minecraft.client.render.TextureBinder;
import net.minecraft.client.texture.TextureManager;
import net.modificationstation.stationapi.api.client.texture.TexturePackDependent;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
public abstract class StationTextureBinder extends TextureBinder implements StaticReferenceProvider, TexturePackDependent {
@Getter
private final Atlas.Sprite staticReference;
public StationTextureBinder(Atlas.Sprite staticReference) {
super(staticReference.index);
this.staticReference = staticReference;
}
@Override
public void bindTexture(TextureManager manager) {
staticReference.getAtlas().bindAtlas();
}
}
<|start_filename|>station-dimensions-v0/src/main/java/net/modificationstation/stationapi/mixin/dimension/client/MixinMinecraft.java<|end_filename|>
package net.modificationstation.stationapi.mixin.dimension.client;
import net.minecraft.client.Minecraft;
import net.modificationstation.stationapi.api.StationAPI;
import net.modificationstation.stationapi.api.event.registry.DimensionRegistryEvent;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
@Mixin(Minecraft.class)
public class MixinMinecraft {
@Inject(
method = "init()V",
at = @At("RETURN")
)
private void initDimensions(CallbackInfo ci) {
StationAPI.EVENT_BUS.post(new DimensionRegistryEvent());
}
}
<|start_filename|>station-items-v0/src/main/java/net/modificationstation/stationapi/api/client/event/gui/TooltipRenderEvent.java<|end_filename|>
package net.modificationstation.stationapi.api.client.event.gui;
import lombok.Getter;
import net.minecraft.client.gui.screen.container.ContainerBase;
import net.minecraft.client.render.TextRenderer;
import net.minecraft.entity.player.PlayerInventory;
import net.minecraft.item.ItemInstance;
import net.modificationstation.stationapi.api.event.item.ItemInstanceEvent;
public class TooltipRenderEvent extends ItemInstanceEvent {
@Getter
private final boolean cancellable = true;
public final ContainerBase container;
public final TextRenderer textManager;
public final PlayerInventory inventory;
public final int
containerX,
containerY,
mouseX,
mouseY;
public final float delta;
public final String originalTooltip;
public TooltipRenderEvent(ItemInstance itemInstance, ContainerBase container, TextRenderer textManager, PlayerInventory inventory, int containerX, int containerY, int mouseX, int mouseY, float delta, String originalTooltip) {
super(itemInstance);
this.container = container;
this.textManager = textManager;
this.inventory = inventory;
this.containerX = containerX;
this.containerY = containerY;
this.mouseX = mouseX;
this.mouseY = mouseY;
this.delta = delta;
this.originalTooltip = originalTooltip;
}
@Override
protected int getEventID() {
return ID;
}
public static final int ID = NEXT_ID.incrementAndGet();
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/mixin/render/client/BlockRendererAccessor.java<|end_filename|>
package net.modificationstation.stationapi.mixin.render.client;
import net.minecraft.client.render.block.BlockRenderer;
import net.minecraft.level.BlockView;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.gen.Accessor;
@Mixin(BlockRenderer.class)
public interface BlockRendererAccessor {
@Accessor
BlockView getBlockView();
@Accessor
float getField_56();
@Accessor
float getField_57();
@Accessor
float getField_58();
@Accessor
float getField_59();
@Accessor
float getField_60();
@Accessor
float getField_61();
@Accessor
float getField_62();
@Accessor
float getField_63();
@Accessor
float getField_64();
@Accessor
float getField_65();
@Accessor
float getField_66();
@Accessor
float getField_68();
@Accessor
int getTextureOverride();
@Accessor
boolean getMirrorTexture();
@Accessor
void setMirrorTexture(boolean mirrorTexture);
@Accessor
boolean getRenderAllSides();
@Accessor
int getEastFaceRotation();
@Accessor
void setEastFaceRotation(int eastFaceRotation);
@Accessor
int getWestFaceRotation();
@Accessor
void setWestFaceRotation(int westFaceRotation);
@Accessor
int getSouthFaceRotation();
@Accessor
void setSouthFaceRotation(int southFaceRotation);
@Accessor
int getNorthFaceRotation();
@Accessor
void setNorthFaceRotation(int northFaceRotation);
@Accessor
int getTopFaceRotation();
@Accessor
void setTopFaceRotation(int topFaceRotation);
@Accessor
int getBottomFaceRotation();
@Accessor
void setBottomFaceRotation(int bottomFaceRotation);
@Accessor
boolean getField_92();
}
<|start_filename|>station-lifecycle-events-v0/src/main/java/net/modificationstation/stationapi/api/client/event/network/ServerLoginSuccessEvent.java<|end_filename|>
package net.modificationstation.stationapi.api.client.event.network;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import net.mine_diver.unsafeevents.Event;
import net.minecraft.network.ClientPlayNetworkHandler;
import net.minecraft.packet.login.LoginRequest0x1Packet;
@RequiredArgsConstructor
public class ServerLoginSuccessEvent extends Event {
@Getter
private final boolean cancellable = true;
public final ClientPlayNetworkHandler networkHandler;
public final LoginRequest0x1Packet loginRequestPacket;
@Override
protected int getEventID() {
return ID;
}
public static final int ID = NEXT_ID.incrementAndGet();
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/model/BakedModelRenderer.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.model;
import com.google.common.collect.ImmutableList;
import net.minecraft.block.BlockBase;
import net.minecraft.client.render.Tessellator;
import net.minecraft.client.render.block.BlockRenderer;
import net.minecraft.level.BlockView;
import net.minecraft.sortme.GameRenderer;
import net.minecraft.util.Vec3i;
import net.modificationstation.stationapi.api.client.model.BakedModel;
import net.modificationstation.stationapi.api.client.model.Vertex;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlases;
import net.modificationstation.stationapi.api.util.math.Direction;
import net.modificationstation.stationapi.api.util.math.MathHelper;
import net.modificationstation.stationapi.impl.client.texture.StationBlockRendererProvider;
import net.modificationstation.stationapi.mixin.render.client.BlockRendererAccessor;
import net.modificationstation.stationapi.mixin.render.client.TessellatorAccessor;
import java.util.*;
public class BakedModelRenderer {
private static final Random random = new Random();
public static boolean renderWorld(BlockRenderer blockRenderer, BlockBase block, BakedModel model, BlockView blockView, int x, int y, int z) {
boolean rendered = false;
if (model != null) {
Vec3i pos = new Vec3i(x, y, z);
long seed = MathHelper.hashCode(x, y, z);
int textureOverridePosition = ((BlockRendererAccessor) blockRenderer).getTextureOverride();
Atlas.Sprite textureOverride = null;
Atlas atlas;
if (textureOverridePosition >= 0) {
atlas = Atlases.getTerrain();
textureOverride = atlas.getTexture(textureOverridePosition);
} else
atlas = model.getSprite().getAtlas();
boolean noTextureOverride = textureOverride == null;
TessellatorAccessor originalAccessor = (TessellatorAccessor) Tessellator.INSTANCE;
Tessellator tessellator = atlas.getTessellator();
if (!((TessellatorAccessor) tessellator).getDrawing()) {
((StationBlockRendererProvider) blockRenderer).getStationBlockRenderer().activeAtlases.add(atlas);
tessellator.start();
tessellator.setOffset(originalAccessor.getXOffset(), originalAccessor.getYOffset(), originalAccessor.getZOffset());
}
int colourMultiplier = block.getColourMultiplier(blockView, x, y, z);
float
colourMultiplierRed = (float)(colourMultiplier >> 16 & 255) / 255.0F,
colourMultiplierGreen = (float)(colourMultiplier >> 8 & 255) / 255.0F,
colourMultiplierBlue = (float)(colourMultiplier & 255) / 255.0F;
if (GameRenderer.field_2340) {
float
colourMultiplierGreenTmp = (colourMultiplierRed * 30.0F + colourMultiplierGreen * 70.0F) / 100.0F,
colourMultiplierBlueTmp = (colourMultiplierRed * 30.0F + colourMultiplierBlue * 70.0F) / 100.0F;
colourMultiplierRed = (colourMultiplierRed * 30.0F + colourMultiplierGreen * 59.0F + colourMultiplierBlue * 11.0F) / 100.0F;
colourMultiplierGreen = colourMultiplierGreenTmp;
colourMultiplierBlue = colourMultiplierBlueTmp;
}
float
brightnessMiddle = blockView.getBrightness(x, y, z),
brightnessBottom = blockView.getBrightness(x, y - 1, z),
brightnessTop = blockView.getBrightness(x, y + 1, z),
brightnessEast = blockView.getBrightness(x, y, z - 1),
brightnessWest = blockView.getBrightness(x, y, z + 1),
brightnessNorth = blockView.getBrightness(x - 1, y, z),
brightnessSouth = blockView.getBrightness(x + 1, y, z);
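            // Direction.values() plus one trailing null entry: the null "face" holds quads that are rendered without side culling.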
Direction[] directions = Arrays.copyOf(Direction.values(), Direction.values().length + 1);
for (int vertexSet = 0, vertexSetCount = directions.length; vertexSet < vertexSetCount; vertexSet++) {
Direction face = directions[vertexSet];
random.setSeed(seed);
ImmutableList<Vertex> vertexes = model.getVertexes(blockView, pos, face, random);
if (vertexes.isEmpty() || (face != null && !block.isSideRendered(blockView, x + face.vector.x, y + face.vector.y, z + face.vector.z, vertexSet)))
continue;
rendered = true;
Vertex vertex;
for (int i = 0, vertexesSize = vertexes.size(); i < vertexesSize; i++) {
vertex = vertexes.get(i);
if (vertex.shade)
tessellator.colour(
model.useAmbientOcclusion() ?
LightingHelper.getSmoothForVertex(
block, blockView, x, y, z,
vertex, i % 4,
colourMultiplierRed, colourMultiplierGreen, colourMultiplierBlue
) :
LightingHelper.getFastForVertex(
vertex,
colourMultiplierRed, colourMultiplierGreen, colourMultiplierBlue,
brightnessMiddle, brightnessBottom, brightnessTop, brightnessEast, brightnessWest, brightnessNorth, brightnessSouth
)
);
else
tessellator.colour(colourMultiplierRed, colourMultiplierGreen, colourMultiplierBlue);
tessellator.vertex(x + vertex.x, y + vertex.y, z + vertex.z,
noTextureOverride ? vertex.u : (textureOverride.getX() + vertex.lightingFace.axis.get2DX(vertex.x, vertex.y, vertex.z) * textureOverride.getWidth()) / textureOverride.getAtlas().getImage().getWidth(),
noTextureOverride ? vertex.v : (textureOverride.getY() + vertex.lightingFace.axis.get2DY(vertex.x, vertex.y, vertex.z) * textureOverride.getHeight()) / textureOverride.getAtlas().getImage().getHeight()
);
}
}
}
return rendered;
}
public static void renderInventory(BakedModel model) {
if (model != null) {
Atlas atlas = model.getSprite().getAtlas();
Tessellator tessellator = atlas.getTessellator();
tessellator.start();
Direction[] directions = Arrays.copyOf(Direction.values(), Direction.values().length + 1);
//noinspection ForLoopReplaceableByForEach
for (int vertexSet = 0; vertexSet < directions.length; vertexSet++) {
Direction face = directions[vertexSet];
random.setSeed(42);
ImmutableList<Vertex> vertexes = model.getVertexes(null, null, face, random);
Vertex vertex;
for (int i = 0, vertexesSize = vertexes.size(); i < vertexesSize; i++) {
vertex = vertexes.get(i);
tessellator.setNormal(vertex.normalX, vertex.normalY, vertex.normalZ);
tessellator.vertex(vertex.x - .5, vertex.y - .5, vertex.z - .5, vertex.u, vertex.v);
}
}
atlas.bindAtlas();
tessellator.draw();
}
}
}
<|start_filename|>station-dimensions-v0/src/main/java/net/modificationstation/stationapi/api/block/CustomPortal.java<|end_filename|>
package net.modificationstation.stationapi.api.block;
import net.minecraft.class_467;
import net.minecraft.entity.player.PlayerBase;
import net.modificationstation.stationapi.api.level.dimension.DimensionHelper;
import net.modificationstation.stationapi.api.level.dimension.TeleportationManager;
import net.modificationstation.stationapi.api.registry.Identifier;
public interface CustomPortal extends TeleportationManager {
@Override
default void switchDimension(PlayerBase player) {
DimensionHelper.switchDimension(player, getDimension(player), getDimensionScale(player), getTravelAgent(player), getDimensionEnteringMessage(player), getDimensionLeavingMessage(player));
}
Identifier getDimension(PlayerBase player);
default double getDimensionScale(PlayerBase player) {
return 1;
}
class_467 getTravelAgent(PlayerBase player);
String getDimensionEnteringMessage(PlayerBase player);
String getDimensionLeavingMessage(PlayerBase player);
}
<|start_filename|>station-vanilla-checker-v0/src/main/java/net/modificationstation/stationapi/impl/network/VanillaChecker.java<|end_filename|>
package net.modificationstation.stationapi.impl.network;
import com.google.common.hash.Hashing;
import net.fabricmc.loader.api.FabricLoader;
import net.fabricmc.loader.api.ModContainer;
import net.fabricmc.loader.api.metadata.ModMetadata;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.mine_diver.unsafeevents.listener.ListenerPriority;
import net.modificationstation.stationapi.api.StationAPI;
import net.modificationstation.stationapi.api.event.mod.PreInitEvent;
import net.modificationstation.stationapi.api.lang.I18n;
import net.modificationstation.stationapi.api.mod.entrypoint.Entrypoint;
import net.modificationstation.stationapi.api.mod.entrypoint.EventBusPolicy;
import net.modificationstation.stationapi.api.registry.ModID;
import net.modificationstation.stationapi.api.util.Null;
import java.util.*;
import static net.modificationstation.stationapi.api.StationAPI.LOGGER;
@Entrypoint(eventBus = @EventBusPolicy(registerInstance = false))
public class VanillaChecker {
@Entrypoint.ModID
private static final ModID MODID = Null.get();
public static final long MASK = Hashing.sipHash24().hashUnencodedChars(StationAPI.MODID.toString()).asLong();
/**
     * A set of mods that need client-side verification when the client joins a server.
*/
public static final Set<ModContainer> CLIENT_REQUIRED_MODS = new HashSet<>();
@EventListener(priority = ListenerPriority.HIGH)
private static void init(PreInitEvent event) {
LOGGER.info("Adding vanilla checker lang folder...");
I18n.addLangFolder(StationAPI.MODID, "/assets/" + MODID + "/lang");
LOGGER.info("Gathering mods that require client verification...");
String value = StationAPI.MODID + ":verify_client";
FabricLoader.getInstance().getAllMods().forEach(modContainer -> {
ModMetadata modMetadata = modContainer.getMetadata();
if (modMetadata.containsCustomValue(value) && modMetadata.getCustomValue(value).getAsBoolean())
CLIENT_REQUIRED_MODS.add(modContainer);
});
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/texture/atlas/SquareAtlas.java<|end_filename|>
package net.modificationstation.stationapi.api.client.texture.atlas;
import net.minecraft.client.resource.TexturePack;
import java.awt.image.*;
public class SquareAtlas extends Atlas {
public SquareAtlas(final String spritesheet, final int sizeSquareRoot) {
super(spritesheet, sizeSquareRoot * sizeSquareRoot, true);
}
public SquareAtlas(final String spritesheet, final int sizeSquareRoot, final Atlas parent) {
super(spritesheet, sizeSquareRoot * sizeSquareRoot, true, parent);
}
@Override
protected void init() {
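        // Split the spritesheet into a sizeSquareRoot x sizeSquareRoot grid of equally sized sprites, indexed row by row (offset by the parent atlas size when chained).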
BufferedImage image = getImage();
final int
sizeSquareRoot = (int) Math.sqrt(getUnitSize()),
textureWidth = image.getWidth() / sizeSquareRoot,
textureHeight = image.getHeight() / sizeSquareRoot;
for (int y = 0; y < sizeSquareRoot; y++) for (int x = 0; x < sizeSquareRoot; x++)
textures.add(new Sprite(
(parent == null ? 0 : parent.size) + y * sizeSquareRoot + x,
x * textureWidth, y * textureHeight,
textureWidth, textureHeight
));
}
@Override
public void reloadFromTexturePack(TexturePack newTexturePack) {
super.reloadFromTexturePack(newTexturePack);
BufferedImage image = getImage();
final int
sizeSquareRoot = (int) Math.sqrt(getUnitSize()),
textureWidth = image.getWidth() / sizeSquareRoot,
textureHeight = image.getHeight() / sizeSquareRoot;
textures.forEach(texture -> {
texture.x = (texture.index % sizeSquareRoot) * textureWidth;
texture.y = (texture.index / sizeSquareRoot) * textureHeight;
texture.width = textureWidth;
texture.height = textureHeight;
});
textures.forEach(Sprite::updateUVs);
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/model/BasicBakedModel.java<|end_filename|>
package net.modificationstation.stationapi.api.client.model;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import lombok.Getter;
import net.minecraft.level.BlockView;
import net.minecraft.util.Vec3i;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.util.math.Direction;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
public class BasicBakedModel implements BakedModel {
@NotNull
private final ImmutableMap<@NotNull Direction, @NotNull ImmutableList<@NotNull Vertex>> faceVertexes;
@NotNull
private final ImmutableList<@NotNull Vertex> vertexes;
private final boolean ambientocclusion;
@Getter
private final boolean isSideLit;
@Getter
private final Atlas.Sprite sprite;
@Getter
private final ModelTransformation transformation;
@Getter
private final ModelOverrideList overrides;
private BasicBakedModel(
final @NotNull ImmutableMap<@NotNull Direction, @NotNull ImmutableList<@NotNull Vertex>> faceVertexes,
final @NotNull ImmutableList<@NotNull Vertex> vertexes,
final boolean ambientocclusion,
final boolean isSideLit,
final @NotNull Atlas.Sprite sprite,
ModelTransformation transformation,
ModelOverrideList overrides
) {
this.faceVertexes = faceVertexes;
this.vertexes = vertexes;
this.ambientocclusion = ambientocclusion;
this.sprite = sprite;
this.isSideLit = isSideLit;
this.transformation = transformation;
this.overrides = overrides;
}
@Override
public ImmutableList<Vertex> getVertexes(@Nullable BlockView blockView, @Nullable Vec3i blockPos, @Nullable Direction face, Random random) {
return face == null ? vertexes : faceVertexes.get(face);
}
@Override
public boolean useAmbientOcclusion() {
return ambientocclusion;
}
@Override
public boolean hasDepth() {
return true;
}
@Override
public boolean isBuiltin() {
return false;
}
public final static class Builder {
private ImmutableMap<Direction, ImmutableList<Vertex>> faceVertexes = ImmutableMap.of();
private ImmutableList<Vertex> vertexes = ImmutableList.of();
private boolean useAO = true;
private boolean isSideLit = true;
private Atlas.Sprite sprite;
private ModelTransformation transformation = null;
private ModelOverrideList overrides = null;
public Builder faceVertexes(ImmutableMap<Direction, ImmutableList<Vertex>> faceVertexes) {
this.faceVertexes = faceVertexes;
return this;
}
public Builder vertexes(ImmutableList<Vertex> vertexes) {
this.vertexes = vertexes;
return this;
}
public Builder useAO(boolean useAO) {
this.useAO = useAO;
return this;
}
public Builder isSideLit(boolean isSideLit) {
this.isSideLit = isSideLit;
return this;
}
public Builder sprite(Atlas.Sprite sprite) {
this.sprite = sprite;
return this;
}
public Builder transformation(ModelTransformation transformation) {
this.transformation = transformation;
return this;
}
public Builder overrides(ModelOverrideList overrides) {
this.overrides = overrides;
return this;
}
public BasicBakedModel build() {
if (sprite == null)
throw new IllegalStateException("Sprite wasn't defined in the BasicBakedModel builder!");
return new BasicBakedModel(
faceVertexes,
vertexes,
useAO,
isSideLit,
sprite,
transformation,
overrides
);
}
}
}
<|start_filename|>station-dimensions-v0/src/main/java/net/modificationstation/stationapi/api/level/dimension/DimensionContainer.java<|end_filename|>
package net.modificationstation.stationapi.api.level.dimension;
import net.minecraft.level.dimension.Dimension;
import org.jetbrains.annotations.NotNull;
import java.util.function.*;
public class DimensionContainer<T extends Dimension> {
@NotNull
public final Supplier<@NotNull T> factory;
public int serialID;
public DimensionContainer(@NotNull IntFunction<@NotNull T> factory) {
this((@NotNull Function<@NotNull DimensionContainer<@NotNull T>, @NotNull Supplier<@NotNull T>>) dimensionContainer -> () -> factory.apply(dimensionContainer.serialID));
}
public DimensionContainer(@NotNull Supplier<@NotNull T> factory) {
this((@NotNull Function<@NotNull DimensionContainer<@NotNull T>, @NotNull Supplier<@NotNull T>>) dimensionContainer -> factory);
}
private DimensionContainer(@NotNull Function<@NotNull DimensionContainer<@NotNull T>, @NotNull Supplier<@NotNull T>> factoryFactory) {
this.factory = factoryFactory.apply(this);
}
@Override
public boolean equals(Object obj) {
return obj instanceof DimensionContainer && serialID == ((@NotNull DimensionContainer<?>) obj).serialID;
}
@Override
public int hashCode() {
return serialID;
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/mixin/render/MixinBlockBase.java<|end_filename|>
package net.modificationstation.stationapi.mixin.render;
import net.minecraft.block.BlockBase;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlas;
import net.modificationstation.stationapi.api.client.texture.atlas.Atlases;
import net.modificationstation.stationapi.api.client.texture.atlas.CustomAtlasProvider;
import org.spongepowered.asm.mixin.Mixin;
@Mixin(BlockBase.class)
public class MixinBlockBase implements CustomAtlasProvider {
@Override
public Atlas getAtlas() {
return Atlases.getTerrain();
}
}
<|start_filename|>station-items-v0/src/main/resources/station-items-v0.mixins.json<|end_filename|>
{
"required": true,
"minVersion": "0.8",
"package": "net.modificationstation.stationapi.mixin.item",
"compatibilityLevel": "JAVA_8",
"mixins": [
"ItemBaseAccessor",
"MixinItemBase",
"MixinItemInstance",
"MixinPlayerBase",
"MixinPlayerInventory"
],
"server": [
"server.Mixinclass_70",
"server.MixinServerPlayerPacketHandler"
],
"client": [
"client.DrawableHelperInvoker",
"client.Mixinclass_608",
"client.MixinClientInteractionManager",
"client.MixinContainerBase",
"client.MixinGameRenderer",
"client.MixinItemRenderer",
"client.MixinRemoteClientInteractionManager"
],
"injectors": {
"defaultRequire": 1
}
}
<|start_filename|>station-api-base/src/main/java/net/modificationstation/stationapi/api/util/math/Axis.java<|end_filename|>
package net.modificationstation.stationapi.api.util.math;
public enum Axis {
X {
@Override
public double get2DX(double x, double y, double z) {
return z;
}
@Override
public double get2DY(double x, double y, double z) {
return y;
}
},
Y {
@Override
public double get2DX(double x, double y, double z) {
return x;
}
@Override
public double get2DY(double x, double y, double z) {
return z;
}
},
Z {
@Override
public double get2DX(double x, double y, double z) {
return x;
}
@Override
public double get2DY(double x, double y, double z) {
return y;
}
};
public abstract double get2DX(double x, double y, double z);
public abstract double get2DY(double x, double y, double z);
}
<|start_filename|>station-tools-api-v0/src/main/java/net/modificationstation/stationapi/api/item/tool/ToolMaterialFactory.java<|end_filename|>
package net.modificationstation.stationapi.api.item.tool;
import net.minecraft.item.tool.ToolMaterial;
import net.modificationstation.stationapi.api.factory.EnumFactory;
public class ToolMaterialFactory {
public static ToolMaterial create(String materialName, int miningLevel, int durability, float miningSpeed, int attackDamage) {
return EnumFactory.addEnum(
ToolMaterial.class,
materialName,
new Class[] { int.class, int.class, float.class, int.class },
new Object[] { miningLevel, durability, miningSpeed, attackDamage }
);
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/mixin/render/client/MixinTextureManager.java<|end_filename|>
package net.modificationstation.stationapi.mixin.render.client;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
import net.minecraft.class_214;
import net.minecraft.client.TexturePackManager;
import net.minecraft.client.texture.TextureManager;
import net.modificationstation.stationapi.impl.client.texture.StationTextureManager;
import org.objectweb.asm.Opcodes;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.Unique;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import java.awt.image.*;
import java.nio.*;
@Mixin(TextureManager.class)
@Environment(EnvType.CLIENT)
public class MixinTextureManager {
@Unique
private final StationTextureManager stationTextureManager = new StationTextureManager((TextureManager) (Object) this);
@Shadow private ByteBuffer currentImageBuffer;
@Shadow private TexturePackManager texturePackManager;
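    // The two bindImageToId injections below resize the shared upload ByteBuffer whenever an image's byte size differs from the buffer capacity, so images larger than vanilla's fixed-size buffer can still be uploaded.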
@Inject(
method = "bindImageToId(Ljava/awt/image/BufferedImage;I)V",
at = @At(
value = "FIELD",
target = "Lnet/minecraft/client/texture/TextureManager;currentImageBuffer:Ljava/nio/ByteBuffer;",
opcode = Opcodes.GETFIELD,
ordinal = 1,
shift = At.Shift.BEFORE
),
locals = LocalCapture.CAPTURE_FAILHARD
)
private void method_1089_ensureBufferCapacity(BufferedImage bufferedImage, int i, CallbackInfo ci, int var3, int var4, int[] var5, byte[] var6) {
if (var6.length != currentImageBuffer.capacity())
currentImageBuffer = class_214.method_744(var6.length);
}
@Inject(
method = "bindImageToId([IIII)V",
at = @At(
value = "FIELD",
target = "Lnet/minecraft/client/texture/TextureManager;currentImageBuffer:Ljava/nio/ByteBuffer;",
opcode = Opcodes.GETFIELD,
ordinal = 1,
shift = At.Shift.BEFORE
),
locals = LocalCapture.CAPTURE_FAILHARD
)
private void method_1095_ensureBufferCapacity(int[] is, int i, int j, int k, CallbackInfo ci, byte[] var5) {
if (var5.length != currentImageBuffer.capacity())
currentImageBuffer = class_214.method_744(var5.length);
}
@Inject(
method = "tick()V",
at = @At("HEAD"),
cancellable = true
)
private void tick_redirect(CallbackInfo ci) {
stationTextureManager.tick();
ci.cancel();
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/texture/atlas/JsonModelAtlas.java<|end_filename|>
package net.modificationstation.stationapi.api.client.texture.atlas;
import net.minecraft.client.resource.TexturePack;
import net.modificationstation.stationapi.api.client.model.json.JsonModel;
import net.modificationstation.stationapi.api.client.registry.ModelRegistry;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.impl.client.texture.StationRenderAPI;
public final class JsonModelAtlas extends ExpandableAtlas {
public static final Identifier MISSING = Identifier.of(StationRenderAPI.MODID, "missing");
public JsonModelAtlas(Identifier identifier) {
super(identifier);
}
@Override
public Sprite addTexture(String texturePath) {
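        // Appending a new texture shifts the UVs of existing sprites, so every registered JSON model is asked to refresh its UVs.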
boolean newTexture = !textureCache.containsKey(texturePath);
Sprite textureInst = super.addTexture(texturePath);
if (newTexture)
ModelRegistry.INSTANCE.forEach((identifier, model) -> {
if (model instanceof JsonModel)
((JsonModel) model).updateUVs();
});
return textureInst;
}
@Override
public void reloadFromTexturePack(TexturePack newTexturePack) {
imageCache = null;
textures.clear();
textureCache.clear();
super.refreshTextureID();
}
}
<|start_filename|>station-dimensions-v0/src/main/java/net/modificationstation/stationapi/impl/level/dimension/DimensionRegistryInit.java<|end_filename|>
package net.modificationstation.stationapi.impl.level.dimension;
import net.mine_diver.unsafeevents.listener.EventListener;
import net.mine_diver.unsafeevents.listener.ListenerPriority;
import net.minecraft.level.dimension.Nether;
import net.minecraft.level.dimension.Overworld;
import net.minecraft.level.dimension.Skylands;
import net.modificationstation.stationapi.api.event.registry.DimensionRegistryEvent;
import net.modificationstation.stationapi.api.level.dimension.DimensionContainer;
import net.modificationstation.stationapi.api.mod.entrypoint.Entrypoint;
import net.modificationstation.stationapi.api.mod.entrypoint.EventBusPolicy;
import net.modificationstation.stationapi.api.registry.DimensionRegistry;
import static net.modificationstation.stationapi.api.level.dimension.VanillaDimensions.OVERWORLD;
import static net.modificationstation.stationapi.api.level.dimension.VanillaDimensions.SKYLANDS;
import static net.modificationstation.stationapi.api.level.dimension.VanillaDimensions.THE_NETHER;
@Entrypoint(eventBus = @EventBusPolicy(registerInstance = false))
public class DimensionRegistryInit {
@EventListener(priority = ListenerPriority.HIGH)
private static void registerDimensions(DimensionRegistryEvent event) {
DimensionRegistry r = event.registry;
r.register(THE_NETHER, -1, new DimensionContainer<>(Nether::new));
r.register(OVERWORLD, 0, new DimensionContainer<>(Overworld::new));
r.register(SKYLANDS, 1, new DimensionContainer<>(Skylands::new));
}
}
<|start_filename|>station-lifecycle-events-v0/src/main/java/net/modificationstation/stationapi/mixin/lifecycle/client/MixinClientPlayNetworkHandler.java<|end_filename|>
package net.modificationstation.stationapi.mixin.lifecycle.client;
import net.minecraft.network.ClientPlayNetworkHandler;
import net.minecraft.packet.login.LoginRequest0x1Packet;
import net.modificationstation.stationapi.api.StationAPI;
import net.modificationstation.stationapi.api.client.event.network.ServerLoginSuccessEvent;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
@Mixin(ClientPlayNetworkHandler.class)
public abstract class MixinClientPlayNetworkHandler {
@Shadow public abstract void method_1647();
@Inject(
method = "onLoginRequest(Lnet/minecraft/packet/login/LoginRequest0x1Packet;)V",
at = @At("HEAD"),
cancellable = true
)
private void onLoginSuccess(LoginRequest0x1Packet packet, CallbackInfo ci) {
if (StationAPI.EVENT_BUS.post(new ServerLoginSuccessEvent((ClientPlayNetworkHandler) (Object) this, packet)).isCancelled())
ci.cancel();
}
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/impl/client/model/GuiLightType.java<|end_filename|>
package net.modificationstation.stationapi.impl.client.model;
public enum GuiLightType {
SIDE, FRONT
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/model/ModelTransformation.java<|end_filename|>
package net.modificationstation.stationapi.api.client.model;
public class ModelTransformation {
}
<|start_filename|>station-render-api-v0/src/main/java/net/modificationstation/stationapi/api/client/model/Model.java<|end_filename|>
package net.modificationstation.stationapi.api.client.model;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.client.Minecraft;
import net.modificationstation.stationapi.api.client.registry.ModelRegistry;
import net.modificationstation.stationapi.api.client.texture.TexturePackDependent;
import net.modificationstation.stationapi.api.registry.Identifier;
import net.modificationstation.stationapi.api.resource.ResourceManager;
import java.util.function.*;
import static net.modificationstation.stationapi.api.StationAPI.MODID;
public abstract class Model implements TexturePackDependent {
public final Identifier id;
public final String modelPath;
private BakedModel baked;
protected boolean invalidated;
public static <T extends Model> T get(final Identifier identifier, final Function<Identifier, T> initializer) {
//noinspection unchecked
return (T) ModelRegistry.INSTANCE.computeIfAbsent(identifier, (Function<Identifier, Model>) initializer);
}
protected Model(final Identifier identifier, final String extension) {
this.id = identifier;
modelPath = ResourceManager.parsePath(identifier, "/" + MODID + "/models", extension);
//noinspection deprecation
reloadFromTexturePack(((Minecraft) FabricLoader.getInstance().getGameInstance()).texturePackManager.texturePack);
}
public final BakedModel getBaked() {
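        // Lazily re-bake: the cached BakedModel is rebuilt only after the model has been marked invalidated (typically by a texture pack reload).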
if (invalidated) {
invalidated = false;
baked = bake();
}
return baked;
}
protected abstract BakedModel bake();
}
| ModificationStation/StationAPI |
<|start_filename|>emoji_flag_test.go<|end_filename|>
package emojiflag
import (
"testing"
"unicode/utf8"
)
func Test_getFlag(t *testing.T) {
type args struct {
country string
}
tests := []struct {
name string
args args
expectedLen int
}{
{
"Should handle correct 3 char input",
args{"AUS"},
8,
},
{
"Should handle correct 2 char input",
args{"AU"},
8,
},
{
"Should return empty string if no 3 letter match can be found",
args{"BOB"},
0,
},
{
"Should return empty string if no 2 letter match can be found",
args{"AA"},
0,
},
{
"Should uppercase input",
args{"aus"},
8,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := GetFlag(tt.args.country)
if !utf8.ValidString(got) {
t.Errorf("GetFlag() expected valid flag got %v", got)
}
if len(got) != tt.expectedLen {
t.Errorf("expected length emoji of %v got %v", tt.expectedLen, got)
}
})
}
}
<|start_filename|>Makefile<|end_filename|>
test:
	go vet -vettool=$$(which shadow)
	go test -v -count=1 -race $$(go list ./...)
| TheTeaCat/go-emoji-flag |
<|start_filename|>pkg/provider/provider.go<|end_filename|>
package provider
import (
"fmt"
"io"
"path/filepath"
"os"
"k8s.io/client-go/informers"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
cloudprovider "k8s.io/cloud-provider"
)
// OutSideCluster allows the controller to be started using a local kubeConfig for testing
var OutSideCluster bool
const (
//ProviderName is the name of the cloud provider
ProviderName = "kubevip"
//KubeVipCloudConfig is the default name of the load balancer config Map
KubeVipCloudConfig = "kubevip"
//KubeVipClientConfig is the default name of the load balancer config Map
	//KubeVipClientConfig is the default name of the load balancer client config Map
//KubeVipServicesKey is the key in the ConfigMap that has the services configuration
KubeVipServicesKey = "kubevip-services"
)
func init() {
cloudprovider.RegisterCloudProvider(ProviderName, newKubeVipCloudProvider)
}
// KubeVipCloudProvider - contains all of the interfaces for the cloud provider
type KubeVipCloudProvider struct {
lb cloudprovider.LoadBalancer
}
var _ cloudprovider.Interface = &KubeVipCloudProvider{}
func newKubeVipCloudProvider(io.Reader) (cloudprovider.Interface, error) {
ns := os.Getenv("KUBEVIP_NAMESPACE")
cm := os.Getenv("KUBEVIP_CONFIG_MAP")
if cm == "" {
cm = KubeVipCloudConfig
}
if ns == "" {
ns = "default"
}
var cl *kubernetes.Clientset
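	// Build the Kubernetes client: in-cluster config by default, or the local ~/.kube/config when OutSideCluster is enabled for testing.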
if !OutSideCluster {
// This will attempt to load the configuration when running within a POD
cfg, err := rest.InClusterConfig()
if err != nil {
return nil, fmt.Errorf("error creating kubernetes client config: %s", err.Error())
}
cl, err = kubernetes.NewForConfig(cfg)
if err != nil {
return nil, fmt.Errorf("error creating kubernetes client: %s", err.Error())
}
// use the current context in kubeconfig
} else {
config, err := clientcmd.BuildConfigFromFlags("", filepath.Join(os.Getenv("HOME"), ".kube", "config"))
if err != nil {
panic(err.Error())
}
cl, err = kubernetes.NewForConfig(config)
if err != nil {
return nil, fmt.Errorf("error creating kubernetes client: %s", err.Error())
}
}
return &KubeVipCloudProvider{
lb: newLoadBalancer(cl, ns, cm),
}, nil
}
// Initialize - starts the cloud-provider controller
func (p *KubeVipCloudProvider) Initialize(clientBuilder cloudprovider.ControllerClientBuilder, stop <-chan struct{}) {
clientset := clientBuilder.ClientOrDie("do-shared-informers")
sharedInformer := informers.NewSharedInformerFactory(clientset, 0)
//res := NewResourcesController(c.resources, sharedInformer.Core().V1().Services(), clientset)
sharedInformer.Start(nil)
sharedInformer.WaitForCacheSync(nil)
//go res.Run(stop)
//go c.serveDebug(stop)
}
// LoadBalancer returns a loadbalancer interface. Also returns true if the interface is supported, false otherwise.
func (p *KubeVipCloudProvider) LoadBalancer() (cloudprovider.LoadBalancer, bool) {
return p.lb, true
}
// ProviderName returns the cloud provider ID.
func (p *KubeVipCloudProvider) ProviderName() string {
return ProviderName
}
| Sandah/kube-vip-cloud-provider |
<|start_filename|>config/src/test/java/com/quorum/tessera/config/constraints/SslConfigValidatorTest.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import com.quorum.tessera.config.SslAuthenticationMode;
import com.quorum.tessera.config.SslConfig;
import com.quorum.tessera.config.SslConfigType;
import com.quorum.tessera.config.SslTrustMode;
import com.quorum.tessera.config.util.EnvironmentVariableProvider;
import com.quorum.tessera.config.util.EnvironmentVariables;
import jakarta.validation.ConstraintValidatorContext;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
public class SslConfigValidatorTest {
@Rule public TemporaryFolder tmpDir = new TemporaryFolder();
private Path tmpFile;
@Mock private ConstraintValidatorContext context;
@Mock private ConstraintValidatorContext.ConstraintViolationBuilder builder;
private SslConfigValidator validator;
private EnvironmentVariableProvider envVarProvider;
@Before
public void setUp() throws IOException {
MockitoAnnotations.openMocks(this);
doNothing().when(context).disableDefaultConstraintViolation();
when(builder.addConstraintViolation()).thenReturn(context);
when(context.buildConstraintViolationWithTemplate(any())).thenReturn(builder);
tmpFile = Paths.get(tmpDir.getRoot().getPath(), "tmpFile");
Files.createFile(tmpFile);
assertThat(tmpFile).exists();
validator = new SslConfigValidator();
envVarProvider = mock(EnvironmentVariableProvider.class);
// when(envVarProvider.hasEnv(anyString())).thenReturn(false);
}
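  // Note: envVarProvider is mocked here but never injected into SslConfigValidator, which is
  // presumably why the env-var-only password tests below are marked @Ignore.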
@Test
public void testSslConfigNull() {
SslConfig sslConfig = null;
assertThat(validator.isValid(sslConfig, context)).isTrue();
}
@Test
public void testSslConfigNotNullButTlsOff() {
SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.OFF);
assertThat(validator.isValid(sslConfig, context)).isTrue();
}
@Test
public void testTlsAllowKeyStoreGeneration() {
SslConfig sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
true,
null,
null,
null,
null,
SslTrustMode.NONE,
null,
null,
null,
null,
SslTrustMode.NONE,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isTrue();
}
@Test
public void testKeyStoreConfigInvalid() {
SslConfig sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
Paths.get("somefile"),
"somepassword".toCharArray(),
null,
null,
null,
Paths.get("somefile"),
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
null,
null,
null,
null,
Paths.get("somefile"),
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
null,
Paths.get("somefile"),
"password".toCharArray(),
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
null,
tmpFile,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
null,
null,
null,
null,
tmpFile,
null,
null,
null,
null,
null,
null,
null,
null,
Paths.get("someFile"),
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
null,
null,
null,
null,
tmpFile,
null,
null,
null,
null,
null,
null,
null,
null,
Paths.get("someFile"),
Paths.get("someFile"),
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
null,
null,
null,
null,
tmpFile,
null,
null,
null,
null,
null,
null,
null,
null,
tmpFile,
Paths.get("someFile"),
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
null,
null,
null,
null,
tmpFile,
null,
null,
null,
null,
null,
null,
null,
null,
tmpFile,
tmpFile,
Paths.get("someFile"),
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
null,
null,
null,
null,
tmpFile,
null,
null,
null,
null,
null,
null,
null,
null,
tmpFile,
tmpFile,
Paths.get("someFile"),
Paths.get("someFile"),
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
null,
null,
null,
null,
tmpFile,
null,
null,
null,
null,
null,
null,
null,
null,
tmpFile,
tmpFile,
tmpFile,
Paths.get("someFile"),
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
}
@Test
public void noServerKeyStorePasswordInConfigOrEnvVarsThenInvalid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword(null);
when(envVarProvider.hasEnv(anyString())).thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
assertThat(result).isFalse();
final String msg =
"Server keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context).buildConstraintViolationWithTemplate(msg);
}
@Test
public void serverKeyStorePasswordInConfigOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_KEYSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_KEYSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Server keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for some reason other than server keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Ignore
@Test
public void serverKeyStorePasswordInGlobalEnvVarOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_KEYSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_KEYSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Server keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Ignore
@Test
public void serverKeyStorePasswordInPrefixedEnvVarOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setServerKeyStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_KEYSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_KEYSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Server keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void serverKeyStorePasswordInConfigAndGlobalEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_KEYSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_KEYSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Server keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void serverKeyStorePasswordInConfigAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setServerKeyStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_KEYSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_KEYSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Server keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Ignore
@Test
public void serverKeyStorePasswordInGlobalAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setServerKeyStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_KEYSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_KEYSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Server keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void serverKeyStorePasswordInConfigAndGlobalAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setServerKeyStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_KEYSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_KEYSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Server keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void noClientKeyStorePasswordInConfigOrEnvVarsThenInvalid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("<PASSWORD>".toCharArray());
sslConfig.setClientKeyStore(tmpFile);
sslConfig.setClientKeyStorePassword(null);
when(envVarProvider.hasEnv(anyString())).thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
assertThat(result).isFalse();
final String msg =
"Client keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context).buildConstraintViolationWithTemplate(msg);
}
@Test
public void clientKeyStorePasswordInConfigOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("password".toCharArray());
sslConfig.setClientKeyStore(tmpFile);
sslConfig.setClientKeyStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_KEYSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_KEYSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Client keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
    // validation then fails for some reason other than client keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Ignore
@Test
public void clientKeyStorePasswordInGlobalEnvVarOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("<PASSWORD>".toCharArray());
sslConfig.setClientKeyStore(tmpFile);
sslConfig.setClientKeyStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_KEYSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_KEYSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Client keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
    // validation then fails for reasons other than client keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Ignore
@Test
public void clientKeyStorePasswordInPrefixedEnvVarOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("<PASSWORD>".toCharArray());
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setClientKeyStore(tmpFile);
sslConfig.setClientKeyStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_KEYSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_KEYSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Client keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
    // validation then fails for reasons other than client keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void clientKeyStorePasswordInConfigAndGlobalEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("<PASSWORD>".toCharArray());
sslConfig.setClientKeyStore(tmpFile);
sslConfig.setClientKeyStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_KEYSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_KEYSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Client keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
    // validation then fails for reasons other than client keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void clientKeyStorePasswordInConfigAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("<PASSWORD>".toCharArray());
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setClientKeyStore(tmpFile);
sslConfig.setClientKeyStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_KEYSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_KEYSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Client keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
    // validation then fails for reasons other than client keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Ignore
@Test
public void clientKeyStorePasswordInGlobalAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("<PASSWORD>".toCharArray());
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setClientKeyStore(tmpFile);
sslConfig.setClientKeyStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_KEYSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_KEYSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Client keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
    // validation then fails for reasons other than client keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void clientKeyStorePasswordInConfigAndGlobalAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(false);
sslConfig.setServerKeyStore(tmpFile);
sslConfig.setServerKeyStorePassword("<PASSWORD>".toCharArray());
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setClientKeyStore(tmpFile);
sslConfig.setClientKeyStorePassword("password".<PASSWORD>());
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_KEYSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_KEYSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Client keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
    // validation then fails for reasons other than client keystore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void testTrustModeNull() {
SslConfig sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
null,
tmpFile,
"password".toCharArray(),
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.CA,
tmpFile,
"password".toCharArray(),
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
}
@Test
public void testTrustModeWhiteListButKnownHostsFileNotExisted() {
SslConfig sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.WHITELIST,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.WHITELIST,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.WHITELIST,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.WHITELIST,
Paths.get("somefile"),
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.WHITELIST,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.WHITELIST,
tmpFile,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.WHITELIST,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.WHITELIST,
tmpFile,
Paths.get("some"),
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
}
@Test
public void testTrustModeCAButTrustStoreConfigInvalid() {
SslConfig sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.CA,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.NONE,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
tmpFile,
null,
SslTrustMode.CA,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.NONE,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
Paths.get("somefile"),
"password".toCharArray(),
SslTrustMode.CA,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.NONE,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
tmpFile,
"p".toCharArray(),
SslTrustMode.CA,
tmpFile,
"password".toCharArray(),
null,
null,
SslTrustMode.CA,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
tmpFile,
null,
SslTrustMode.CA,
tmpFile,
"password".toCharArray(),
tmpFile,
null,
SslTrustMode.CA,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"password".toCharArray(),
Paths.get("somefile"),
"password".toCharArray(),
SslTrustMode.CA,
tmpFile,
"password".toCharArray(),
Paths.get("somefile"),
"p".toCharArray(),
SslTrustMode.CA,
null,
null,
null,
null,
null,
null,
null,
null,
null);
assertThat(validator.isValid(sslConfig, context)).isFalse();
}
@Test
public void serverCaModeNoTrustStorePasswordInConfigOrEnvVarsThenInvalid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword(null);
when(envVarProvider.hasEnv(anyString())).thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If server trust mode is CA, trust store must exist and not be null";
verify(context).buildConstraintViolationWithTemplate(msg);
assertThat(result).isFalse();
}
@Test
public void serverCaModeTrustStorePasswordInConfigOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_TRUSTSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_TRUSTSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If server trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server truststore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Ignore
@Test
public void serverCaModeTrustStorePasswordInGlobalEnvVarOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_TRUSTSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_TRUSTSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If server trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server truststore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Ignore
@Test
public void serverCaModeTrustStorePasswordInPrefixedEnvVarOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setServerTrustStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_TRUSTSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_TRUSTSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If server trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server truststore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void serverCaModeTrustStorePasswordInConfigAndGlobalEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_TRUSTSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_TRUSTSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If server trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server truststore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void serverCaModeTrustStorePasswordInConfigAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setServerTrustStorePassword("password".<PASSWORD>());
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_TRUSTSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_TRUSTSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If server trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server truststore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Ignore
@Test
public void serverCaModeTrustStorePasswordInGlobalAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setServerTrustStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_TRUSTSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_TRUSTSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If server trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server truststore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void serverCaModeTrustStorePasswordInConfigAndGlobalAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setServerTrustStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.SERVER_TRUSTSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.SERVER_TRUSTSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If server trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
// validation then fails for reasons other than server truststore config
verify(context).buildConstraintViolationWithTemplate(anyString());
assertThat(result).isFalse();
}
@Test
public void clientCaModeNoTrustStorePasswordInConfigOrEnvVarsThenInvalid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
sslConfig.setClientTrustStore(tmpFile);
sslConfig.setClientTrustStorePassword(null);
when(envVarProvider.hasEnv(anyString())).thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If client trust mode is CA, trust store must exist and not be null";
verify(context).buildConstraintViolationWithTemplate(msg);
assertThat(result).isFalse();
}
@Test
public void clientCaModeTrustStorePasswordInConfigOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
sslConfig.setClientTrustStore(tmpFile);
sslConfig.setClientTrustStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_TRUSTSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_TRUSTSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If client trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
assertThat(result).isTrue();
}
@Ignore
@Test
public void clientCaModeTrustStorePasswordInGlobalEnvVarOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
sslConfig.setClientTrustStore(tmpFile);
sslConfig.setClientTrustStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_TRUSTSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_TRUSTSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If client trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
assertThat(result).isTrue();
}
@Ignore
@Test
public void clientCaModeTrustStorePasswordInPrefixedEnvVarOnlyThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
sslConfig.setClientTrustStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setClientTrustStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_TRUSTSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_TRUSTSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If client trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
assertThat(result).isTrue();
}
@Test
public void clientCaModeTrustStorePasswordInConfigAndGlobalEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
sslConfig.setClientTrustStore(tmpFile);
sslConfig.setClientTrustStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_TRUSTSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_TRUSTSTORE_PWD))
.thenReturn(false);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If client trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
assertThat(result).isTrue();
}
@Test
public void clientCaModeTrustStorePasswordInConfigAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
sslConfig.setClientTrustStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setClientTrustStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_TRUSTSTORE_PWD)).thenReturn(false);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_TRUSTSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If client trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
assertThat(result).isTrue();
}
@Ignore
@Test
public void clientCaModeTrustStorePasswordInGlobalAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
sslConfig.setClientTrustStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setClientTrustStorePassword(null);
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_TRUSTSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig
.getEnvironmentVariablePrefix()
.concat("_")
.concat(EnvironmentVariables.CLIENT_TRUSTSTORE_PWD)))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If client trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
assertThat(result).isTrue();
}
@Test
public void clientCaModeTrustStorePasswordInConfigAndGlobalAndPrefixedEnvVarThenValid() {
final SslConfig sslConfig = new SslConfig();
sslConfig.setTls(SslAuthenticationMode.STRICT);
sslConfig.setGenerateKeyStoreIfNotExisted(true);
sslConfig.setServerTrustMode(SslTrustMode.CA);
sslConfig.setClientTrustMode(SslTrustMode.CA);
sslConfig.setServerTrustStore(tmpFile);
sslConfig.setServerTrustStorePassword("password".toCharArray());
sslConfig.setClientTrustStore(tmpFile);
sslConfig.setEnvironmentVariablePrefix("PREFIX");
sslConfig.setClientTrustStorePassword("password".toCharArray());
when(envVarProvider.hasEnv(EnvironmentVariables.CLIENT_TRUSTSTORE_PWD)).thenReturn(true);
when(envVarProvider.hasEnv(
sslConfig.getEnvironmentVariablePrefix()
+ "_"
+ EnvironmentVariables.CLIENT_TRUSTSTORE_PWD))
.thenReturn(true);
final boolean result = validator.isValid(sslConfig, context);
final String msg =
"Trust store config not valid. If client trust mode is CA, trust store must exist and not be null";
verify(context, never()).buildConstraintViolationWithTemplate(msg);
assertThat(result).isTrue();
}
@Test
public void testNoKeyStoreFilesButPemFilesProvided() {
SslConfig sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
null,
null,
null,
null,
SslTrustMode.CA,
null,
null,
null,
null,
SslTrustMode.CA,
null,
null,
Arrays.asList(tmpFile),
Arrays.asList(tmpFile),
tmpFile,
tmpFile,
tmpFile,
tmpFile,
null);
assertThat(validator.isValid(sslConfig, context)).isTrue();
}
@Test
public void testValidSsl() {
SslConfig sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"pw".toCharArray(),
tmpFile,
"pw".toCharArray(),
SslTrustMode.CA,
tmpFile,
"pw".toCharArray(),
tmpFile,
"pw".toCharArray(),
SslTrustMode.CA,
tmpFile,
tmpFile,
Arrays.asList(tmpFile),
Arrays.asList(tmpFile),
tmpFile,
tmpFile,
tmpFile,
tmpFile,
null);
assertThat(validator.isValid(sslConfig, context)).isTrue();
}
@Test
public void testValidSslServerOnly() {
SslConfig sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"pw".toCharArray(),
tmpFile,
"pw".toCharArray(),
SslTrustMode.CA,
tmpFile,
null,
null,
null,
null,
tmpFile,
null,
Arrays.asList(tmpFile),
null,
tmpFile,
tmpFile,
null,
null,
null);
sslConfig.setSslConfigType(SslConfigType.SERVER_ONLY);
assertThat(validator.isValid(sslConfig, context)).isTrue();
SslConfig secondSslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
null,
null,
null,
null,
null,
tmpFile,
"pw".toCharArray(),
tmpFile,
"pw".toCharArray(),
SslTrustMode.CA,
null,
tmpFile,
null,
Arrays.asList(tmpFile),
null,
null,
tmpFile,
tmpFile,
null);
secondSslConfig.setSslConfigType(SslConfigType.SERVER_ONLY);
assertThat(validator.isValid(secondSslConfig, context)).isFalse();
}
@Test
public void testValidSslClientOnly() {
SslConfig sslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
null,
null,
null,
null,
null,
tmpFile,
"pw".toCharArray(),
tmpFile,
"pw".toCharArray(),
SslTrustMode.CA,
null,
tmpFile,
null,
Arrays.asList(tmpFile),
null,
null,
tmpFile,
tmpFile,
null);
sslConfig.setSslConfigType(SslConfigType.CLIENT_ONLY);
assertThat(validator.isValid(sslConfig, context)).isTrue();
SslConfig secondSslConfig =
new SslConfig(
SslAuthenticationMode.STRICT,
false,
tmpFile,
"pw".toCharArray(),
tmpFile,
"pw".toCharArray(),
SslTrustMode.CA,
tmpFile,
null,
null,
null,
null,
tmpFile,
null,
Arrays.asList(tmpFile),
null,
tmpFile,
tmpFile,
null,
null,
null);
secondSslConfig.setSslConfigType(SslConfigType.CLIENT_ONLY);
assertThat(validator.isValid(secondSslConfig, context)).isFalse();
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/constraints/NoDuplicateKeyVaultConfigsValidator.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import com.quorum.tessera.config.KeyConfiguration;
import com.quorum.tessera.config.KeyVaultConfig;
import com.quorum.tessera.config.KeyVaultType;
import jakarta.validation.ConstraintValidator;
import jakarta.validation.ConstraintValidatorContext;
import java.util.*;
import java.util.stream.Collectors;
public class NoDuplicateKeyVaultConfigsValidator
implements ConstraintValidator<NoDuplicateKeyVaultConfigs, KeyConfiguration> {
private NoDuplicateKeyVaultConfigs config;
@Override
public void initialize(NoDuplicateKeyVaultConfigs config) {
this.config = config;
}
@Override
public boolean isValid(
KeyConfiguration keyConfiguration, ConstraintValidatorContext constraintValidatorContext) {
// cannot have duplicates if the KeyVaultConfigs list is empty
if (Objects.isNull(keyConfiguration)) {
return true;
}
if (Objects.isNull(keyConfiguration.getKeyVaultConfigs())) {
return true;
}
if (keyConfiguration.getKeyVaultConfigs().isEmpty()) {
return true;
}
final List<KeyVaultConfig> legacyConfigs = new ArrayList<>();
legacyConfigs.add(keyConfiguration.getHashicorpKeyVaultConfig());
legacyConfigs.add(keyConfiguration.getAzureKeyVaultConfig());
List<KeyVaultConfig> configs =
keyConfiguration.getKeyVaultConfigs().stream()
.map(KeyVaultConfig.class::cast)
.collect(Collectors.toList());
configs.addAll(legacyConfigs);
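    // Count how many configs exist per vault type; the toMap merge function below increments the
    // count whenever two configs share the same KeyVaultType.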
final Map<KeyVaultType, Integer> typeCount =
configs.stream()
.filter(Objects::nonNull)
.filter(c -> Objects.nonNull(c.getKeyVaultType()))
.collect(Collectors.toMap(e -> e.getKeyVaultType(), v -> 1, (l, r) -> l + 1));
typeCount.entrySet().stream()
.filter(e -> e.getValue() > 1)
.map(e -> e.getKey().name())
.forEach(
s -> {
String message =
String.join(
" ", s, constraintValidatorContext.getDefaultConstraintMessageTemplate());
constraintValidatorContext.disableDefaultConstraintViolation();
constraintValidatorContext
.buildConstraintViolationWithTemplate(message)
.addConstraintViolation();
});
return typeCount.values().stream().allMatch(v -> v == 1);
}
}
<|start_filename|>tests/acceptance-test/src/test/java/com/quorum/tessera/test/rest/MultipleKeyNodeIT.java<|end_filename|>
package com.quorum.tessera.test.rest;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.api.ReceiveResponse;
import com.quorum.tessera.api.SendResponse;
import com.quorum.tessera.test.Party;
import com.quorum.tessera.test.PartyHelper;
import jakarta.ws.rs.core.Response;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import suite.NodeAlias;
/**
* This tests that a node that hosts multiple sets of keys can send/receive transactions for both
* keys
*/
@RunWith(Parameterized.class)
public class MultipleKeyNodeIT {
private PartyHelper partyHelper = PartyHelper.create();
private final String recipientAlias;
private String txHash;
private RestUtils restUtils = new RestUtils();
public MultipleKeyNodeIT(String recipientAlias) {
this.recipientAlias = recipientAlias;
}
@Before
public void onSetUp() {
Party sender = partyHelper.findByAlias(NodeAlias.A);
Party recipient = partyHelper.findByAlias(recipientAlias);
byte[] transactionData = restUtils.createTransactionData();
final SendResponse result =
restUtils.sendRequestAssertSuccess(sender, transactionData, recipient);
assertThat(result.getKey()).isNotBlank();
this.txHash = result.getKey();
}
@Test
public void thenTransactionHasBeenPersistedOnOtherNode() throws UnsupportedEncodingException {
final byte[] transactionData = RestUtils.generateTransactionData();
Party recipient = partyHelper.findByAlias(recipientAlias);
// retrieve the transaction
final Response retrieveResponse =
recipient
.getRestClient()
.target(recipient.getQ2TUri())
.path("transaction")
.path(URLEncoder.encode(txHash, "UTF-8"))
.queryParam("to", recipient.getPublicKey())
.request()
.get();
assertThat(retrieveResponse).isNotNull();
assertThat(retrieveResponse.getStatus())
.describedAs("%s should be present on other node", txHash)
.isEqualTo(200);
final ReceiveResponse result = retrieveResponse.readEntity(ReceiveResponse.class);
// TODO: Verify payload
assertThat(result).isNotNull();
}
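  // Recipient aliases C and D are assumed to be two key sets hosted on the same node, so the same
  // send/receive flow is exercised once per hosted key.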
@Parameterized.Parameters
public static List<String> recipients() {
return Arrays.asList("C", "D");
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/ClientMode.java<|end_filename|>
package com.quorum.tessera.config;
import jakarta.xml.bind.annotation.XmlEnumValue;
public enum ClientMode {
@XmlEnumValue("tessera")
TESSERA,
@XmlEnumValue("orion")
ORION
}
<|start_filename|>server/jersey-server/src/test/java/com/quorum/tessera/server/jersey/SampleApplication.java<|end_filename|>
package com.quorum.tessera.server.jersey;
import jakarta.ws.rs.core.Application;
import java.util.Set;
public class SampleApplication extends Application {
@Override
public Set<Class<?>> getClasses() {
return Set.of(SampleResource.class);
}
}
<|start_filename|>migration/multitenancy/src/main/java/com/quorum/tessera/multitenancy/migration/RawTransactionMigrator.java<|end_filename|>
package com.quorum.tessera.multitenancy.migration;
import com.quorum.tessera.data.EncryptedRawTransaction;
import jakarta.persistence.EntityManager;
import jakarta.persistence.TypedQuery;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.IntStream;
public class RawTransactionMigrator {
private final EntityManager primaryEntityManager;
private final EntityManager secondaryEntityManager;
private final int maxBatchSize = 100;
public RawTransactionMigrator(
final EntityManager primaryEntityManager, final EntityManager secondaryEntityManager) {
this.primaryEntityManager = Objects.requireNonNull(primaryEntityManager);
this.secondaryEntityManager = Objects.requireNonNull(secondaryEntityManager);
}
public void migrate() {
final long secondaryTxCount =
secondaryEntityManager
.createQuery("select count(e) from EncryptedRawTransaction e", Long.class)
.getSingleResult();
final int batchCount = calculateBatchCount(maxBatchSize, secondaryTxCount);
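    // Page through the secondary store in fixed-size batches; e.g. 250 rows with a batch size of
    // 100 give ceil(250/100) = 3 batches, read at offsets 0, 100 and 200.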
IntStream.range(0, batchCount)
.map(i -> i * maxBatchSize)
.mapToObj(
offset ->
secondaryEntityManager
.createNamedQuery(
"EncryptedRawTransaction.FindAll", EncryptedRawTransaction.class)
.setFirstResult(offset)
.setMaxResults(maxBatchSize))
.flatMap(TypedQuery::getResultStream)
.forEach(
ert -> {
final Optional<EncryptedRawTransaction> existing =
Optional.ofNullable(
primaryEntityManager.find(EncryptedRawTransaction.class, ert.getHash()));
if (existing.isEmpty()) {
primaryEntityManager.getTransaction().begin();
primaryEntityManager.persist(ert);
primaryEntityManager.getTransaction().commit();
}
});
}
private static int calculateBatchCount(final long maxResults, final long total) {
return (int) Math.ceil((double) total / maxResults);
}
}
<|start_filename|>tessera-context/src/main/java/com/quorum/tessera/context/KeyVaultConfigValidations.java<|end_filename|>
package com.quorum.tessera.context;
import com.quorum.tessera.config.KeyConfiguration;
import com.quorum.tessera.config.keypairs.ConfigKeyPair;
import jakarta.validation.ConstraintViolation;
import java.util.List;
import java.util.ServiceLoader;
import java.util.Set;
public interface KeyVaultConfigValidations {
static KeyVaultConfigValidations create() {
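    // Looks up an implementation via the JDK ServiceLoader; get() throws NoSuchElementException
    // if no provider is present on the module/class path.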
return ServiceLoader.load(KeyVaultConfigValidations.class).findFirst().get();
}
Set<ConstraintViolation<?>> validate(KeyConfiguration keys, List<ConfigKeyPair> configKeyPairs);
}
<|start_filename|>tessera-data/src/main/java/com/quorum/tessera/data/EncryptedTransactionListener.java<|end_filename|>
package com.quorum.tessera.data;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import jakarta.persistence.PostLoad;
import jakarta.persistence.PrePersist;
import jakarta.persistence.PreUpdate;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class EncryptedTransactionListener {
private static final Logger LOGGER = LoggerFactory.getLogger(EncryptedTransactionListener.class);
@PreUpdate
public void onUpdate(EncryptedTransaction encryptedTransaction) {
LOGGER.debug("onUpdate {}", encryptedTransaction);
final EncodedPayload encodedPayload = encryptedTransaction.getPayload();
final EncodedPayloadCodec encodedPayloadCodec = encryptedTransaction.getEncodedPayloadCodec();
final PayloadEncoder payloadEncoder = PayloadEncoder.create(encodedPayloadCodec);
final byte[] encodedPayloadData = payloadEncoder.encode(encodedPayload);
encryptedTransaction.setEncodedPayload(encodedPayloadData);
}
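  // Unlike onUpdate above, which re-encodes with the codec already stored on the entity, onSave
  // below encodes newly persisted entities with the current default codec.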
@PrePersist
public void onSave(EncryptedTransaction encryptedTransaction) {
LOGGER.debug("onSave {}", encryptedTransaction);
final EncodedPayload encodedPayload = encryptedTransaction.getPayload();
final EncodedPayloadCodec encodedPayloadCodec = EncodedPayloadCodec.current();
final PayloadEncoder payloadEncoder = PayloadEncoder.create(encodedPayloadCodec);
final byte[] encodedPayloadData = payloadEncoder.encode(encodedPayload);
encryptedTransaction.setEncodedPayloadCodec(encodedPayloadCodec);
encryptedTransaction.setEncodedPayload(encodedPayloadData);
}
@PostLoad
public void onLoad(EncryptedTransaction encryptedTransaction) {
LOGGER.debug("onLoad[{}]", encryptedTransaction);
final EncodedPayloadCodec encodedPayloadCodec =
Optional.ofNullable(encryptedTransaction.getEncodedPayloadCodec())
.orElse(EncodedPayloadCodec.LEGACY);
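    // A missing codec column is treated as LEGACY, presumably for rows persisted before the
    // codec column was introduced.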
final byte[] encodedPayloadData = encryptedTransaction.getEncodedPayload();
final PayloadEncoder payloadEncoder = PayloadEncoder.create(encodedPayloadCodec);
final EncodedPayload encodedPayload = payloadEncoder.decode(encodedPayloadData);
encryptedTransaction.setPayload(encodedPayload);
encryptedTransaction.setEncodedPayloadCodec(encodedPayloadCodec);
}
}
<|start_filename|>tests/acceptance-test/src/test/java/com/quorum/tessera/test/rest/ReceiveRawIT.java<|end_filename|>
package com.quorum.tessera.test.rest;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.api.SendRequest;
import com.quorum.tessera.api.SendResponse;
import com.quorum.tessera.test.Party;
import com.quorum.tessera.test.PartyHelper;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import org.junit.Before;
import org.junit.Test;
import suite.NodeAlias;
public class ReceiveRawIT {
private static final String RECEIVE_PATH = "/receiveraw";
private static final String C11N_TO = "c11n-to";
private static final String C11N_KEY = "<KEY>";
private static final byte[] PAYLOAD = "TXN_DATA".getBytes();
private PartyHelper partyHelper = PartyHelper.create();
private String hash;
private Party partyOne;
private Party partyTwo;
// Persist a single transaction that can be used later
@Before
public void beforeTest() {
this.partyOne = partyHelper.findByAlias(NodeAlias.A);
this.partyTwo = partyHelper.findByAlias(NodeAlias.B);
SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(PAYLOAD);
sendRequest.setTo(partyTwo.getPublicKey());
sendRequest.setFrom(partyOne.getPublicKey());
final Response response =
partyOne
.getRestClient()
.target(partyOne.getQ2TUri())
.path("/send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
final SendResponse result = response.readEntity(SendResponse.class);
this.hash = result.getKey();
}
@Test
public void fetchExistingTransactionUsingOwnKey() {
final Response response =
partyOne
.getRestClient()
.target(partyOne.getQ2TUri())
.path(RECEIVE_PATH)
.request()
.header(C11N_KEY, this.hash)
.header(C11N_TO, partyOne.getPublicKey())
.buildGet()
.invoke();
// validate result
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
final byte[] result = response.readEntity(byte[].class);
assertThat(result).isEqualTo(PAYLOAD);
}
@Test
public void fetchExistingTransactionNotUsingKeyOnSender() {
final Response response =
partyOne
.getRestClient()
.target(partyOne.getQ2TUri())
.path(RECEIVE_PATH)
.request()
.header(C11N_KEY, this.hash)
.buildGet()
.invoke();
// validate result
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
final byte[] result = response.readEntity(byte[].class);
assertThat(result).isEqualTo(PAYLOAD);
}
@Test
public void fetchExistingTransactionNotUsingKeyOnRecipient() {
Party sender = partyHelper.findByAlias("A");
byte[] transactionPayload = new RestUtils().createTransactionData();
SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(transactionPayload);
sendRequest.setFrom(sender.getPublicKey());
sendRequest.setTo(partyHelper.findByAlias("B").getPublicKey());
final Response r =
sender
.getRestClient()
.target(sender.getQ2TUri())
.path("/send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
final SendResponse sendResponse = r.readEntity(SendResponse.class);
final Party pty = partyHelper.findByAlias(NodeAlias.B);
final Response response =
pty.getRestClient()
.target(pty.getQ2TUri())
.path(RECEIVE_PATH)
.request()
.header(C11N_KEY, sendResponse.getKey())
.buildGet()
.invoke();
// validate result
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
final byte[] result = response.readEntity(byte[].class);
assertThat(result).isEqualTo(transactionPayload);
}
@Test
public void fetchExistingTransactionUsingRecipientKey() {
final Response response =
partyTwo
.getRestClient()
.target(partyTwo.getQ2TUri())
.path(RECEIVE_PATH)
.request()
.header(C11N_KEY, this.hash)
.header(C11N_TO, partyTwo.getPublicKey())
.buildGet()
.invoke();
// validate result
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
final byte[] result = response.readEntity(byte[].class);
assertThat(result).isEqualTo(PAYLOAD);
}
@Test
public void fetchNonexistentTransactionFails() {
final Response response =
partyOne
.getRestClient()
.target(partyOne.getQ2TUri())
.path(RECEIVE_PATH)
.request()
.header(C11N_KEY, "invalidhashvalue")
.buildGet()
.invoke();
// validate result
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(404);
final String result = response.readEntity(String.class);
assertThat(result).isEqualTo("Message with hash invalidhashvalue was not found");
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/constraints/ValidContent.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import jakarta.validation.Constraint;
import jakarta.validation.Payload;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
@Target({FIELD, METHOD, PARAMETER, ANNOTATION_TYPE, TYPE_PARAMETER, TYPE_USE})
@Retention(RUNTIME)
@Constraint(validatedBy = ValidContentValidator.class)
@Documented
public @interface ValidContent {
String message() default "{ValidContent.message}";
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
int minLines() default 0;
int maxLines() default Integer.MAX_VALUE;
}
<|start_filename|>tessera-recover/src/main/java/com/quorum/tessera/recovery/workflow/internal/LegacyResendManagerProvider.java<|end_filename|>
package com.quorum.tessera.recovery.workflow.internal;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.discovery.Discovery;
import com.quorum.tessera.enclave.Enclave;
import com.quorum.tessera.recovery.workflow.LegacyResendManager;
import com.quorum.tessera.transaction.publish.PayloadPublisher;
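/**
 * Assembles a {@link LegacyResendManager} from its collaborators (enclave, transaction DAO,
 * payload publisher and discovery), using a fixed resend fetch size of 100.
 */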
public class LegacyResendManagerProvider {
public static LegacyResendManager provider() {
final Enclave enclave = Enclave.create();
final EncryptedTransactionDAO encryptedTransactionDAO = EncryptedTransactionDAO.create();
final int resendFetchSize = 100;
final PayloadPublisher payloadPublisher = PayloadPublisher.create();
final Discovery discovery = Discovery.create();
return new LegacyResendManagerImpl(
enclave, encryptedTransactionDAO, resendFetchSize, payloadPublisher, discovery);
}
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/test/java/com/quorum/tessera/p2p/PartyInfoResourceTest.java<|end_filename|>
package com.quorum.tessera.p2p;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
import static org.mockito.Mockito.*;
import com.quorum.tessera.discovery.Discovery;
import com.quorum.tessera.enclave.Enclave;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.enclave.PrivacyMetadata;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.p2p.partyinfo.PartyInfoParser;
import com.quorum.tessera.p2p.partyinfo.PartyStore;
import com.quorum.tessera.partyinfo.model.NodeInfoUtil;
import com.quorum.tessera.partyinfo.model.Party;
import com.quorum.tessera.partyinfo.model.PartyInfo;
import com.quorum.tessera.partyinfo.model.Recipient;
import com.quorum.tessera.partyinfo.node.NodeInfo;
import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonReader;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.client.Invocation;
import jakarta.ws.rs.client.WebTarget;
import jakarta.ws.rs.core.Response;
import java.io.IOException;
import java.io.StringReader;
import java.io.UncheckedIOException;
import java.net.URI;
import java.util.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
public class PartyInfoResourceTest {
private Discovery discovery;
private PartyInfoResource partyInfoResource;
private PartyInfoParser partyInfoParser;
private Enclave enclave;
private Client restClient;
private PayloadEncoder payloadEncoder;
private PartyStore partyStore;
@Before
public void beforeTest() {
this.discovery = mock(Discovery.class);
this.partyInfoParser = mock(PartyInfoParser.class);
this.enclave = mock(Enclave.class);
this.restClient = mock(Client.class);
this.payloadEncoder = mock(PayloadEncoder.class);
this.partyStore = mock(PartyStore.class);
this.partyInfoResource =
new PartyInfoResource(
discovery, partyInfoParser, restClient, enclave, payloadEncoder, true, partyStore);
}
@After
public void afterTest() {
verifyNoMoreInteractions(discovery, partyInfoParser, restClient, enclave, payloadEncoder);
}
@Test
public void partyInfoGet() {
final String partyInfoJson =
"{\"url\":\"http://localhost:9001/\",\"peers\":[{\"url\":\"http://localhost:9006/\"},{\"url\":\"http://localhost:9005/\"}],\"keys\":[{\"key\":\"<KEY>\",\"url\":\"http://localhost:9001/\"},{\"key\":\"<KEY>\",\"url\":\"http://localhost:9002/\"}]}";
NodeInfo partyInfo =
NodeInfo.Builder.create()
.withUrl("http://localhost:9001/")
.withRecipients(
List.of(
com.quorum.tessera.partyinfo.node.Recipient.of(
PublicKey.from(
Base64.getDecoder()
.decode("<KEY>")),
"http://localhost:9001/"),
com.quorum.tessera.partyinfo.node.Recipient.of(
PublicKey.from(
Base64.getDecoder()
.decode("QfeDAys9MPDs2XHExtc84jKGHxZg/aj52DTh0vtA3Xc=")),
"http://localhost:9002/")))
.build();
when(discovery.getCurrent()).thenReturn(partyInfo);
when(partyStore.getParties())
.thenReturn(
Set.of(URI.create("http://localhost:9006/"), URI.create("http://localhost:9005/")));
final Response response = partyInfoResource.getPartyInfo();
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
final String output = response.getEntity().toString();
final JsonReader expected = Json.createReader(new StringReader(partyInfoJson));
final JsonReader actual = Json.createReader(new StringReader(output));
JsonObject expectedJsonObject = expected.readObject();
JsonObject actualJsonObject = actual.readObject();
assertThat(actualJsonObject.getJsonArray("keys"))
.containsExactlyInAnyOrderElementsOf(expectedJsonObject.getJsonArray("keys"));
assertThat(actualJsonObject.getJsonArray("peers"))
.containsExactlyInAnyOrderElementsOf(expectedJsonObject.getJsonArray("peers"));
assertThat(actualJsonObject.getString("url")).isEqualTo(expectedJsonObject.getString("url"));
verify(discovery).getCurrent();
}
@Test
public void partyInfo() {
String url = "http://www.bogus.com";
PublicKey myKey = PublicKey.from("myKey".getBytes());
PublicKey recipientKey = PublicKey.from("recipientKey".getBytes());
String message = "I love sparrows";
byte[] payload = message.getBytes();
Recipient recipient = Recipient.of(recipientKey, url);
Set<Recipient> recipientList = Collections.singleton(recipient);
PartyInfo partyInfo = new PartyInfo(url, recipientList, Collections.emptySet());
when(partyInfoParser.from(payload)).thenReturn(partyInfo);
when(enclave.defaultPublicKey()).thenReturn(myKey);
when(partyInfoParser.to(partyInfo)).thenReturn(payload);
EncodedPayload encodedPayload = mock(EncodedPayload.class);
List<String> uuidList = new ArrayList<>();
doAnswer(
(invocation) -> {
byte[] d = invocation.getArgument(0);
uuidList.add(new String(d));
return encodedPayload;
})
.when(enclave)
.encryptPayload(
any(byte[].class), any(PublicKey.class), anyList(), any(PrivacyMetadata.class));
when(payloadEncoder.encode(encodedPayload)).thenReturn(payload);
WebTarget webTarget = mock(WebTarget.class);
when(restClient.target(url)).thenReturn(webTarget);
when(webTarget.path(anyString())).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
when(webTarget.request()).thenReturn(invocationBuilder);
Response response = mock(Response.class);
when(response.getStatus()).thenReturn(200);
doAnswer((invocation) -> uuidList.get(0)).when(response).readEntity(String.class);
when(invocationBuilder.post(any(Entity.class))).thenReturn(response);
Response result = partyInfoResource.partyInfo(payload, List.of("v1,v2"));
assertThat(result.getStatus()).isEqualTo(200);
verify(partyInfoParser).from(payload);
verify(enclave).defaultPublicKey();
verify(enclave)
.encryptPayload(
any(byte[].class), any(PublicKey.class), anyList(), any(PrivacyMetadata.class));
verify(payloadEncoder).encode(encodedPayload);
verify(restClient).target(url);
ArgumentCaptor<NodeInfo> argCaptor = ArgumentCaptor.forClass(NodeInfo.class);
verify(discovery).onUpdate(argCaptor.capture());
final NodeInfo nodeInfo = argCaptor.getValue();
assertThat(nodeInfo).isNotNull();
assertThat(nodeInfo.getUrl()).isEqualTo(url);
assertThat(nodeInfo.supportedApiVersions()).containsExactlyInAnyOrder("v1", "v2");
}
@Test
public void validate() {
String message = UUID.randomUUID().toString();
byte[] payload = message.getBytes();
PublicKey myKey = PublicKey.from("myKey".getBytes());
EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getRecipientKeys()).thenReturn(Collections.singletonList(myKey));
when(payloadEncoder.decode(payload)).thenReturn(encodedPayload);
when(enclave.unencryptTransaction(encodedPayload, myKey)).thenReturn(message.getBytes());
Response result = partyInfoResource.validate(payload);
assertThat(result.getStatus()).isEqualTo(200);
assertThat(result.getEntity()).isEqualTo(message);
verify(payloadEncoder).decode(payload);
verify(enclave).unencryptTransaction(encodedPayload, myKey);
}
@Test
public void validateReturns400IfMessageIsNotUUID() {
String message = "I love sparrows";
byte[] payload = message.getBytes();
PublicKey myKey = PublicKey.from("myKey".getBytes());
EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getRecipientKeys()).thenReturn(Collections.singletonList(myKey));
when(payloadEncoder.decode(payload)).thenReturn(encodedPayload);
when(enclave.unencryptTransaction(encodedPayload, myKey)).thenReturn(message.getBytes());
Response result = partyInfoResource.validate(payload);
assertThat(result.getStatus()).isEqualTo(400);
assertThat(result.getEntity()).isNull();
verify(payloadEncoder).decode(payload);
verify(enclave).unencryptTransaction(encodedPayload, myKey);
}
@Test
public void constructWithMinimalArgs() {
PartyInfoResource instance =
new PartyInfoResource(discovery, partyInfoParser, restClient, enclave, true);
assertThat(instance).isNotNull();
}
@Test
public void partyInfoExceptionIfValidationFailsWith200() {
final int validateResponseCode = 200;
final String validateResponseMsg = "BADRESPONSE";
String url = "http://www.bogus.com";
PublicKey myKey = PublicKey.from("myKey".getBytes());
PublicKey recipientKey = PublicKey.from("recipientKey".getBytes());
String message = "I love sparrows";
byte[] payload = message.getBytes();
Recipient recipient = Recipient.of(recipientKey, url);
Set<Recipient> recipientList = Collections.singleton(recipient);
PartyInfo partyInfo = new PartyInfo(url, recipientList, Collections.emptySet());
when(partyInfoParser.from(payload)).thenReturn(partyInfo);
when(enclave.defaultPublicKey()).thenReturn(myKey);
when(partyInfoParser.to(partyInfo)).thenReturn(payload);
EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(enclave.encryptPayload(
any(byte[].class), any(PublicKey.class), anyList(), any(PrivacyMetadata.class)))
.thenReturn(encodedPayload);
when(payloadEncoder.encode(encodedPayload)).thenReturn(payload);
WebTarget webTarget = mock(WebTarget.class);
when(restClient.target(url)).thenReturn(webTarget);
when(webTarget.path(anyString())).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
when(webTarget.request()).thenReturn(invocationBuilder);
Response response = mock(Response.class);
when(response.getStatus()).thenReturn(validateResponseCode);
doAnswer((invocation) -> validateResponseMsg).when(response).readEntity(String.class);
when(invocationBuilder.post(any(Entity.class))).thenReturn(response);
try {
partyInfoResource.partyInfo(payload, Collections.emptyList());
failBecauseExceptionWasNotThrown(SecurityException.class);
} catch (SecurityException ex) {
verify(partyInfoParser).from(payload);
verify(enclave).defaultPublicKey();
verify(enclave)
.encryptPayload(
any(byte[].class), any(PublicKey.class), anyList(), any(PrivacyMetadata.class));
verify(payloadEncoder).encode(encodedPayload);
verify(restClient).target(url);
}
}
@Test
public void partyInfoExceptionIfValidationFailsWith400() {
final int validateResponseCode = 400;
final String validateResponseMsg = null;
String url = "http://www.bogus.com";
PublicKey myKey = PublicKey.from("myKey".getBytes());
PublicKey recipientKey = PublicKey.from("recipientKey".getBytes());
String message = "I love sparrows";
byte[] payload = message.getBytes();
Recipient recipient = Recipient.of(recipientKey, url);
Set<Recipient> recipientList = Collections.singleton(recipient);
PartyInfo partyInfo = new PartyInfo(url, recipientList, Collections.emptySet());
when(partyInfoParser.from(payload)).thenReturn(partyInfo);
when(enclave.defaultPublicKey()).thenReturn(myKey);
when(partyInfoParser.to(partyInfo)).thenReturn(payload);
EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(enclave.encryptPayload(
any(byte[].class), any(PublicKey.class), anyList(), any(PrivacyMetadata.class)))
.thenReturn(encodedPayload);
when(payloadEncoder.encode(encodedPayload)).thenReturn(payload);
WebTarget webTarget = mock(WebTarget.class);
when(restClient.target(url)).thenReturn(webTarget);
when(webTarget.path(anyString())).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
when(webTarget.request()).thenReturn(invocationBuilder);
Response response = mock(Response.class);
when(response.getStatus()).thenReturn(validateResponseCode);
doAnswer((invocation) -> validateResponseMsg).when(response).readEntity(String.class);
when(invocationBuilder.post(any(Entity.class))).thenReturn(response);
try {
partyInfoResource.partyInfo(payload, List.of("v1", "v2"));
failBecauseExceptionWasNotThrown(SecurityException.class);
} catch (SecurityException ex) {
verify(partyInfoParser).from(payload);
verify(enclave).defaultPublicKey();
verify(enclave)
.encryptPayload(
any(byte[].class), any(PublicKey.class), anyList(), any(PrivacyMetadata.class));
verify(payloadEncoder).encode(encodedPayload);
verify(restClient).target(url);
}
}
@Test
public void partyInfoValidateThrowsException() {
String url = "http://www.bogus.com";
PublicKey myKey = PublicKey.from("myKey".getBytes());
PublicKey recipientKey = PublicKey.from("recipientKey".getBytes());
String message = "I love sparrows";
byte[] payload = message.getBytes();
Recipient recipient = Recipient.of(recipientKey, url);
Set<Recipient> recipientList = Collections.singleton(recipient);
PartyInfo partyInfo = new PartyInfo(url, recipientList, Collections.emptySet());
when(partyInfoParser.from(payload)).thenReturn(partyInfo);
when(enclave.defaultPublicKey()).thenReturn(myKey);
when(partyInfoParser.to(partyInfo)).thenReturn(payload);
EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(enclave.encryptPayload(
any(byte[].class), any(PublicKey.class), anyList(), any(PrivacyMetadata.class)))
.thenReturn(encodedPayload);
when(payloadEncoder.encode(encodedPayload)).thenReturn(payload);
WebTarget webTarget = mock(WebTarget.class);
when(restClient.target(url)).thenReturn(webTarget);
when(webTarget.path(anyString())).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
when(webTarget.request()).thenReturn(invocationBuilder);
when(invocationBuilder.post(any(Entity.class)))
.thenThrow(new UncheckedIOException(new IOException("GURU meditation")));
try {
partyInfoResource.partyInfo(payload, null);
failBecauseExceptionWasNotThrown(SecurityException.class);
} catch (SecurityException ex) {
verify(partyInfoParser).from(payload);
verify(enclave).defaultPublicKey();
verify(enclave)
.encryptPayload(
any(byte[].class), any(PublicKey.class), anyList(), any(PrivacyMetadata.class));
verify(payloadEncoder).encode(encodedPayload);
verify(restClient).target(url);
}
}
@Test
public void validationDisabledPassesAllKeysToStore() {
this.partyInfoResource =
new PartyInfoResource(
discovery, partyInfoParser, restClient, enclave, payloadEncoder, false, partyStore);
final byte[] payload = "Test message".getBytes();
final String url = "http://www.bogus.com";
final String otherurl = "http://www.randomaddress.com";
final PublicKey recipientKey = PublicKey.from("recipientKey".getBytes());
final Set<Recipient> recipientList =
new HashSet<>(
Arrays.asList(Recipient.of(recipientKey, url), Recipient.of(recipientKey, otherurl)));
final PartyInfo partyInfo = new PartyInfo(url, recipientList, Collections.emptySet());
final NodeInfo nodeInfo = NodeInfoUtil.from(partyInfo, null);
final ArgumentCaptor<PartyInfo> captor = ArgumentCaptor.forClass(PartyInfo.class);
final byte[] serialisedData = "SERIALISED".getBytes();
when(partyInfoParser.from(payload)).thenReturn(partyInfo);
when(discovery.getCurrent()).thenReturn(nodeInfo);
when(partyInfoParser.to(captor.capture())).thenReturn(serialisedData);
final Response callResponse = partyInfoResource.partyInfo(payload, null);
final byte[] data = (byte[]) callResponse.getEntity();
assertThat(captor.getValue().getUrl()).isEqualTo(url);
assertThat(captor.getValue().getRecipients()).isEmpty();
assertThat(captor.getValue().getParties()).isEmpty();
assertThat(new String(data)).isEqualTo("SERIALISED");
verify(partyInfoParser).from(payload);
verify(partyInfoParser).to(any(PartyInfo.class));
final ArgumentCaptor<NodeInfo> modifiedPartyInfoCaptor =
ArgumentCaptor.forClass(NodeInfo.class);
verify(discovery).onUpdate(modifiedPartyInfoCaptor.capture());
final NodeInfo modified = modifiedPartyInfoCaptor.getValue();
assertThat(modified.getUrl()).isEqualTo(url);
Set<com.quorum.tessera.partyinfo.node.Recipient> updatedRecipients = modified.getRecipients();
assertThat(updatedRecipients)
.containsExactlyInAnyOrder(
com.quorum.tessera.partyinfo.node.Recipient.of(recipientKey, url),
com.quorum.tessera.partyinfo.node.Recipient.of(recipientKey, otherurl));
verify(discovery).getCurrent();
}
@Test
public void partyInfoValidationEncryptsUniqueDataForEachKey() {
String url = "http://bogus";
Set<Party> parties = Collections.emptySet();
Set<Recipient> recipients = new HashSet<>();
recipients.add(Recipient.of(mock(PublicKey.class), url));
recipients.add(Recipient.of(mock(PublicKey.class), url));
PartyInfo partyInfo = new PartyInfo(url, recipients, parties);
byte[] payload = new byte[] {};
when(partyInfoParser.from(payload)).thenReturn(partyInfo);
when(enclave.defaultPublicKey()).thenReturn(PublicKey.from("defaultKey".getBytes()));
EncodedPayload encodedPayload = mock(EncodedPayload.class);
List<String> uuidList = new ArrayList<>();
doAnswer(
(invocation) -> {
byte[] d = invocation.getArgument(0);
uuidList.add(new String(d));
return encodedPayload;
})
.when(enclave)
.encryptPayload(
any(byte[].class), any(PublicKey.class), anyList(), any(PrivacyMetadata.class));
when(payloadEncoder.encode(any(EncodedPayload.class))).thenReturn("somedata".getBytes());
WebTarget webTarget = mock(WebTarget.class);
when(restClient.target(url)).thenReturn(webTarget);
when(webTarget.path(anyString())).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
when(webTarget.request()).thenReturn(invocationBuilder);
Response response = mock(Response.class);
when(invocationBuilder.post(any(Entity.class))).thenReturn(response);
when(response.getStatus()).thenReturn(200);
when(response.getEntity()).thenReturn("");
doAnswer(
new Answer<String>() {
  private int i = 0;
  @Override
  public String answer(InvocationOnMock invocation) {
    return uuidList.get(i++);
  }
})
.when(response)
.readEntity(String.class);
// the test
partyInfoResource.partyInfo(payload, null);
ArgumentCaptor<byte[]> uuidCaptor = ArgumentCaptor.forClass(byte[].class);
verify(enclave, times(2))
.encryptPayload(
uuidCaptor.capture(), any(PublicKey.class), anyList(), any(PrivacyMetadata.class));
List<byte[]> capturedUUIDs = uuidCaptor.getAllValues();
assertThat(capturedUUIDs).hasSize(2);
assertThat(capturedUUIDs.get(0)).isNotEqualTo(capturedUUIDs.get(1));
// other verifications
verify(discovery).onUpdate(any(NodeInfo.class));
verify(partyInfoParser).from(payload);
verify(enclave).defaultPublicKey();
verify(payloadEncoder, times(2)).encode(encodedPayload);
verify(restClient, times(2)).target(url);
}
}
<|start_filename|>config/src/test/java/com/quorum/tessera/config/constraints/UrlValidatorTest.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import jakarta.validation.ConstraintValidatorContext;
import org.junit.Before;
import org.junit.Test;
public class UrlValidatorTest {
private UrlValidator urlValidator;
@Before
public void setUp() {
urlValidator = new UrlValidator();
}
@Test
public void valid() {
final ConstraintValidatorContext context = mock(ConstraintValidatorContext.class);
final boolean result = urlValidator.isValid("http://validurl:8080", context);
assertThat(result).isTrue();
verifyNoMoreInteractions(context);
}
@Test
public void invalid() {
final ConstraintValidatorContext context = mock(ConstraintValidatorContext.class);
final ConstraintValidatorContext.ConstraintViolationBuilder builder =
mock(ConstraintValidatorContext.ConstraintViolationBuilder.class);
when(context.buildConstraintViolationWithTemplate(anyString())).thenReturn(builder);
final boolean result = urlValidator.isValid("invalidurl", context);
assertThat(result).isFalse();
verify(context).disableDefaultConstraintViolation();
verify(context).buildConstraintViolationWithTemplate("Invalid URL: no protocol: invalidurl");
verifyNoMoreInteractions(context);
}
}
<|start_filename|>enclave/enclave-api/src/test/java/com/quorum/tessera/enclave/EncodedPayloadBuilderTest.java<|end_filename|>
package com.quorum.tessera.enclave;
import static java.util.Collections.singletonMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import com.quorum.tessera.encryption.Nonce;
import com.quorum.tessera.encryption.PublicKey;
import java.util.*;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class EncodedPayloadBuilderTest {
private final String sampleTxHash =
"bfMIqWJ/QGQhkK4USxMBxduzfgo/SIGoCros5bWYfPKUBinlAUCqLVOUAP9q+BgLlsWni1M6rnzfmaqSw2J5hQ==";
final PublicKey senderKey = PublicKey.from("SENDER_KEY".getBytes());
final PublicKey recipientKey = PublicKey.from("RECIPIENT_KEY".getBytes());
final byte[] cipherText = "cipherText".getBytes();
final byte[] cipherTextNonce = "cipherTextNonce".getBytes();
final byte[] recipientNonce = "recipientNonce".getBytes();
final byte[] recipientBox = "recipientBox".getBytes();
final Map<TxHash, byte[]> affectedContractTransactionsRaw =
Map.of(new TxHash(sampleTxHash), "transaction".getBytes());
final byte[] execHash = "execHash".getBytes();
@Test
public void build() {
final EncodedPayload sample =
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBox(recipientBox)
.withRecipientNonce(recipientNonce)
.withPrivacyFlag(3)
.withAffectedContractTransactions(affectedContractTransactionsRaw)
.withExecHash(execHash)
.withRecipientKey(recipientKey)
.build();
assertThat(sample.getSenderKey()).isEqualTo(senderKey);
assertThat(sample.getCipherText()).isEqualTo("cipherText".getBytes());
assertThat(sample.getCipherTextNonce().getNonceBytes()).isEqualTo(cipherTextNonce);
assertThat(sample.getRecipientNonce().getNonceBytes()).isEqualTo(recipientNonce);
assertThat(sample.getRecipientBoxes())
.hasSize(1)
.containsExactlyInAnyOrder(RecipientBox.from(recipientBox));
assertThat(sample.getRecipientKeys()).hasSize(1).containsExactlyInAnyOrder(recipientKey);
assertThat(sample.getAffectedContractTransactions()).hasSize(1);
assertThat(sample.getAffectedContractTransactions().keySet())
.containsExactly(new TxHash(sampleTxHash));
assertThat(sample.getExecHash()).isEqualTo(execHash);
assertThat(sample.getPrivacyMode()).isEqualTo(PrivacyMode.PRIVATE_STATE_VALIDATION);
byte[] otherRecipientBox = "OTHETRBIX".getBytes();
EncodedPayload fromSample =
EncodedPayload.Builder.from(sample).withRecipientBox(otherRecipientBox).build();
assertThat(fromSample.getRecipientBoxes())
.containsExactly(RecipientBox.from(recipientBox), RecipientBox.from(otherRecipientBox));
}
@Test
public void withNewKeysReplacedOld() {
final EncodedPayload sample =
EncodedPayload.Builder.create().withRecipientKey(recipientKey).build();
assertThat(sample.getRecipientKeys()).containsExactly(recipientKey);
final PublicKey replacementKey = PublicKey.from("replacement".getBytes());
final EncodedPayload updatedPayload =
EncodedPayload.Builder.from(sample).withNewRecipientKeys(List.of(replacementKey)).build();
assertThat(updatedPayload.getRecipientKeys()).containsExactly(replacementKey);
}
@Test
public void fromPSV() {
final EncodedPayload sample =
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBoxes(List.of(recipientBox))
.withRecipientNonce(recipientNonce)
.withRecipientKeys(List.of(recipientKey))
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withExecHash(execHash)
.build();
EncodedPayload result = EncodedPayload.Builder.from(sample).build();
assertThat(result).isNotSameAs(sample).isEqualTo(sample);
EqualsVerifier.forClass(EncodedPayload.class)
.withIgnoredFields("affectedContractTransactions")
.usingGetClass()
.verify();
}
@Test
public void fromMR() {
final EncodedPayload sample =
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBoxes(List.of(recipientBox))
.withRecipientNonce(recipientNonce)
.withRecipientKeys(List.of(recipientKey))
.withPrivacyMode(PrivacyMode.MANDATORY_RECIPIENTS)
.withMandatoryRecipients(Set.of(recipientKey))
.build();
EncodedPayload result = EncodedPayload.Builder.from(sample).build();
assertThat(result).isNotSameAs(sample).isEqualTo(sample);
}
@Test
public void withPrivacyGroupId() {
final EncodedPayload sample =
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBox(recipientBox)
.withRecipientNonce(recipientNonce)
.withPrivacyFlag(3)
.withAffectedContractTransactions(affectedContractTransactionsRaw)
.withExecHash(execHash)
.withRecipientKey(recipientKey)
.withPrivacyGroupId(PrivacyGroup.Id.fromBytes("PRIVACYGROUPID".getBytes()))
.build();
final EncodedPayload result = EncodedPayload.Builder.from(sample).build();
assertThat(result).isNotSameAs(sample).isEqualTo(sample);
EqualsVerifier.forClass(EncodedPayload.class)
.withIgnoredFields("affectedContractTransactions")
.usingGetClass()
.verify();
assertThat(result.getPrivacyGroupId()).isPresent();
assertThat(result.getPrivacyGroupId().get())
.isEqualTo(PrivacyGroup.Id.fromBytes("PRIVACYGROUPID".getBytes()));
}
@Test(expected = RuntimeException.class)
public void nonPSVButExecHashPresent() {
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBox(recipientBox)
.withRecipientNonce(recipientNonce)
.withPrivacyFlag(1)
.withAffectedContractTransactions(affectedContractTransactionsRaw)
.withExecHash(execHash)
.withRecipientKey(recipientKey)
.withPrivacyGroupId(PrivacyGroup.Id.fromBytes("PRIVACYGROUPID".getBytes()))
.build();
}
@Test(expected = RuntimeException.class)
public void psvTxWithoutExecHash() {
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBox(recipientBox)
.withRecipientNonce(recipientNonce)
.withPrivacyFlag(3)
.withAffectedContractTransactions(affectedContractTransactionsRaw)
.withRecipientKey(recipientKey)
.withPrivacyGroupId(PrivacyGroup.Id.fromBytes("PRIVACYGROUPID".getBytes()))
.build();
}
@Test(expected = RuntimeException.class)
public void mandatoryRecipientsInvalid() {
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBox(recipientBox)
.withRecipientNonce(recipientNonce)
.withPrivacyFlag(1)
.withMandatoryRecipients(Set.of(PublicKey.from("KEY1".getBytes())))
.build();
}
@Test(expected = RuntimeException.class)
public void mandatoryRecipientsInvalidNoData() {
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBox(recipientBox)
.withRecipientNonce(recipientNonce)
.withPrivacyFlag(2)
.build();
}
@Test
public void encodeForSpecificRecipientNoPsv() {
final PublicKey key1 = mock(PublicKey.class);
final PublicKey key2 = mock(PublicKey.class);
final PublicKey key3 = mock(PublicKey.class);
final byte[] box1 = "box1".getBytes();
final byte[] box2 = "box2".getBytes();
final byte[] box3 = "box3".getBytes();
final EncodedPayload original =
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBoxes(List.of(box1, box2, box3))
.withRecipientNonce(recipientNonce)
.withRecipientKeys(List.of(key1, key2, key3))
.build();
final EncodedPayload result = EncodedPayload.Builder.forRecipient(original, key2).build();
assertThat(result).isNotNull();
assertThat(result.getCipherText()).isEqualTo(original.getCipherText());
assertThat(result.getSenderKey()).isEqualTo(original.getSenderKey());
assertThat(result.getRecipientNonce()).isEqualTo(original.getRecipientNonce());
assertThat(result.getCipherTextNonce()).isEqualTo(original.getCipherTextNonce());
assertThat(result.getRecipientKeys()).hasSize(1).containsExactly(key2);
assertThat(result.getRecipientBoxes()).hasSize(1).containsExactly(RecipientBox.from(box2));
assertThat(result.getRecipientBoxes()).isNotEqualTo(original.getRecipientBoxes());
assertThat(result.getPrivacyMode()).isEqualTo(PrivacyMode.STANDARD_PRIVATE);
assertThat(result.getPrivacyGroupId()).isNotPresent();
}
@Test
public void encodeForSpecificRecipientWithPsv() {
final byte[] sender =
new byte[] {
5, 66, -34, 71, -62, 114, 81, 104, 98, -70, -32, -116, 83, -15, -53, 3, 68, 57, -89, 57,
24, 79, -25, 7, 32, -115, -39, 40, 23, -78, -36, 26
};
final byte[] cipherText =
new byte[] {
-46, -26, -18, 127, 37, -2, -84, -56, -71, 26, 3, 102, -61, 38, -1, 37, 105, 2, 10, 86, 6,
117, 69, 73, 91, 81, 68, 106, 23, 74, 12, 104, -63, 63, -119, 95, -16, -82, -34, 101, 89,
38, -19, 8, 23, -70, 90, 5, -7, -15, 23, -8, -88, 47, 72, 105, -103, -34, 10, 109, -48,
114, -127, -38, 41, 12, 3, 72, 113, -56, -90, -70, 124, -25, 127, 60, 100, 95, 127, 31,
-72, -101, 26, -12, -9, 108, 54, 2, 124, 22, 55, 9, 123, 54, -16, 51, 28, -25, -102, -100,
-23, 89, -15, 86, 22, -100, -63, -110, -2, -32, -1, 12, -116, 102, -43, 92, 2, 105, -78,
-73, 111, -123, -59, -118, -32, 47, -63, 41, 72, -72, 35, -68, 45, 77, 110, -24, -113,
-106, -31, -42, 13, -123, 54, 45, 83, -38, -57, 116, 107, -84, 22, -30, -49, 84, 39, 17,
-20, -75, -122, -6, 73, -61, 70, -53, -65, -22, 13, 23, 43, -101, 23, 16, 31, -1, -19, -8,
-94, -119, -28, -127, -101, 43, 31, -28, 16, -78, -86, 47, 42, 21, 115, 127, -81, 44, -33,
-12, -74, -77, 111, 0, 121, 70, 67, 81, 74, 90, 116, -14, -75, 82, -110, -119, -23, 84,
74, 61, -31, -66, -71, -106, 60, 127, -113, -26, 73, -50, -112, -45, 82, 37, -68, -49, 40,
-73, -53, 85, -71, 82, 32, 117, 25, -81, -13, -30, -48, -118, -82, 125, -63, 1, -46, -115,
-104, 32, 2, -1, -124, -88, -20, -77, 108, 123, 41, 78, 108, -88, 65, 84, 66, -40, 79,
-118, 63, -109, -85, -52, 8, -97, -49, 87, -27, -63, 75, -45, 51, 7, 116, -68, 16, 89, 53,
14, -121, 53, 38, -16, 122, -47, -110, -19, 72, 102, -81, 13, 13, -28, -103, 39, -26, 36,
-15, -61, -91, -64, -99, 118, -34, -45, -119, 33, 57, 92, 119, 95, -17, 19, 50, 46, -119,
88, -123, -49, -68, -105, 74, -15, 102, 74, -19, 29, 75, -114, -34, -54, -6, 111, 122, 2,
55, 99, 58, -31, 123, 50, -84, -128, 71, 79, 19, -40, 92, 7, 75, -31, -113, -60, -8, 121,
105, 91, -127, 69, 106, -49, -13, -91, -34
};
final byte[] nonce =
new byte[] {
-114, -128, 47, 49, 6, -71, -111, -76, -100, -16, 113, -126, 3, 107, 55, 1, 43, -6, -43,
-104, -128, -125, -37, 31
};
final byte[] recipientNonce =
new byte[] {
-110, 45, 44, -76, 17, 23, -76, 0, -75, 112, 70, 97, 108, -70, -76, 32, 100, -46, -67,
107, -89, 98, 64, -85
};
final PublicKey recipient1 = PublicKey.from("recipient".getBytes());
final PublicKey recipient2 = PublicKey.from("anotherRecipient".getBytes());
List<PublicKey> recipientList = new ArrayList<>();
recipientList.add(recipient1);
recipientList.add(recipient2);
List<byte[]> recipientBoxes = new ArrayList<>();
recipientBoxes.add("box".getBytes());
recipientBoxes.add("anotherBox".getBytes());
final PrivacyGroup.Id groupId = PrivacyGroup.Id.fromBytes("group".getBytes());
final EncodedPayload originalPayload =
EncodedPayload.Builder.create()
.withSenderKey(PublicKey.from(sender))
.withCipherText(cipherText)
.withCipherTextNonce(new Nonce(nonce))
.withRecipientBoxes(recipientBoxes)
.withRecipientNonce(new Nonce(recipientNonce))
.withRecipientKeys(recipientList)
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withAffectedContractTransactions(
singletonMap(new TxHash("test".getBytes()), "test".getBytes()))
.withExecHash("execHash".getBytes())
.withPrivacyGroupId(groupId)
.build();
final EncodedPayload payload1 =
EncodedPayload.Builder.forRecipient(originalPayload, recipient1).build();
assertThat(payload1).isNotNull();
assertThat(payload1.getCipherText()).isEqualTo(originalPayload.getCipherText());
assertThat(payload1.getSenderKey()).isEqualTo(originalPayload.getSenderKey());
assertThat(payload1.getRecipientNonce()).isEqualTo(originalPayload.getRecipientNonce());
assertThat(payload1.getCipherTextNonce()).isEqualTo(originalPayload.getCipherTextNonce());
assertThat(payload1.getRecipientKeys()).hasSize(2).containsExactly(recipient1, recipient2);
assertThat(payload1.getRecipientBoxes()).isNotEqualTo(originalPayload.getRecipientBoxes());
assertThat(payload1.getRecipientBoxes())
.hasSize(1)
.containsExactly(RecipientBox.from("box".getBytes()));
assertThat(payload1.getPrivacyGroupId()).isPresent().get().isEqualTo(groupId);
final EncodedPayload payload2 =
EncodedPayload.Builder.forRecipient(originalPayload, recipient2).build();
assertThat(payload2).isNotNull();
assertThat(payload2.getCipherText()).isEqualTo(originalPayload.getCipherText());
assertThat(payload2.getSenderKey()).isEqualTo(originalPayload.getSenderKey());
assertThat(payload2.getRecipientNonce()).isEqualTo(originalPayload.getRecipientNonce());
assertThat(payload2.getCipherTextNonce()).isEqualTo(originalPayload.getCipherTextNonce());
assertThat(payload2.getRecipientKeys()).hasSize(2).containsExactly(recipient2, recipient1);
assertThat(payload2.getRecipientBoxes()).isNotEqualTo(originalPayload.getRecipientBoxes());
assertThat(payload2.getRecipientBoxes())
.hasSize(1)
.containsExactly(RecipientBox.from("anotherBox".getBytes()));
assertThat(payload1.getPrivacyGroupId()).isPresent().get().isEqualTo(groupId);
}
@Test(expected = InvalidRecipientException.class)
public void encodeForSpecificRecipientNotContainedInPayload() {
final EncodedPayload original =
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText(cipherText)
.withCipherTextNonce(cipherTextNonce)
.withRecipientBox(recipientBox)
.withRecipientNonce(recipientNonce)
.withRecipientKey(recipientKey)
.build();
final PublicKey recipientKey = mock(PublicKey.class);
EncodedPayload.Builder.forRecipient(original, recipientKey);
}
}
<|start_filename|>tessera-data/src/main/java/com/quorum/tessera/data/EntityManagerCallback.java<|end_filename|>
package com.quorum.tessera.data;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceException;
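/** A unit of work executed against an {@link EntityManager}; may throw {@link PersistenceException}. */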
public interface EntityManagerCallback<T> {
T execute(EntityManager entityManager) throws PersistenceException;
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/keypairs/AzureVaultKeyPair.java<|end_filename|>
package com.quorum.tessera.config.keypairs;
import com.quorum.tessera.config.constraints.ValidAzureVaultKeyPair;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import jakarta.validation.constraints.Size;
import jakarta.xml.bind.annotation.XmlElement;
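/**
 * Key pair whose material is held in Azure Key Vault and referenced by key IDs and optional
 * 32-character versions; the key values themselves are not fetched at configuration time.
 */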
@ValidAzureVaultKeyPair
public class AzureVaultKeyPair implements ConfigKeyPair {
@NotNull
@XmlElement
@Pattern(
regexp = "^[0-9a-zA-Z\\-]*$",
message = "Azure Key Vault key IDs can only contain alphanumeric characters and dashes (-)")
private String publicKeyId;
@NotNull
@XmlElement
@Pattern(
regexp = "^[0-9a-zA-Z\\-]*$",
message = "Azure Key Vault key IDs can only contain alphanumeric characters and dashes (-)")
private String privateKeyId;
@XmlElement
@Size(min = 32, max = 32, message = "length must be 32 characters")
private String publicKeyVersion;
@XmlElement
@Size(min = 32, max = 32, message = "length must be 32 characters")
private String privateKeyVersion;
public AzureVaultKeyPair(
String publicKeyId, String privateKeyId, String publicKeyVersion, String privateKeyVersion) {
this.publicKeyId = publicKeyId;
this.privateKeyId = privateKeyId;
this.publicKeyVersion = publicKeyVersion;
this.privateKeyVersion = privateKeyVersion;
}
public String getPublicKeyId() {
return this.publicKeyId;
}
public String getPrivateKeyId() {
return this.privateKeyId;
}
public String getPublicKeyVersion() {
return publicKeyVersion;
}
public String getPrivateKeyVersion() {
return privateKeyVersion;
}
@Override
public String getPublicKey() {
// keys are not fetched from vault yet so return null
return null;
}
@Override
public String getPrivateKey() {
// keys are not fetched from vault yet so return null
return null;
}
@Override
public void withPassword(char[] password) {
// password not used with vault stored keys
}
@Override
public char[] getPassword() {
// no password to return
return new char[0];
}
}
<|start_filename|>tessera-data/src/main/java/com/quorum/tessera/data/staging/internal/StagingEntityDAOProvider.java<|end_filename|>
package com.quorum.tessera.data.staging.internal;
import com.quorum.tessera.config.Config;
import com.quorum.tessera.config.ConfigFactory;
import com.quorum.tessera.data.DataSourceFactory;
import com.quorum.tessera.data.staging.StagingEntityDAO;
import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.Persistence;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
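/**
 * Builds the {@link StagingEntityDAO} by creating an EclipseLink-configured EntityManagerFactory
 * for the "tessera-recover" persistence unit against the configured data source.
 */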
public class StagingEntityDAOProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(StagingEntityDAOProvider.class);
public static StagingEntityDAO provider() {
LOGGER.debug("Creating StagingEntityDAO");
Config config = ConfigFactory.create().getConfig();
final DataSource dataSource = DataSourceFactory.create().create(config.getJdbcConfig());
Map<String, Object> properties = new HashMap<>();
properties.put("jakarta.persistence.nonJtaDataSource", dataSource);
properties.put(
"eclipselink.logging.logger", "org.eclipse.persistence.logging.slf4j.SLF4JLogger");
properties.put("eclipselink.logging.level", "FINE");
properties.put("eclipselink.logging.parameters", "true");
properties.put("eclipselink.logging.level.sql", "FINE");
properties.put(
"jakarta.persistence.schema-generation.database.action",
config.getJdbcConfig().isAutoCreateTables() ? "drop-and-create" : "none");
properties.put(
"eclipselink.session.customizer", "com.quorum.tessera.eclipselink.AtomicLongSequence");
LOGGER.debug("Creating EntityManagerFactory from {}", properties);
final EntityManagerFactory entityManagerFactory =
Persistence.createEntityManagerFactory("tessera-recover", properties);
LOGGER.debug("Created EntityManagerFactory from {}", properties);
StagingEntityDAO stagingEntityDAO = new StagingEntityDAOImpl(entityManagerFactory);
LOGGER.debug("Created StagingEntityDAO {}", stagingEntityDAO);
return stagingEntityDAO;
}
}
<|start_filename|>tessera-recover/src/main/java/com/quorum/tessera/recovery/workflow/PreparePayloadForRecipient.java<|end_filename|>
package com.quorum.tessera.recovery.workflow;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.encryption.PublicKey;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
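/**
 * Batch workflow step that reshapes an {@link EncodedPayload} into the per-recipient form
 * required when republishing transactions, covering the sender, known-recipient and
 * legacy box-only cases.
 */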
public class PreparePayloadForRecipient implements BatchWorkflowAction {
@Override
public boolean execute(final BatchWorkflowContext event) {
final EncodedPayload payload = event.getEncodedPayload();
final PublicKey targetPublicKey = event.getRecipientKey();
if (!Objects.equals(payload.getSenderKey(), targetPublicKey)) {
// we are the sender, so need to format the payload for the recipient
// which is: for PSV, all recipients and one box, or just one box and one recipient
final EncodedPayload adjustedPayload =
EncodedPayload.Builder.forRecipient(payload, targetPublicKey).build();
event.setPayloadsToPublish(Set.of(adjustedPayload));
return true;
}
// the resend key is the sender of the tx, trying to rebuild its contents
// we have the keys, so just matching keys to boxes
if (!payload.getRecipientKeys().isEmpty()) {
final int numberOfBoxes = payload.getRecipientBoxes().size();
// we know the recipients, we just need to format them per recipient we have
// but only for ones we have boxes for
final Set<EncodedPayload> formattedPayloads =
payload.getRecipientKeys().stream()
.filter(key -> payload.getRecipientKeys().indexOf(key) < numberOfBoxes)
.map(key -> EncodedPayload.Builder.forRecipient(payload, key).build())
.collect(Collectors.toSet());
event.setPayloadsToPublish(formattedPayloads);
return true;
}
// We only have boxes, no recipients (pre-1.0 standard private)
// Create individual payloads with each box and search for each box's key.
final Set<EncodedPayload> formattedPayloads =
payload.getRecipientBoxes().stream()
.map(
box ->
EncodedPayload.Builder.from(payload)
.withRecipientBoxes(List.of(box.getData()))
.build())
.collect(Collectors.toSet());
event.setPayloadsToPublish(formattedPayloads);
return true;
}
}
<|start_filename|>tessera-core/src/test/java/com/quorum/tessera/transaction/resend/internal/ResendManagerImplTest.java<|end_filename|>
package com.quorum.tessera.transaction.resend.internal;
import static java.util.Collections.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.catchThrowable;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import com.quorum.tessera.data.EncryptedTransaction;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.data.MessageHash;
import com.quorum.tessera.enclave.*;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.transaction.resend.ResendManager;
import java.util.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
public class ResendManagerImplTest {
private EncryptedTransactionDAO encryptedTransactionDAO;
private Enclave enclave;
private ResendManager resendManager;
final PublicKey senderKey = PublicKey.from("SENDER".getBytes());
final byte[] cipherText = "CIPHERTEXT".getBytes();
final PublicKey recipientKey1 = PublicKey.from("RECIPIENT-KEY1".getBytes());
final RecipientBox recipientBox1 = RecipientBox.from("BOX1".getBytes());
final PublicKey recipientKey2 = PublicKey.from("RECIPIENT-KEY2".getBytes());
final RecipientBox recipientBox2 = RecipientBox.from("BOX2".getBytes());
@Before
public void init() {
this.encryptedTransactionDAO = mock(EncryptedTransactionDAO.class);
this.enclave = mock(Enclave.class);
PayloadDigest payloadDigest = cipherText -> cipherText;
this.resendManager = new ResendManagerImpl(encryptedTransactionDAO, enclave, payloadDigest);
}
@After
public void after() {
verifyNoMoreInteractions(encryptedTransactionDAO, enclave);
}
@Test
public void storePayloadAsSenderWhenTxIsNotPresent() {
final PublicKey senderKey = PublicKey.from("SENDER".getBytes());
// A legacy payload has empty recipient and box
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getCipherText()).thenReturn("CIPHERTEXT".getBytes());
when(encodedPayload.getSenderKey()).thenReturn(senderKey);
final byte[] newEncryptedMasterKey = "newbox".getBytes();
when(enclave.getPublicKeys()).thenReturn(singleton(senderKey));
when(encryptedTransactionDAO.retrieveByHash(any(MessageHash.class)))
.thenReturn(Optional.empty());
when(enclave.createNewRecipientBox(any(), any())).thenReturn(newEncryptedMasterKey);
resendManager.acceptOwnMessage(encodedPayload);
ArgumentCaptor<EncryptedTransaction> updatedTxCaptor =
ArgumentCaptor.forClass(EncryptedTransaction.class);
verify(encryptedTransactionDAO).save(updatedTxCaptor.capture());
final EncodedPayload updatedPayload = updatedTxCaptor.getValue().getPayload();
assertThat(updatedPayload).isNotNull();
// The sender was added
assertThat(updatedPayload.getRecipientKeys()).containsExactly(senderKey);
// New box was created
assertThat(updatedPayload.getRecipientBoxes())
.containsExactly(RecipientBox.from(newEncryptedMasterKey));
verify(encryptedTransactionDAO).retrieveByHash(any(MessageHash.class));
verify(enclave).getPublicKeys();
verify(enclave).createNewRecipientBox(any(), any());
verify(enclave).unencryptTransaction(encodedPayload, senderKey);
}
@Test
public void storePayloadAsSenderWhenTxIsPresent() {
final PublicKey senderKey = PublicKey.from("SENDER".getBytes());
final PublicKey recipientKey1 = PublicKey.from("RECIPIENT-KEY1".getBytes());
final RecipientBox recipientBox1 = RecipientBox.from("BOX1".getBytes());
final PublicKey recipientKey2 = PublicKey.from("RECIPIENT-KEY2".getBytes());
final RecipientBox recipientBox2 = RecipientBox.from("BOX2".getBytes());
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getCipherText()).thenReturn("CIPHERTEXT".getBytes());
when(encodedPayload.getSenderKey()).thenReturn(senderKey);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipientKey2));
when(encodedPayload.getRecipientBoxes()).thenReturn(List.of(recipientBox2));
final EncodedPayload existingEncodedPayload = mock(EncodedPayload.class);
when(existingEncodedPayload.getCipherText()).thenReturn("CIPHERTEXT".getBytes());
when(existingEncodedPayload.getSenderKey()).thenReturn(senderKey);
when(existingEncodedPayload.getRecipientKeys()).thenReturn(List.of(recipientKey1));
when(existingEncodedPayload.getRecipientBoxes()).thenReturn(List.of(recipientBox1));
final EncryptedTransaction et =
new EncryptedTransaction(mock(MessageHash.class), existingEncodedPayload);
when(enclave.getPublicKeys()).thenReturn(singleton(senderKey));
when(encryptedTransactionDAO.retrieveByHash(any(MessageHash.class)))
.thenReturn(Optional.of(et));
resendManager.acceptOwnMessage(encodedPayload);
assertThat(encodedPayload.getRecipientKeys()).containsExactly(recipientKey2);
assertThat(encodedPayload.getRecipientBoxes()).containsExactly(recipientBox2);
ArgumentCaptor<EncryptedTransaction> updatedTxCaptor =
ArgumentCaptor.forClass(EncryptedTransaction.class);
verify(encryptedTransactionDAO).update(updatedTxCaptor.capture());
final EncodedPayload updated = updatedTxCaptor.getValue().getPayload();
// Check recipients are being added
assertThat(updated.getRecipientKeys())
.hasSize(2)
.containsExactlyInAnyOrder(recipientKey1, recipientKey2);
// Check boxes are being added
assertThat(updated.getRecipientBoxes()).hasSize(2);
verify(encryptedTransactionDAO).retrieveByHash(any(MessageHash.class));
verify(enclave).getPublicKeys();
verify(enclave).unencryptTransaction(encodedPayload, senderKey);
verify(enclave).unencryptTransaction(existingEncodedPayload, senderKey);
}
@Test
public void storePayloadAsSenderWhenTxIsPresentAndPsv() {
final PublicKey senderKey = PublicKey.from("SENDER".getBytes());
final PublicKey recipientKey1 = PublicKey.from("RECIPIENT-KEY1".getBytes());
final RecipientBox recipientBox1 = RecipientBox.from("BOX1".getBytes());
final PublicKey recipientKey2 = PublicKey.from("RECIPIENT-KEY2".getBytes());
final RecipientBox recipientBox2 = RecipientBox.from("BOX2".getBytes());
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getCipherText()).thenReturn("CIPHERTEXT".getBytes());
when(encodedPayload.getSenderKey()).thenReturn(senderKey);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipientKey2, senderKey));
when(encodedPayload.getRecipientBoxes()).thenReturn(List.of(recipientBox2));
when(encodedPayload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
final EncodedPayload existingEncodedPayload = mock(EncodedPayload.class);
when(existingEncodedPayload.getCipherText()).thenReturn("CIPHERTEXT".getBytes());
when(existingEncodedPayload.getSenderKey()).thenReturn(senderKey);
when(existingEncodedPayload.getRecipientKeys()).thenReturn(List.of(recipientKey1));
when(existingEncodedPayload.getRecipientBoxes()).thenReturn(List.of(recipientBox1));
final EncryptedTransaction et =
new EncryptedTransaction(mock(MessageHash.class), existingEncodedPayload);
when(enclave.getPublicKeys()).thenReturn(singleton(senderKey));
when(encryptedTransactionDAO.retrieveByHash(any(MessageHash.class)))
.thenReturn(Optional.of(et));
resendManager.acceptOwnMessage(encodedPayload);
ArgumentCaptor<EncryptedTransaction> updatedTxCaptor =
ArgumentCaptor.forClass(EncryptedTransaction.class);
verify(encryptedTransactionDAO).update(updatedTxCaptor.capture());
final EncodedPayload updated = updatedTxCaptor.getValue().getPayload();
// Check recipients are being added
assertThat(updated.getRecipientKeys()).containsExactlyInAnyOrder(recipientKey1, recipientKey2);
// Check boxes are being added
assertThat(updated.getRecipientBoxes()).hasSize(2);
verify(encryptedTransactionDAO).retrieveByHash(any(MessageHash.class));
verify(enclave).getPublicKeys();
verify(enclave, times(2)).unencryptTransaction(any(EncodedPayload.class), eq(senderKey));
}
@Test
public void storePayloadAsSenderWhenTxIsPresentAndRecipientAlreadyExists() {
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getCipherText()).thenReturn("CIPHERTEXT".getBytes());
when(encodedPayload.getSenderKey()).thenReturn(senderKey);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipientKey2));
when(encodedPayload.getRecipientBoxes()).thenReturn(List.of(recipientBox2));
final EncodedPayload existingEncodedPayload = mock(EncodedPayload.class);
when(existingEncodedPayload.getCipherText()).thenReturn("CIPHERTEXT".getBytes());
when(existingEncodedPayload.getSenderKey()).thenReturn(senderKey);
when(existingEncodedPayload.getRecipientKeys())
.thenReturn(List.of(recipientKey1, recipientKey2));
when(existingEncodedPayload.getRecipientBoxes())
.thenReturn(List.of(recipientBox1, recipientBox2));
final EncryptedTransaction et = mock(EncryptedTransaction.class);
when(et.getPayload()).thenReturn(existingEncodedPayload);
when(enclave.getPublicKeys()).thenReturn(Set.of(senderKey));
when(encryptedTransactionDAO.retrieveByHash(any(MessageHash.class)))
.thenReturn(Optional.of(et));
resendManager.acceptOwnMessage(encodedPayload);
assertThat(encodedPayload.getRecipientKeys()).containsExactly(recipientKey2);
assertThat(encodedPayload.getRecipientBoxes()).containsExactly(recipientBox2);
verify(encryptedTransactionDAO).retrieveByHash(any(MessageHash.class));
verify(enclave).getPublicKeys();
verify(enclave).unencryptTransaction(encodedPayload, senderKey);
}
@Test
public void storePayloadAsSenderWhenTxIsPresentAndRecipientExisted() {
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getSenderKey()).thenReturn(senderKey);
when(encodedPayload.getCipherText()).thenReturn(cipherText);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipientKey1));
when(encodedPayload.getRecipientBoxes()).thenReturn(List.of(recipientBox1));
when(enclave.getPublicKeys()).thenReturn(singleton(senderKey));
final EncryptedTransaction et = mock(EncryptedTransaction.class);
when(et.getPayload()).thenReturn(encodedPayload);
when(encryptedTransactionDAO.retrieveByHash(any(MessageHash.class)))
.thenReturn(Optional.of(et));
resendManager.acceptOwnMessage(encodedPayload);
assertThat(encodedPayload.getRecipientKeys()).containsExactly(recipientKey1);
assertThat(encodedPayload.getRecipientBoxes()).containsExactly(recipientBox1);
verify(encryptedTransactionDAO).retrieveByHash(any(MessageHash.class));
verify(enclave).getPublicKeys();
verify(enclave).unencryptTransaction(encodedPayload, senderKey);
}
@Test
public void messageMustContainManagedKeyAsSender() {
final PublicKey someSender = PublicKey.from("SENDER_WHO_ISNT_US".getBytes());
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getSenderKey()).thenReturn(someSender);
when(encodedPayload.getCipherText()).thenReturn(cipherText);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipientKey1));
when(encodedPayload.getRecipientBoxes()).thenReturn(List.of(recipientBox1));
when(enclave.getPublicKeys()).thenReturn(singleton(PublicKey.from("OTHER".getBytes())));
final Throwable throwable =
catchThrowable(() -> this.resendManager.acceptOwnMessage(encodedPayload));
assertThat(throwable)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Message Q0lQSEVSVEVYVA== does not have one the nodes own keys as a sender");
verify(enclave).getPublicKeys();
verify(enclave).unencryptTransaction(encodedPayload, someSender);
}
@Test
public void invalidPayloadFromMaliciousRecipient() {
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getSenderKey()).thenReturn(senderKey);
when(encodedPayload.getCipherText()).thenReturn(cipherText);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipientKey1));
when(encodedPayload.getRecipientBoxes()).thenReturn(List.of(recipientBox1));
final EncodedPayload existingEncodedPayload = mock(EncodedPayload.class);
when(existingEncodedPayload.getCipherText()).thenReturn("CIPHERTEXT".getBytes());
when(existingEncodedPayload.getSenderKey()).thenReturn(senderKey);
when(existingEncodedPayload.getRecipientKeys()).thenReturn(List.of());
when(existingEncodedPayload.getRecipientBoxes()).thenReturn(List.of());
final EncryptedTransaction et = mock(EncryptedTransaction.class);
when(et.getPayload()).thenReturn(existingEncodedPayload);
when(enclave.getPublicKeys()).thenReturn(singleton(senderKey));
when(encryptedTransactionDAO.retrieveByHash(any(MessageHash.class)))
.thenReturn(Optional.of(et));
when(enclave.unencryptTransaction(existingEncodedPayload, senderKey))
.thenReturn("payload1".getBytes());
final Throwable throwable =
catchThrowable(() -> resendManager.acceptOwnMessage(encodedPayload));
assertThat(throwable)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid payload provided");
verify(encryptedTransactionDAO).retrieveByHash(any(MessageHash.class));
verify(enclave).getPublicKeys();
verify(enclave).unencryptTransaction(encodedPayload, senderKey);
verify(enclave).unencryptTransaction(existingEncodedPayload, senderKey);
}
@Test
public void constructWithMinimalArgs() {
assertThat(new ResendManagerImpl(encryptedTransactionDAO, enclave, mock(PayloadDigest.class)))
.isNotNull();
}
}
<|start_filename|>tessera-data/src/main/java/com/quorum/tessera/data/staging/StagingTransactionListener.java<|end_filename|>
package com.quorum.tessera.data.staging;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import jakarta.persistence.PostLoad;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class StagingTransactionListener {
private static final Logger LOGGER = LoggerFactory.getLogger(StagingTransactionListener.class);
@PostLoad
public void onLoad(StagingTransaction stagingTransaction) {
LOGGER.debug("onLoad[{}]", stagingTransaction);
final EncodedPayloadCodec encodedPayloadCodec = stagingTransaction.getEncodedPayloadCodec();
final byte[] encodedPayloadData = stagingTransaction.getPayload();
final PayloadEncoder payloadEncoder = PayloadEncoder.create(encodedPayloadCodec);
final EncodedPayload encodedPayload = payloadEncoder.decode(encodedPayloadData);
stagingTransaction.setEncodedPayload(encodedPayload);
}
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/test/java/com/quorum/tessera/p2p/PrivacyGroupResourceTest.java<|end_filename|>
package com.quorum.tessera.p2p;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.privacygroup.PrivacyGroupManager;
import jakarta.ws.rs.core.Response;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class PrivacyGroupResourceTest {
private PrivacyGroupManager privacyGroupManager;
private PrivacyGroupResource privacyGroupResource;
@Before
public void beforeTest() throws Exception {
privacyGroupManager = mock(PrivacyGroupManager.class);
privacyGroupResource = new PrivacyGroupResource(privacyGroupManager);
}
@After
public void afterTest() throws Exception {
verifyNoMoreInteractions(privacyGroupManager);
}
@Test
public void testStorePrivacyGroup() {
doNothing().when(privacyGroupManager).storePrivacyGroup("encoded".getBytes());
final Response response = privacyGroupResource.storePrivacyGroup("encoded".getBytes());
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
verify(privacyGroupManager).storePrivacyGroup("encoded".getBytes());
}
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/test/java/com/quorum/tessera/p2p/resend/RestResendClientTest.java<|end_filename|>
package com.quorum.tessera.p2p.resend;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.client.Invocation;
import jakarta.ws.rs.client.WebTarget;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class RestResendClientTest {
private Response.Status expectedResponseStatus;
public RestResendClientTest(Response.Status expectedResponseStatus) {
this.expectedResponseStatus = expectedResponseStatus;
}
@Test
public void makeResendRequest() {
try (var entityMockedStatic = mockStatic(Entity.class)) {
Entity<ResendRequest> outboundEntity = mock(Entity.class);
ResendRequest resendRequest = mock(ResendRequest.class);
entityMockedStatic
.when(() -> Entity.entity(resendRequest, MediaType.APPLICATION_JSON))
.thenReturn(outboundEntity);
String targetUrl = "targetUrl";
Client client = mock(Client.class);
WebTarget webTarget = mock(WebTarget.class);
when(client.target(targetUrl)).thenReturn(webTarget);
when(webTarget.path("/resend")).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
when(webTarget.request()).thenReturn(invocationBuilder);
Response response = mock(Response.class);
when(response.getStatus()).thenReturn(expectedResponseStatus.getStatusCode());
when(invocationBuilder.post(outboundEntity)).thenReturn(response);
RestResendClient restResendClient = new RestResendClient(client);
boolean outcome = restResendClient.makeResendRequest(targetUrl, resendRequest);
if (expectedResponseStatus == Response.Status.OK) {
assertThat(outcome).isTrue();
} else {
assertThat(outcome).isFalse();
}
entityMockedStatic.verify(() -> Entity.entity(resendRequest, MediaType.APPLICATION_JSON));
entityMockedStatic.verifyNoMoreInteractions();
verify(client).target(targetUrl);
verify(webTarget).path("/resend");
verify(webTarget).request();
verify(invocationBuilder).post(outboundEntity);
verifyNoMoreInteractions(outboundEntity, resendRequest, client, webTarget, invocationBuilder);
}
}
@Parameterized.Parameters(name = "ResponseStatus {0}")
public static Collection<Response.Status> statuses() {
return Arrays.asList(Response.Status.values());
}
}
<|start_filename|>tessera-jaxrs/transaction-jaxrs/src/main/java/com/quorum/tessera/q2t/internal/RestPayloadPublisher.java<|end_filename|>
package com.quorum.tessera.q2t.internal;
import com.quorum.tessera.discovery.Discovery;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.enclave.PrivacyMode;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.partyinfo.node.NodeInfo;
import com.quorum.tessera.transaction.exception.EnhancedPrivacyNotSupportedException;
import com.quorum.tessera.transaction.exception.MandatoryRecipientsNotSupportedException;
import com.quorum.tessera.transaction.publish.NodeOfflineException;
import com.quorum.tessera.transaction.publish.PayloadPublisher;
import com.quorum.tessera.transaction.publish.PublishPayloadException;
import com.quorum.tessera.version.EnhancedPrivacyVersion;
import com.quorum.tessera.version.MandatoryRecipientsVersion;
import jakarta.ws.rs.ProcessingException;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.net.URI;
import java.util.Objects;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RestPayloadPublisher implements PayloadPublisher {
private static final Logger LOGGER = LoggerFactory.getLogger(RestPayloadPublisher.class);
private final Client client;
private final Discovery discovery;
RestPayloadPublisher(Client client, Discovery discovery) {
this.client = Objects.requireNonNull(client);
this.discovery = Objects.requireNonNull(discovery);
}
@Override
public void publishPayload(EncodedPayload payload, PublicKey recipientKey) {
final NodeInfo remoteNodeInfo = discovery.getRemoteNodeInfo(recipientKey);
final Set<String> supportedApiVersions = remoteNodeInfo.supportedApiVersions();
final EncodedPayloadCodec preferredCodec =
EncodedPayloadCodec.getPreferredCodec(supportedApiVersions);
final PayloadEncoder payloadEncoder = PayloadEncoder.create(preferredCodec);
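// Enhanced-privacy payloads require the recipient node to advertise API version 2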
if (PrivacyMode.STANDARD_PRIVATE != payload.getPrivacyMode()
&& !supportedApiVersions.contains(EnhancedPrivacyVersion.API_VERSION_2)) {
throw new EnhancedPrivacyNotSupportedException(
"Transactions with enhanced privacy is not currently supported on recipient "
+ recipientKey.encodeToBase64());
}
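// Mandatory-recipients payloads additionally require API version 4 on the recipient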
if (PrivacyMode.MANDATORY_RECIPIENTS == payload.getPrivacyMode()
&& !supportedApiVersions.contains(MandatoryRecipientsVersion.API_VERSION_4)) {
throw new MandatoryRecipientsNotSupportedException(
"Transactions with mandatory recipients are not currently supported on recipient "
+ recipientKey.encodeToBase64());
}
final String targetUrl = remoteNodeInfo.getUrl();
LOGGER.info("Publishing message to {}", targetUrl);
final byte[] encoded = payloadEncoder.encode(payload);
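// POST the encoded payload to the recipient's /push endpoint; any status other than 200 or 201 is a failure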
try (Response response =
client
.target(targetUrl)
.path("/push")
.request()
.post(Entity.entity(encoded, MediaType.APPLICATION_OCTET_STREAM_TYPE))) {
if (Response.Status.OK.getStatusCode() != response.getStatus()
&& Response.Status.CREATED.getStatusCode() != response.getStatus()) {
throw new PublishPayloadException("Unable to push payload to recipient url " + targetUrl);
}
LOGGER.info("Published to {}", targetUrl);
} catch (ProcessingException ex) {
LOGGER.debug("", ex);
throw new NodeOfflineException(URI.create(targetUrl));
}
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/test/java/com/quorum/tessera/api/exception/TransactionNotFoundExceptionMapperTest.java<|end_filename|>
package com.quorum.tessera.api.exception;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.transaction.exception.TransactionNotFoundException;
import jakarta.ws.rs.core.Response;
import org.junit.Test;
public class TransactionNotFoundExceptionMapperTest {
private TransactionNotFoundExceptionMapper instance = new TransactionNotFoundExceptionMapper();
@Test
public void toResponse() {
final TransactionNotFoundException transactionNotFoundException =
new TransactionNotFoundException("OUCH");
final Response result = instance.toResponse(transactionNotFoundException);
assertThat(result).isNotNull();
final String message = result.getEntity().toString();
assertThat(message).isEqualTo("OUCH");
assertThat(result.getStatus()).isEqualTo(404);
}
}
<|start_filename|>tessera-data/src/main/java/com/quorum/tessera/data/MessageHash.java<|end_filename|>
package com.quorum.tessera.data;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Lob;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Base64;
@Embeddable
public class MessageHash implements Serializable {
@Lob private byte[] hashBytes;
public MessageHash() {}
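// Defensive copies prevent callers from mutating the stored hash bytes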
public MessageHash(final byte[] hashBytes) {
this.hashBytes = Arrays.copyOf(hashBytes, hashBytes.length);
}
public void setHashBytes(final byte[] hashBytes) {
this.hashBytes = Arrays.copyOf(hashBytes, hashBytes.length);
}
public byte[] getHashBytes() {
return Arrays.copyOf(hashBytes, hashBytes.length);
}
@Override
public boolean equals(final Object o) {
return (o instanceof MessageHash) && Arrays.equals(hashBytes, ((MessageHash) o).hashBytes);
}
@Override
public int hashCode() {
return Arrays.hashCode(getHashBytes());
}
// FIXME: toString is being used to format messages
// rather than as a string representation of the object.
@Override
public String toString() {
return Base64.getEncoder().encodeToString(hashBytes);
}
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/main/java/com/quorum/tessera/p2p/recovery/ResendBatchPublisherProvider.java<|end_filename|>
package com.quorum.tessera.p2p.recovery;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.recovery.resend.ResendBatchPublisher;
public class ResendBatchPublisherProvider {
public static ResendBatchPublisher provider() {
RecoveryClient client = RecoveryClient.create();
PayloadEncoder payloadEncoder = PayloadEncoder.create(EncodedPayloadCodec.LEGACY);
return new RestResendBatchPublisher(payloadEncoder, client);
}
}
<|start_filename|>tessera-data/src/test/java/com/quorum/tessera/data/staging/internal/StagingEntityDAOProviderTest.java<|end_filename|>
package com.quorum.tessera.data.staging.internal;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.config.Config;
import com.quorum.tessera.config.ConfigFactory;
import com.quorum.tessera.config.JdbcConfig;
import com.quorum.tessera.data.DataSourceFactory;
import com.quorum.tessera.data.staging.StagingEntityDAO;
import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.Persistence;
import java.util.Collection;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class StagingEntityDAOProviderTest {
private boolean autocreateTables;
public StagingEntityDAOProviderTest(boolean autocreateTables) {
this.autocreateTables = autocreateTables;
}
@Test
public void defaultConstructorForCoverage() {
assertThat(new StagingEntityDAOProvider()).isNotNull();
}
@Test
public void provider() {
try (var mockedConfigFactory = mockStatic(ConfigFactory.class);
var mockedDataSourceFactory = mockStatic(DataSourceFactory.class);
var mockedPersistence = mockStatic(Persistence.class)) {
mockedPersistence
.when(() -> Persistence.createEntityManagerFactory(anyString(), anyMap()))
.thenReturn(mock(EntityManagerFactory.class));
Config config = mock(Config.class);
JdbcConfig jdbcConfig = mock(JdbcConfig.class);
when(jdbcConfig.isAutoCreateTables()).thenReturn(autocreateTables);
when(config.getJdbcConfig()).thenReturn(jdbcConfig);
ConfigFactory configFactory = mock(ConfigFactory.class);
when(configFactory.getConfig()).thenReturn(config);
mockedConfigFactory.when(ConfigFactory::create).thenReturn(configFactory);
mockedDataSourceFactory
.when(DataSourceFactory::create)
.thenReturn(mock(DataSourceFactory.class));
StagingEntityDAO result = StagingEntityDAOProvider.provider();
assertThat(result).isNotNull().isExactlyInstanceOf(StagingEntityDAOImpl.class);
mockedPersistence.verify(() -> Persistence.createEntityManagerFactory(anyString(), anyMap()));
mockedPersistence.verifyNoMoreInteractions();
}
}
@Parameterized.Parameters
public static Collection<Boolean> autoCreateTables() {
return List.of(true, false);
}
}
<|start_filename|>argon2/src/main/java/module-info.java<|end_filename|>
module tessera.argontwo {
requires de.mkammerer.argon2;
requires tessera.shared;
requires org.slf4j;
exports com.quorum.tessera.argon2;
uses com.quorum.tessera.argon2.Argon2;
provides com.quorum.tessera.argon2.Argon2 with
com.quorum.tessera.argon2.Argon2Impl;
}
<|start_filename|>server/jersey-server/src/main/java/com/quorum/tessera/server/http/VersionHeaderDecorator.java<|end_filename|>
package com.quorum.tessera.server.http;
import static com.quorum.tessera.shared.Constants.API_VERSION_HEADER;
import com.quorum.tessera.version.ApiVersion;
import jakarta.servlet.*;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class VersionHeaderDecorator implements Filter {
private static final Logger LOGGER = LoggerFactory.getLogger(VersionHeaderDecorator.class);
@Override
public void doFilter(
ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
throws IOException, ServletException {
final HttpServletResponse httpServletResponse = HttpServletResponse.class.cast(servletResponse);
final HttpServletRequest httpServletRequest = HttpServletRequest.class.cast(servletRequest);
LOGGER.debug("caller uri {}", httpServletRequest.getRequestURI());
final List<String> supportedApiVersions =
Collections.list(httpServletRequest.getHeaders(API_VERSION_HEADER));
LOGGER.debug("httpServletRequest.headers[{}] {}", API_VERSION_HEADER, supportedApiVersions);
List<String> versions = ApiVersion.versions();
versions.forEach(v -> httpServletResponse.addHeader(API_VERSION_HEADER, v));
filterChain.doFilter(servletRequest, servletResponse);
}
}
<|start_filename|>tessera-jaxrs/jaxrs-client/src/main/java/com/quorum/tessera/jaxrs/client/VersionHeaderDecorator.java<|end_filename|>
package com.quorum.tessera.jaxrs.client;
import com.quorum.tessera.shared.Constants;
import com.quorum.tessera.version.ApiVersion;
import jakarta.ws.rs.client.ClientRequestContext;
import jakarta.ws.rs.client.ClientRequestFilter;
import java.io.IOException;
public class VersionHeaderDecorator implements ClientRequestFilter {
@Override
public void filter(ClientRequestContext requestContext) throws IOException {
ApiVersion.versions()
.forEach(v -> requestContext.getHeaders().add(Constants.API_VERSION_HEADER, v));
}
}
<|start_filename|>server/jersey-server/src/test/java/com/quorum/tessera/server/jersey/SampleResource.java<|end_filename|>
package com.quorum.tessera.server.jersey;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.DELETE;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.UriInfo;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
@Singleton
@Path("/")
public class SampleResource {
private final Ping ping;
@Inject
public SampleResource(@Named("myBean") Ping ping) {
this.ping = Objects.requireNonNull(ping);
}
private Map<String, SamplePayload> store = new HashMap<>();
@Path("ping")
@GET
public String ping() {
System.out.println("PING");
return ping.ping();
}
@Produces(MediaType.APPLICATION_JSON)
@GET
@Path("find/{id}")
public Response find(@PathParam("id") String id) {
System.out.println("FIND " + id);
SamplePayload payload = store.get(id);
return Response.ok(payload, MediaType.APPLICATION_JSON).build();
}
@Consumes(MediaType.APPLICATION_JSON)
@Path("create")
@POST
public Response create(SamplePayload payload, @Context UriInfo uriInfo)
throws UnsupportedEncodingException {
System.out.println("CREATE" + payload);
String id = UUID.randomUUID().toString();
payload.setId(id);
store.put(id, payload);
URI location =
uriInfo.getBaseUriBuilder().path("find").path(URLEncoder.encode(id, "UTF-8")).build();
System.out.println("CREATE " + location);
return Response.status(Response.Status.CREATED).location(location).build();
}
@Path("{id}")
@DELETE
public Response delete(@PathParam("id") String id) {
SamplePayload deleted = store.remove(id);
return Response.ok(deleted).build();
}
}
<|start_filename|>tessera-core/src/main/java/com/quorum/tessera/transaction/exception/MandatoryRecipientsNotAvailableException.java<|end_filename|>
package com.quorum.tessera.transaction.exception;
import com.quorum.tessera.exception.TesseraException;
public class MandatoryRecipientsNotAvailableException extends TesseraException {
public MandatoryRecipientsNotAvailableException(String message) {
super(message);
}
}
<|start_filename|>tessera-data/src/main/java/module-info.java<|end_filename|>
open module tessera.data {
requires java.instrument;
requires jakarta.persistence;
requires org.bouncycastle.provider;
requires org.slf4j;
requires tessera.config;
requires tessera.enclave.api;
requires tessera.encryption.api;
requires tessera.shared;
requires java.sql;
requires java.sql.rowset;
requires com.zaxxer.hikari;
requires jakarta.validation;
requires tessera.eclipselink.utils;
// opens com.quorum.tessera.data to org.eclipse.persistence.core;
// opens com.quorum.tessera.data.staging to org.eclipse.persistence.core;
exports com.quorum.tessera.data;
exports com.quorum.tessera.data.staging;
uses com.quorum.tessera.enclave.PayloadDigest;
uses com.quorum.tessera.data.EncryptedTransactionDAO;
uses com.quorum.tessera.data.EncryptedRawTransactionDAO;
uses com.quorum.tessera.data.staging.StagingEntityDAO;
uses com.quorum.tessera.data.DataSourceFactory;
uses com.quorum.tessera.data.PrivacyGroupDAO;
provides com.quorum.tessera.data.EncryptedTransactionDAO with
com.quorum.tessera.data.internal.EncryptedTransactionDAOProvider;
provides com.quorum.tessera.data.EncryptedRawTransactionDAO with
com.quorum.tessera.data.internal.EncryptedRawTransactionDAOProvider;
provides com.quorum.tessera.data.staging.StagingEntityDAO with
com.quorum.tessera.data.staging.internal.StagingEntityDAOProvider;
provides com.quorum.tessera.data.PrivacyGroupDAO with
com.quorum.tessera.data.internal.PrivacyGroupDAOProvider;
provides com.quorum.tessera.data.DataSourceFactory with
com.quorum.tessera.data.internal.DataSourceFactoryProvider;
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/main/java/module-info.java<|end_filename|>
module tessera.partyinfo.jaxrs {
requires jakarta.json;
requires jakarta.validation;
requires jakarta.ws.rs;
requires org.slf4j;
requires tessera.config;
requires tessera.enclave.api;
requires tessera.encryption.api;
requires tessera.security;
requires tessera.shared;
requires tessera.context;
requires tessera.transaction;
requires tessera.data;
requires tessera.common.jaxrs;
requires tessera.jaxrs.client;
requires tessera.partyinfo;
requires org.apache.commons.lang3;
requires tessera.partyinfo.model;
requires tessera.recovery;
requires jakarta.xml.bind;
requires io.swagger.v3.oas.annotations;
exports com.quorum.tessera.p2p;
exports com.quorum.tessera.p2p.resend;
exports com.quorum.tessera.p2p.partyinfo;
exports com.quorum.tessera.p2p.recovery;
opens com.quorum.tessera.p2p.recovery;
// to
// org.eclipse.persistence.moxy,
// org.eclipse.persistence.core;
opens com.quorum.tessera.p2p.resend;
// to
// org.eclipse.persistence.moxy,
// org.eclipse.persistence.core,
// org.hibernate.validator;
uses com.quorum.tessera.p2p.recovery.RecoveryClient;
uses com.quorum.tessera.p2p.resend.ResendClient;
uses com.quorum.tessera.p2p.resend.TransactionRequester;
uses com.quorum.tessera.p2p.resend.ResendPartyStore;
provides com.quorum.tessera.config.apps.TesseraApp with
com.quorum.tessera.p2p.P2PRestApp;
provides com.quorum.tessera.recovery.resend.BatchTransactionRequester with
com.quorum.tessera.p2p.recovery.BatchTransactionRequesterProvider;
provides com.quorum.tessera.recovery.resend.ResendBatchPublisher with
com.quorum.tessera.p2p.recovery.ResendBatchPublisherProvider;
provides com.quorum.tessera.p2p.resend.ResendClient with
com.quorum.tessera.p2p.resend.ResendClientProvider;
provides com.quorum.tessera.p2p.recovery.RecoveryClient with
com.quorum.tessera.p2p.recovery.RecoveryClientProvider;
provides com.quorum.tessera.p2p.resend.TransactionRequester with
com.quorum.tessera.p2p.resend.TransactionRequesterProvider;
provides com.quorum.tessera.partyinfo.P2pClient with
com.quorum.tessera.p2p.partyinfo.P2pClientProvider;
provides com.quorum.tessera.p2p.resend.ResendPartyStore with
com.quorum.tessera.p2p.resend.ResendPartyStoreImpl;
}
<|start_filename|>shared/src/main/java/com/quorum/tessera/threading/TesseraScheduledExecutor.java<|end_filename|>
package com.quorum.tessera.threading;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import java.util.Objects;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Schedules a continuously running task as an alternative to a {@link Thread} running a {@code
* while(true)} loop
*
* <p>Also allows delays if required between each execution of the loop
*/
public class TesseraScheduledExecutor {
private static final Logger LOGGER = LoggerFactory.getLogger(TesseraScheduledExecutor.class);
private final ScheduledExecutorService executor;
private final Runnable action;
private final long rate;
private final long initialDelay;
public TesseraScheduledExecutor(
final ScheduledExecutorService executor,
final Runnable action,
final long rate,
final long delay) {
this.executor = Objects.requireNonNull(executor);
this.action = Objects.requireNonNull(action);
this.rate = rate;
this.initialDelay = delay;
}
/**
* Starts the submitted task and schedules it to run every given time frame. Catches any Throwable
* and logs it so that the scheduling doesn't break
*/
@PostConstruct
public void start() {
LOGGER.info("Starting {}", this.action.getClass().getSimpleName());
final Runnable exceptionSafeRunnable =
() -> {
try {
LOGGER.debug("{} has started running", action.getClass().getSimpleName());
this.action.run();
} catch (final Throwable ex) {
LOGGER.error(
"Error when executing action {}, exception details:",
action.getClass().getSimpleName(),
ex);
} finally {
LOGGER.debug("{} has finished running", action.getClass().getSimpleName());
}
};
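// Fixed delay between the end of one run and the start of the next, in milliseconds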
this.executor.scheduleWithFixedDelay(
exceptionSafeRunnable, initialDelay, rate, TimeUnit.MILLISECONDS);
LOGGER.info("Started {}", this.action.getClass().getSimpleName());
}
/**
* Stops any more executions of the submitted task from running. Does not cancel the currently
* running task, which may be blocking
*/
@PreDestroy
public void stop() {
LOGGER.info("Stopping {}", this.action.getClass().getSimpleName());
this.executor.shutdown();
LOGGER.info("Stopped {}", this.action.getClass().getSimpleName());
}
}
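// Illustrative usage sketch (not part of the original source; the executor and the
// pollingTask Runnable are hypothetical, caller-managed objects):
//   ScheduledExecutorService pool = Executors.newSingleThreadScheduledExecutor();
//   TesseraScheduledExecutor scheduled = new TesseraScheduledExecutor(pool, pollingTask, 2000L, 500L);
//   scheduled.start(); // re-runs pollingTask 2s after each run completes, following an initial 0.5s delay
//   scheduled.stop();  // called on shutdown to prevent further executions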
<|start_filename|>config/src/main/java/com/quorum/tessera/config/FeatureToggles.java<|end_filename|>
package com.quorum.tessera.config;
import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import java.util.Objects;
@XmlAccessorType(XmlAccessType.FIELD)
public class FeatureToggles {
@XmlElement(defaultValue = "false")
private boolean enableRemoteKeyValidation;
@XmlElement(defaultValue = "false")
private boolean enablePrivacyEnhancements;
@XmlElement(defaultValue = "false")
private boolean enableMultiplePrivateStates;
public FeatureToggles() {}
public boolean isEnableRemoteKeyValidation() {
return enableRemoteKeyValidation;
}
public void setEnableRemoteKeyValidation(final boolean enableRemoteKeyValidation) {
this.enableRemoteKeyValidation = enableRemoteKeyValidation;
}
public boolean isEnablePrivacyEnhancements() {
return enablePrivacyEnhancements;
}
public void setEnablePrivacyEnhancements(boolean enablePrivacyEnhancements) {
this.enablePrivacyEnhancements = enablePrivacyEnhancements;
}
public boolean isEnableMultiplePrivateStates() {
return enableMultiplePrivateStates;
}
public void setEnableMultiplePrivateStates(boolean enableMultiplePrivateStates) {
this.enableMultiplePrivateStates = enableMultiplePrivateStates;
}
@Override
public boolean equals(final Object o) {
if (!(o instanceof FeatureToggles)) {
return false;
}
final FeatureToggles that = (FeatureToggles) o;
return isEnableRemoteKeyValidation() == that.isEnableRemoteKeyValidation()
&& isEnablePrivacyEnhancements() == that.isEnablePrivacyEnhancements()
&& isEnableMultiplePrivateStates() == that.isEnableMultiplePrivateStates();
}
@Override
public int hashCode() {
return Objects.hash(
isEnableRemoteKeyValidation(),
isEnablePrivacyEnhancements(),
isEnableMultiplePrivateStates());
}
}
<|start_filename|>migration/multitenancy/src/main/java/com/quorum/tessera/multitenancy/migration/MigrationRunner.java<|end_filename|>
package com.quorum.tessera.multitenancy.migration;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityManagerFactory;
import java.util.Objects;
public class MigrationRunner {
private final EntityManagerFactory primary;
private final EntityManagerFactory secondary;
public MigrationRunner(final EntityManagerFactory primary, final EntityManagerFactory secondary) {
this.primary = Objects.requireNonNull(primary);
this.secondary = Objects.requireNonNull(secondary);
}
public void run() {
final EntityManager primaryEntityManager = primary.createEntityManager();
final EntityManager secondaryEntityManager = secondary.createEntityManager();
// migrate raw
final RawTransactionMigrator rawMigrator =
new RawTransactionMigrator(primaryEntityManager, secondaryEntityManager);
rawMigrator.migrate();
// migrate regular
final EncryptedTransactionMigrator etMigrator =
new EncryptedTransactionMigrator(primaryEntityManager, secondaryEntityManager);
etMigrator.migrate();
}
}
<|start_filename|>tessera-context/src/test/java/com/quorum/tessera/context/internal/DefaultRuntimeContextTest.java<|end_filename|>
package com.quorum.tessera.context.internal;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import com.openpojo.reflection.impl.PojoClassFactory;
import com.openpojo.validation.Validator;
import com.openpojo.validation.ValidatorBuilder;
import com.openpojo.validation.rule.impl.GetterMustExistRule;
import com.openpojo.validation.test.impl.GetterTester;
import com.quorum.tessera.config.keys.KeyEncryptor;
import com.quorum.tessera.encryption.PublicKey;
import jakarta.ws.rs.client.Client;
import java.net.URI;
import java.util.List;
import java.util.Set;
import org.junit.Test;
public class DefaultRuntimeContextTest {
@Test
public void openPojoTest() {
final Validator pojoValidator =
ValidatorBuilder.create().with(new GetterMustExistRule()).with(new GetterTester()).build();
pojoValidator.validate(PojoClassFactory.getPojoClass(DefaultRuntimeContext.class));
}
@Test
public void testToString() {
DefaultRuntimeContext instance =
new DefaultRuntimeContext(
Set.of(),
mock(KeyEncryptor.class),
List.of(),
List.of(),
mock(Client.class),
true,
true,
mock(URI.class),
true,
true,
true,
true,
true);
assertThat(instance).isNotNull();
assertThat(instance.toString()).isNotNull().isNotBlank();
}
@Test
public void getPublicKeys() {
PublicKey publicKey = mock(PublicKey.class);
Set<PublicKey> keys = Set.of(publicKey);
DefaultRuntimeContext instance =
new DefaultRuntimeContext(
keys,
mock(KeyEncryptor.class),
List.of(),
List.of(),
mock(Client.class),
true,
true,
mock(URI.class),
true,
true,
true,
true,
true);
assertThat(instance.getPublicKeys()).containsExactly(publicKey);
}
}
<|start_filename|>tessera-jaxrs/jaxrs-client/src/test/java/com/quorum/tessera/jaxrs/client/VersionHeaderDecoratorTest.java<|end_filename|>
package com.quorum.tessera.jaxrs.client;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.shared.Constants;
import com.quorum.tessera.version.ApiVersion;
import jakarta.ws.rs.client.ClientRequestContext;
import jakarta.ws.rs.core.MultivaluedHashMap;
import jakarta.ws.rs.core.MultivaluedMap;
import org.junit.Test;
public class VersionHeaderDecoratorTest {
@Test
public void filter() throws Exception {
ClientRequestContext requestContext = mock(ClientRequestContext.class);
MultivaluedMap headers = new MultivaluedHashMap();
when(requestContext.getHeaders()).thenReturn(headers);
VersionHeaderDecorator versionHeaderDecorator = new VersionHeaderDecorator();
versionHeaderDecorator.filter(requestContext);
assertThat(headers.get(Constants.API_VERSION_HEADER))
.isNotNull()
.isEqualTo(ApiVersion.versions());
int count = ApiVersion.versions().size();
verify(requestContext, times(count)).getHeaders();
verifyNoMoreInteractions(requestContext);
}
}
<|start_filename|>tessera-jaxrs/thirdparty-jaxrs/src/test/java/com/quorum/tessera/thirdparty/KeyResourceTest.java<|end_filename|>
package com.quorum.tessera.thirdparty;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.context.RuntimeContext;
import com.quorum.tessera.encryption.PublicKey;
import jakarta.json.Json;
import jakarta.json.JsonReader;
import jakarta.ws.rs.core.Response;
import java.io.StringReader;
import java.util.Base64;
import java.util.HashSet;
import java.util.Set;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class KeyResourceTest {
private KeyResource keyResource;
private RuntimeContext runtimeContext;
@Before
public void onSetUp() {
runtimeContext = mock(RuntimeContext.class);
keyResource = new KeyResource();
}
@After
public void onTearDown() {
verifyNoMoreInteractions(runtimeContext);
}
@Test
public void testGetPublicKeys() {
try (var mockedStaticRuntimeContext = mockStatic(RuntimeContext.class)) {
mockedStaticRuntimeContext.when(RuntimeContext::getInstance).thenReturn(runtimeContext);
Base64.Decoder base64Decoder = Base64.getDecoder();
final String keyJsonString =
"{\"keys\": [{\"key\": \"<KEY>}]}";
String key = "<KEY>;
Set<PublicKey> publicKeys = new HashSet<>();
publicKeys.add(PublicKey.from(base64Decoder.decode(key)));
when(runtimeContext.getPublicKeys()).thenReturn(publicKeys);
Response response = keyResource.getPublicKeys();
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
final String output = response.getEntity().toString();
final JsonReader expected = Json.createReader(new StringReader(keyJsonString));
final JsonReader actual = Json.createReader(new StringReader(output));
assertThat(expected.readObject()).isEqualTo(actual.readObject());
verify(runtimeContext).getPublicKeys();
mockedStaticRuntimeContext.verify(RuntimeContext::getInstance);
mockedStaticRuntimeContext.verifyNoMoreInteractions();
}
}
}
<|start_filename|>server/server-utils/src/main/java/module-info.java<|end_filename|>
module tessera.server.utils {
requires org.eclipse.jetty.server;
requires org.eclipse.jetty.util;
requires tessera.config;
requires tessera.security;
requires org.eclipse.jetty.unixsocket.server;
exports com.quorum.tessera.server.utils;
}
<|start_filename|>server/jersey-server/src/test/java/com/quorum/tessera/server/jersey/PingImpl.java<|end_filename|>
package com.quorum.tessera.server.jersey;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import java.util.Objects;
@Named("myBean")
@Singleton
public class PingImpl implements Ping {
private Pong pong;
public PingImpl() {
this.pong = null;
}
@Inject
public PingImpl(Pong pong) {
this.pong = Objects.requireNonNull(pong);
System.out.println("new PingImpl()" + this);
}
@PostConstruct
public void onConstruct() {
System.out.println("PingImpl.onConstruct " + this);
}
@PreDestroy
public void onDestroy() {
System.out.println("PingImpl.onDestroy " + this);
}
@Override
public String ping() {
return pong.pong();
}
}
<|start_filename|>enclave/enclave-jaxrs/src/main/java/com/quorum/tessera/enclave/rest/KeyValuePair.java<|end_filename|>
package com.quorum.tessera.enclave.rest;
import jakarta.xml.bind.annotation.XmlMimeType;
import jakarta.xml.bind.annotation.XmlRootElement;
import java.io.Serializable;
@XmlRootElement
public class KeyValuePair implements Serializable {
@XmlMimeType("base64Binary")
private byte[] key;
@XmlMimeType("base64Binary")
private byte[] value;
public KeyValuePair() {}
public KeyValuePair(byte[] key, byte[] value) {
this.key = key;
this.value = value;
}
public byte[] getKey() {
return key;
}
public void setKey(byte[] key) {
this.key = key;
}
public byte[] getValue() {
return value;
}
public void setValue(byte[] value) {
this.value = value;
}
}
<|start_filename|>enclave/enclave-jaxrs/src/main/java/com/quorum/tessera/enclave/rest/EnclaveApplication.java<|end_filename|>
package com.quorum.tessera.enclave.rest;
import com.quorum.tessera.config.AppType;
import com.quorum.tessera.config.CommunicationType;
import com.quorum.tessera.enclave.Enclave;
import com.quorum.tessera.enclave.EnclaveServer;
import jakarta.ws.rs.core.Application;
import java.util.Objects;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class EnclaveApplication extends Application
implements com.quorum.tessera.config.apps.TesseraApp {
private static final Logger LOGGER = LoggerFactory.getLogger(EnclaveApplication.class);
private final Enclave enclave;
public EnclaveApplication() {
this(EnclaveServer.create());
}
public EnclaveApplication(Enclave enclave) {
LOGGER.debug("Create EnclaveApplication with {}", enclave);
this.enclave = Objects.requireNonNull(enclave);
}
@Override
public Set<Object> getSingletons() {
return Set.of(new EnclaveResource(enclave), new DefaultExceptionMapper());
}
@Override
public AppType getAppType() {
return AppType.ENCLAVE;
}
@Override
public CommunicationType getCommunicationType() {
return CommunicationType.REST;
}
}
<|start_filename|>tests/acceptance-test/src/test/java/com/quorum/tessera/test/rest/PrivacyIT.java<|end_filename|>
package com.quorum.tessera.test.rest;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.api.SendRequest;
import com.quorum.tessera.api.SendResponse;
import com.quorum.tessera.enclave.PrivacyMode;
import com.quorum.tessera.test.Party;
import com.quorum.tessera.test.PartyHelper;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.util.List;
import org.junit.*;
import suite.*;
public class PrivacyIT {
private PartyHelper partyHelper = PartyHelper.create();
@Test
public void enhancedPrivacyTransactionsNotEnabled() {
Party legacySender = partyHelper.findByAlias(NodeAlias.D);
SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(new RestUtils().createTransactionData());
sendRequest.setFrom(legacySender.getPublicKey());
List<String> recipientList = List.of(partyHelper.findByAlias(NodeAlias.A).getPublicKey());
sendRequest.setTo(recipientList.toArray(new String[recipientList.size()]));
sendRequest.setPrivacyFlag(PrivacyMode.PARTY_PROTECTION.getPrivacyFlag());
sendRequest.setAffectedContractTransactions(new String[0]);
Response response =
legacySender
.getRestClientWebTarget()
.path("send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(403);
}
@Test
public void targetedNodeDoesNotHaveEnhancedPrivacyEnabled() {
Party sender = partyHelper.findByAlias(NodeAlias.A);
SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(new RestUtils().createTransactionData());
sendRequest.setFrom(sender.getPublicKey());
List<String> recipientList = List.of(partyHelper.findByAlias(NodeAlias.D).getPublicKey());
sendRequest.setTo(recipientList.toArray(new String[recipientList.size()]));
sendRequest.setPrivacyFlag(PrivacyMode.PARTY_PROTECTION.getPrivacyFlag());
sendRequest.setAffectedContractTransactions(new String[0]);
Response response =
sender
.getRestClientWebTarget()
.path("send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(500);
}
@Test
public void oneOfTargetedRecipientsDoesNotHaveEnhancedPrivacyEnabled() {
Party sender = partyHelper.findByAlias(NodeAlias.A);
SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(new RestUtils().createTransactionData());
sendRequest.setFrom(sender.getPublicKey());
List<String> recipientList =
List.of(
partyHelper.findByAlias(NodeAlias.C).getPublicKey(),
partyHelper.findByAlias(NodeAlias.D).getPublicKey());
sendRequest.setTo(recipientList.toArray(new String[recipientList.size()]));
sendRequest.setPrivacyFlag(PrivacyMode.PARTY_PROTECTION.getPrivacyFlag());
sendRequest.setAffectedContractTransactions(new String[0]);
Response response =
sender
.getRestClientWebTarget()
.path("send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(500);
}
@Test
public void sendPSVTransactionWithoutExecHashWillBeRejected() {
Party sender = partyHelper.findByAlias(NodeAlias.A);
SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(new RestUtils().createTransactionData());
sendRequest.setFrom(sender.getPublicKey());
List<String> recipientList = List.of(partyHelper.findByAlias(NodeAlias.B).getPublicKey());
sendRequest.setTo(recipientList.toArray(new String[recipientList.size()]));
sendRequest.setPrivacyFlag(PrivacyMode.PRIVATE_STATE_VALIDATION.getPrivacyFlag());
sendRequest.setAffectedContractTransactions(new String[0]);
Response response =
sender
.getRestClientWebTarget()
.path("send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(400);
}
@Test
public void sendTransactionsWithFlagMismatched() {
Party sender = partyHelper.findByAlias(NodeAlias.A);
final String originalHash = sendContractCreationTransaction(PrivacyMode.PARTY_PROTECTION);
SendRequest secondRequest = new SendRequest();
secondRequest.setPayload(new RestUtils().createTransactionData());
secondRequest.setFrom(sender.getPublicKey());
List<String> recipientList = List.of(partyHelper.findByAlias(NodeAlias.B).getPublicKey());
secondRequest.setTo(recipientList.toArray(new String[recipientList.size()]));
secondRequest.setPrivacyFlag(PrivacyMode.STANDARD_PRIVATE.getPrivacyFlag());
secondRequest.setAffectedContractTransactions(originalHash);
Response secondResponse =
sender
.getRestClientWebTarget()
.path("send")
.request()
.post(Entity.entity(secondRequest, MediaType.APPLICATION_JSON));
assertThat(secondResponse.getStatus()).isEqualTo(403);
}
@Test
public void sendPSVTransactionsWithRecipientsMismatched() {
Party sender = partyHelper.findByAlias(NodeAlias.A);
final String originalHash =
sendContractCreationTransaction(PrivacyMode.PRIVATE_STATE_VALIDATION);
SendRequest secondRequest = new SendRequest();
secondRequest.setPayload(new RestUtils().createTransactionData());
secondRequest.setFrom(sender.getPublicKey());
List<String> anotherList =
List.of(
partyHelper.findByAlias(NodeAlias.B).getPublicKey(),
partyHelper.findByAlias(NodeAlias.C).getPublicKey());
secondRequest.setTo(anotherList.toArray(new String[anotherList.size()]));
secondRequest.setPrivacyFlag(PrivacyMode.PRIVATE_STATE_VALIDATION.getPrivacyFlag());
secondRequest.setAffectedContractTransactions(originalHash);
secondRequest.setExecHash("execHash");
Response secondResponse =
sender
.getRestClientWebTarget()
.path("send")
.request()
.post(Entity.entity(secondRequest, MediaType.APPLICATION_JSON));
assertThat(secondResponse.getStatus()).isEqualTo(403);
}
@Test
public void updateExistingContractByParticipant() {
final String originalHash = sendContractCreationTransaction(PrivacyMode.PARTY_PROTECTION);
Party sender = partyHelper.findByAlias(NodeAlias.B);
SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(new RestUtils().createTransactionData());
sendRequest.setFrom(sender.getPublicKey());
List<String> recipientList = List.of(partyHelper.findByAlias(NodeAlias.A).getPublicKey());
sendRequest.setTo(recipientList.toArray(new String[recipientList.size()]));
sendRequest.setPrivacyFlag(PrivacyMode.PARTY_PROTECTION.getPrivacyFlag());
sendRequest.setAffectedContractTransactions(originalHash);
Response response =
sender
.getRestClientWebTarget()
.path("send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(201);
}
@Test
public void updateExistingContractByNonParticipant() {
final String originalHash = sendContractCreationTransaction(PrivacyMode.PARTY_PROTECTION);
Party sender = partyHelper.findByAlias(NodeAlias.C);
SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(new RestUtils().createTransactionData());
sendRequest.setFrom(sender.getPublicKey());
List<String> recipientList = List.of(partyHelper.findByAlias(NodeAlias.B).getPublicKey());
sendRequest.setTo(recipientList.toArray(new String[recipientList.size()]));
sendRequest.setPrivacyFlag(PrivacyMode.PARTY_PROTECTION.getPrivacyFlag());
sendRequest.setAffectedContractTransactions(originalHash);
Response response =
sender
.getRestClientWebTarget()
.path("send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(403);
}
private String sendContractCreationTransaction(PrivacyMode privacyMode) {
Party sender = partyHelper.findByAlias(NodeAlias.A);
SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(new RestUtils().createTransactionData());
sendRequest.setFrom(sender.getPublicKey());
List<String> recipientList = List.of(partyHelper.findByAlias(NodeAlias.B).getPublicKey());
sendRequest.setTo(recipientList.toArray(new String[recipientList.size()]));
sendRequest.setPrivacyFlag(privacyMode.getPrivacyFlag());
sendRequest.setAffectedContractTransactions(new String[0]);
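// Private state validation transactions must also carry an execution hash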
if (privacyMode == PrivacyMode.PRIVATE_STATE_VALIDATION) {
sendRequest.setExecHash("execHash");
}
Response response =
sender
.getRestClientWebTarget()
.path("send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(201);
final SendResponse result = response.readEntity(SendResponse.class);
return result.getKey();
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/keypairs/HashicorpVaultKeyPair.java<|end_filename|>
package com.quorum.tessera.config.keypairs;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.PositiveOrZero;
import jakarta.xml.bind.annotation.XmlElement;
public class HashicorpVaultKeyPair implements ConfigKeyPair {
@NotNull @XmlElement private String publicKeyId;
@NotNull @XmlElement private String privateKeyId;
@NotNull @XmlElement private String secretEngineName;
@NotNull @XmlElement private String secretName;
@PositiveOrZero(message = "{ValidPositiveInteger.message}")
@XmlElement
private Integer secretVersion;
public HashicorpVaultKeyPair(
String publicKeyId,
String privateKeyId,
String secretEngineName,
String secretName,
Integer secretVersion) {
this.publicKeyId = publicKeyId;
this.privateKeyId = privateKeyId;
this.secretEngineName = secretEngineName;
this.secretName = secretName;
this.secretVersion = secretVersion;
}
public String getPublicKeyId() {
return publicKeyId;
}
public String getPrivateKeyId() {
return privateKeyId;
}
public String getSecretEngineName() {
return secretEngineName;
}
public String getSecretName() {
return secretName;
}
public Integer getSecretVersion() {
return secretVersion;
}
@Override
public String getPublicKey() {
// keys are not fetched from vault yet so return null
return null;
}
@Override
public String getPrivateKey() {
// keys are not fetched from vault yet so return null
return null;
}
@Override
public void withPassword(char[] password) {
// password not used with vault stored keys
}
@Override
public char[] getPassword() {
// no password to return
return new char[0];
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/main/java/com/quorum/tessera/api/StoreRawRequest.java<|end_filename|>
package com.quorum.tessera.api;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import jakarta.xml.bind.annotation.XmlInlineBinaryData;
import java.util.Optional;
/**
* Model representation of a JSON body on incoming HTTP requests
*
* <p>Used when a new raw transaction is to be created where this node is the sender
*/
public class StoreRawRequest {
@Schema(
description = "data to be encrypted and stored",
required = true,
type = "string",
format = "base64")
@Size(min = 1)
@NotNull
@XmlInlineBinaryData
private byte[] payload;
@Schema(
description =
"public key identifying the key pair that will be used in the encryption; if not set, default used",
type = "string",
format = "base64")
@XmlInlineBinaryData
private byte[] from;
@XmlInlineBinaryData
public byte[] getPayload() {
return payload;
}
public void setPayload(byte[] payload) {
this.payload = payload;
}
public Optional<byte[]> getFrom() {
return Optional.ofNullable(from);
}
public void setFrom(byte[] from) {
this.from = from;
}
}
<|start_filename|>tests/acceptance-test/src/test/java/com/quorum/tessera/test/rest/SendReceiveBesuIT.java<|end_filename|>
package com.quorum.tessera.test.rest;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.api.ReceiveRequest;
import com.quorum.tessera.api.ReceiveResponse;
import com.quorum.tessera.api.SendRequest;
import com.quorum.tessera.api.SendResponse;
import com.quorum.tessera.test.Party;
import com.quorum.tessera.test.PartyHelper;
import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonObject;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.ClientBuilder;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.io.StringReader;
import java.util.Base64;
import java.util.Optional;
import org.junit.Test;
public class SendReceiveBesuIT {
private final Client client = ClientBuilder.newClient();
private final PartyHelper partyHelper = PartyHelper.create();
private RestUtils utils = new RestUtils();
private final PrivacyGroupTestUtil privacyGroupTestUtil = new PrivacyGroupTestUtil();
@Test
public void sendAndReceivePrivacyGroup() {
final Party a = partyHelper.findByAlias("A");
final Party b = partyHelper.findByAlias("B");
final String output = privacyGroupTestUtil.create("A", "B");
final JsonObject jsonObj = Json.createReader(new StringReader(output)).readObject();
final String groupId = jsonObj.getString("privacyGroupId");
byte[] transactionData = utils.createTransactionData();
final SendRequest sendRequest = new SendRequest();
sendRequest.setPrivacyGroupId(groupId);
sendRequest.setPayload(transactionData);
final Response response =
client
.target(partyHelper.findByAlias("A").getQ2TUri())
.path("/send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
final SendResponse result = response.readEntity(SendResponse.class);
final String hash = result.getKey();
assertThat(hash).isNotNull().isNotBlank();
// Hash length = 32 bytes
assertThat(Base64.getDecoder().decode(hash)).hasSize(32);
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
ReceiveRequest receiveRequest = new ReceiveRequest();
receiveRequest.setKey(hash);
final Response receiveResponse =
client
.target(a.getQ2TUri())
.path("/receive")
.request()
.post(Entity.entity(receiveRequest, MediaType.APPLICATION_JSON));
// validate result
assertThat(receiveResponse).isNotNull();
assertThat(receiveResponse.getStatus()).isEqualTo(200);
final ReceiveResponse receiveResult = receiveResponse.readEntity(ReceiveResponse.class);
assertThat(receiveResult.getPayload()).isEqualTo(transactionData);
assertThat(receiveResult.getSenderKey()).isEqualTo(a.getPublicKey());
assertThat(receiveResult.getPrivacyGroupId()).isEqualTo(groupId);
final Response receiveResponseOnB =
client
.target(b.getQ2TUri())
.path("/receive")
.request()
.post(Entity.entity(receiveRequest, MediaType.APPLICATION_JSON));
// validate result
assertThat(receiveResponseOnB).isNotNull();
assertThat(receiveResponseOnB.getStatus()).isEqualTo(200);
final ReceiveResponse receiveResultOnB = receiveResponseOnB.readEntity(ReceiveResponse.class);
assertThat(receiveResultOnB.getPayload()).isEqualTo(transactionData);
assertThat(receiveResultOnB.getSenderKey()).isEqualTo(a.getPublicKey());
assertThat(receiveResultOnB.getPrivacyGroupId()).isEqualTo(groupId);
}
@Test
public void sendAndReceivePrivacyFor() throws InterruptedException {
final Party a = partyHelper.findByAlias("A");
final Party d = partyHelper.findByAlias("D");
byte[] transactionData = utils.createTransactionData();
final SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(transactionData);
sendRequest.setTo(d.getPublicKey());
final Response response =
client
.target(a.getQ2TUri())
.path("/send")
.request()
.post(Entity.entity(sendRequest, MediaType.APPLICATION_JSON));
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
final SendResponse result = response.readEntity(SendResponse.class);
final String hash = result.getKey();
// Hash length = 32 bytes
assertThat(Base64.getDecoder().decode(hash)).hasSize(32);
String findOutput = privacyGroupTestUtil.find("A", "A", "D");
final JsonArray json = Json.createReader(new StringReader(findOutput)).readArray();
Optional<JsonObject> legacyGroup =
json.getValuesAs(JsonObject.class).stream()
.filter(v -> v.getString("type").equals("LEGACY"))
.findAny();
// Legacy privacy group was created
assertThat(legacyGroup).isPresent();
final String groupId = legacyGroup.get().getString("privacyGroupId");
ReceiveRequest receiveRequest = new ReceiveRequest();
receiveRequest.setKey(hash);
final Response receiveResponse =
client
.target(a.getQ2TUri())
.path("/receive")
.request()
.post(Entity.entity(receiveRequest, MediaType.APPLICATION_JSON));
// validate result
assertThat(receiveResponse).isNotNull();
assertThat(receiveResponse.getStatus()).isEqualTo(200);
final ReceiveResponse receiveResult = receiveResponse.readEntity(ReceiveResponse.class);
assertThat(receiveResult.getPayload()).isEqualTo(transactionData);
assertThat(receiveResult.getSenderKey()).isEqualTo(a.getPublicKey());
assertThat(receiveResult.getPrivacyGroupId()).isEqualTo(groupId);
final Response receiveResponseOnB =
client
.target(d.getQ2TUri())
.path("/receive")
.request()
.post(Entity.entity(receiveRequest, MediaType.APPLICATION_JSON));
// validate result
assertThat(receiveResponseOnB).isNotNull();
assertThat(receiveResponseOnB.getStatus()).isEqualTo(200);
final ReceiveResponse receiveResultOnB = receiveResponseOnB.readEntity(ReceiveResponse.class);
assertThat(receiveResultOnB.getPayload()).isEqualTo(transactionData);
assertThat(receiveResultOnB.getSenderKey()).isEqualTo(a.getPublicKey());
assertThat(receiveResultOnB.getPrivacyGroupId()).isEqualTo(groupId);
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/constraints/ValidContentValidator.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import com.quorum.tessera.io.FilesDelegate;
import jakarta.validation.ConstraintValidator;
import jakarta.validation.ConstraintValidatorContext;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class ValidContentValidator implements ConstraintValidator<ValidContent, Path> {
private ValidContent config;
@Override
public void initialize(ValidContent constraintAnnotation) {
this.config = constraintAnnotation;
}
@Override
public boolean isValid(Path path, ConstraintValidatorContext context) {
if (Objects.isNull(path)) {
return true;
}
if (!Files.exists(path)) {
return true;
}
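// Count non-blank lines and verify the total falls within the configured min/max bounds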
List<String> lines =
FilesDelegate.create()
.lines(path)
.filter(line -> !Objects.equals("", line))
.collect(Collectors.toList());
return lines.size() >= config.minLines() && lines.size() <= config.maxLines();
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/Peer.java<|end_filename|>
package com.quorum.tessera.config;
import com.quorum.tessera.config.constraints.ValidUrl;
import jakarta.validation.constraints.NotNull;
import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import java.util.Objects;
@XmlAccessorType(XmlAccessType.FIELD)
public class Peer extends ConfigItem {
@ValidUrl
@NotNull
@XmlElement(required = true)
private String url;
public Peer(String url) {
this.url = url;
}
public Peer() {}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
Peer peer = (Peer) o;
return Objects.equals(url, peer.url);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), url);
}
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/main/java/com/quorum/tessera/p2p/resend/RestResendClient.java<|end_filename|>
package com.quorum.tessera.p2p.resend;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.util.Objects;
class RestResendClient implements ResendClient {
private final Client client;
RestResendClient(final Client client) {
this.client = Objects.requireNonNull(client);
}
@Override
public boolean makeResendRequest(final String targetUrl, final ResendRequest request) {
final Entity<ResendRequest> outboundEntity = Entity.entity(request, MediaType.APPLICATION_JSON);
try (Response response =
client.target(targetUrl).path("/resend").request().post(outboundEntity)) {
return Response.Status.OK.getStatusCode() == response.getStatus();
}
}
}
<|start_filename|>tessera-core/src/test/java/com/quorum/tessera/transaction/ReceiveRequestTest.java<|end_filename|>
package com.quorum.tessera.transaction;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import com.quorum.tessera.data.MessageHash;
import com.quorum.tessera.encryption.PublicKey;
import org.junit.Test;
public class ReceiveRequestTest {
@Test(expected = NullPointerException.class)
public void buildWithNothing() {
ReceiveRequest.Builder.create().build();
}
@Test
public void buildWithTransactionHash() {
MessageHash messageHash = mock(MessageHash.class);
ReceiveRequest result =
ReceiveRequest.Builder.create().withTransactionHash(messageHash).build();
assertThat(result).isNotNull();
assertThat(result.getTransactionHash()).isNotNull().isSameAs(messageHash);
assertThat(result.getRecipient()).isNotPresent();
}
@Test(expected = NullPointerException.class)
public void buildOnlyWithRecipient() {
PublicKey recipient = mock(PublicKey.class);
ReceiveRequest.Builder.create().withRecipient(recipient).build();
}
@Test
public void buildWithTransactionHashAndRecipient() {
MessageHash messageHash = mock(MessageHash.class);
PublicKey recipient = mock(PublicKey.class);
ReceiveRequest result =
ReceiveRequest.Builder.create()
.withTransactionHash(messageHash)
.withRecipient(recipient)
.build();
assertThat(result).isNotNull();
assertThat(result.getTransactionHash()).isNotNull().isSameAs(messageHash);
assertThat(result.getRecipient()).containsSame(recipient);
assertThat(result.isRaw()).isFalse();
}
@Test
public void buildWithRaw() {
MessageHash messageHash = mock(MessageHash.class);
PublicKey recipient = mock(PublicKey.class);
ReceiveRequest req =
ReceiveRequest.Builder.create()
.withTransactionHash(messageHash)
.withRecipient(recipient)
.withRaw(true)
.build();
assertThat(req).isNotNull();
assertThat(req.getTransactionHash()).isNotNull().isSameAs(messageHash);
assertThat(req.getRecipient()).containsSame(recipient);
assertThat(req.isRaw()).isTrue();
}
}
<|start_filename|>tessera-jaxrs/jaxrs-client/src/test/java/com/quorum/tessera/jaxrs/client/ClientFactoryTest.java<|end_filename|>
package com.quorum.tessera.jaxrs.client;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.config.AppType;
import com.quorum.tessera.config.CommunicationType;
import com.quorum.tessera.config.ServerConfig;
import com.quorum.tessera.config.SslConfig;
import com.quorum.tessera.ssl.context.SSLContextFactory;
import jakarta.ws.rs.client.Client;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.net.ssl.SSLContext;
import org.glassfish.jersey.client.ClientProperties;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class ClientFactoryTest {
private SSLContextFactory sslContextFactory;
private ClientFactory factory;
@Before
public void setUp() {
sslContextFactory = mock(SSLContextFactory.class);
factory = new ClientFactory(sslContextFactory);
}
@After
public void after() {
verifyNoMoreInteractions(sslContextFactory);
}
@Test
public void testBuildInsecureClient() {
ServerConfig serverConfig = mock(ServerConfig.class);
when(serverConfig.isSsl()).thenReturn(false);
when(serverConfig.getProperties()).thenReturn(Collections.emptyMap());
Client client = factory.buildFrom(serverConfig);
assertThat(client).isNotNull();
}
@Test
public void testBuildSecureClientCAMode() throws URISyntaxException {
ServerConfig serverConfig = mock(ServerConfig.class);
SslConfig sslConfig = mock(SslConfig.class);
when(serverConfig.isSsl()).thenReturn(true);
when(serverConfig.getServerUri()).thenReturn(new URI("https://localhost:8080"));
when(serverConfig.getSslConfig()).thenReturn(sslConfig);
Map<String, String> props = new HashMap<>();
props.put("partyInfoInterval", "20000");
when(serverConfig.getProperties()).thenReturn(props);
SSLContext sslContext = mock(SSLContext.class);
when(sslContextFactory.from(serverConfig.getServerUri().toString(), sslConfig))
.thenReturn(sslContext);
Client client = factory.buildFrom(serverConfig);
assertThat(client).isNotNull();
Map clientProperties =
client.target(serverConfig.getServerUri()).getConfiguration().getProperties();
assertThat(clientProperties.get(ClientProperties.READ_TIMEOUT)).isEqualTo(15000);
assertThat(clientProperties.get(ClientProperties.CONNECT_TIMEOUT)).isEqualTo(15000);
verify(sslContextFactory).from(serverConfig.getServerUri().toString(), sslConfig);
}
@Test
public void createUnixSocketClient() {
ServerConfig serverConfig = new ServerConfig();
serverConfig.setServerAddress("unix:/tmp/bogus.socket");
serverConfig.setApp(AppType.Q2T);
serverConfig.setCommunicationType(CommunicationType.REST);
org.glassfish.jersey.client.JerseyClient result =
(org.glassfish.jersey.client.JerseyClient) factory.buildFrom(serverConfig);
assertThat(result.getConfiguration().getProperty("unixfile"))
.isNotNull()
.isInstanceOf(URI.class);
assertThat(result.getConfiguration().getProperty("unixfile").toString())
.isEqualTo("unix:/tmp/bogus.socket");
assertThat(result.getConfiguration().getConnectorProvider().getClass().getName())
.isEqualTo("com.quorum.tessera.jaxrs.unixsocket.JerseyUnixSocketConnectorProvider");
}
@Test
public void createDefaultInstance() {
ClientFactory clientFactory = new ClientFactory();
assertThat(clientFactory).isNotNull();
}
}
<|start_filename|>tessera-data/src/main/java/com/quorum/tessera/data/internal/PrivacyGroupDAOImpl.java<|end_filename|>
package com.quorum.tessera.data.internal;
import com.quorum.tessera.data.EntityManagerTemplate;
import com.quorum.tessera.data.PrivacyGroupDAO;
import com.quorum.tessera.data.PrivacyGroupEntity;
import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.PersistenceException;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
public class PrivacyGroupDAOImpl implements PrivacyGroupDAO {
private EntityManagerTemplate entityManagerTemplate;
public PrivacyGroupDAOImpl(EntityManagerFactory entityManagerFactory) {
this.entityManagerTemplate = new EntityManagerTemplate(entityManagerFactory);
}
@Override
public PrivacyGroupEntity save(PrivacyGroupEntity entity) {
return entityManagerTemplate.execute(
entityManager -> {
entityManager.persist(entity);
return entity;
});
}
@Override
public <T> PrivacyGroupEntity save(PrivacyGroupEntity entity, Callable<T> consumer) {
return entityManagerTemplate.execute(
entityManager -> {
entityManager.persist(entity);
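// Flush before invoking the callback so persistence failures surface first; checked exceptions
// thrown by the callback are rethrown wrapped in a PersistenceException.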
try {
entityManager.flush();
consumer.call();
return entity;
} catch (RuntimeException ex) {
throw ex;
} catch (Exception e) {
throw new PersistenceException(e);
}
});
}
@Override
public PrivacyGroupEntity update(PrivacyGroupEntity entity) {
return entityManagerTemplate.execute(
entityManager -> {
entityManager.merge(entity);
return entity;
});
}
@Override
public <T> PrivacyGroupEntity update(PrivacyGroupEntity entity, Callable<T> consumer) {
return entityManagerTemplate.execute(
entityManager -> {
entityManager.merge(entity);
try {
entityManager.flush();
consumer.call();
return entity;
} catch (RuntimeException ex) {
throw ex;
} catch (Exception e) {
throw new PersistenceException(e);
}
});
}
@Override
public Optional<PrivacyGroupEntity> retrieve(byte[] id) {
return entityManagerTemplate.execute(
entityManager ->
entityManager
.createNamedQuery("PrivacyGroup.FindById", PrivacyGroupEntity.class)
.setParameter("id", id)
.getResultStream()
.findAny());
}
@Override
public PrivacyGroupEntity retrieveOrSave(PrivacyGroupEntity entity) {
return entityManagerTemplate.retrieveOrSave(
() -> retrieve(entity.getId()).orElse(null), () -> entity);
}
@Override
public List<PrivacyGroupEntity> findByLookupId(byte[] lookupId) {
return entityManagerTemplate.execute(
entityManager ->
entityManager
.createNamedQuery("PrivacyGroup.FindByLookupId", PrivacyGroupEntity.class)
.setParameter("lookupId", lookupId)
.getResultStream()
.collect(Collectors.toList()));
}
@Override
public List<PrivacyGroupEntity> findAll() {
return entityManagerTemplate.execute(
em ->
em.createNamedQuery("PrivacyGroup.FindAll", PrivacyGroupEntity.class).getResultList());
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/main/java/com/quorum/tessera/api/exception/PrivacyViolationExceptionMapper.java<|end_filename|>
package com.quorum.tessera.api.exception;
import com.quorum.tessera.transaction.exception.PrivacyViolationException;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
import jakarta.ws.rs.ext.Provider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Provider
public class PrivacyViolationExceptionMapper implements ExceptionMapper<PrivacyViolationException> {
private static final Logger LOGGER =
LoggerFactory.getLogger(PrivacyViolationExceptionMapper.class);
@Override
public Response toResponse(final PrivacyViolationException exception) {
LOGGER.debug(null, exception);
return Response.status(Response.Status.FORBIDDEN)
.entity(exception.getMessage())
.type(MediaType.TEXT_PLAIN)
.build();
}
}
<|start_filename|>enclave/enclave-jaxrs/src/test/java/com/quorum/tessera/enclave/rest/Util.java<|end_filename|>
package com.quorum.tessera.enclave.rest;
import com.quorum.tessera.enclave.Enclave;
import jakarta.ws.rs.core.Application;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.JerseyTest;
import org.glassfish.jersey.test.TestProperties;
import org.slf4j.bridge.SLF4JBridgeHandler;
public class Util {
public static JerseyTest create(Enclave enclave) {
SLF4JBridgeHandler.removeHandlersForRootLogger();
SLF4JBridgeHandler.install();
return new JerseyTest() {
@Override
protected Application configure() {
enable(TestProperties.LOG_TRAFFIC);
enable(TestProperties.DUMP_ENTITY);
set(TestProperties.CONTAINER_PORT, "0");
return ResourceConfig.forApplication(new EnclaveApplication(enclave));
}
};
}
}
<|start_filename|>enclave/enclave-jaxrs/src/main/java/com/quorum/tessera/enclave/rest/EnclaveClientProvider.java<|end_filename|>
package com.quorum.tessera.enclave.rest;
import com.quorum.tessera.config.*;
import com.quorum.tessera.enclave.EnclaveClient;
import com.quorum.tessera.jaxrs.client.ClientFactory;
import jakarta.ws.rs.client.Client;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class EnclaveClientProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(EnclaveClientProvider.class);
public static EnclaveClient provider() {
Config config = ConfigFactory.create().getConfig();
LOGGER.debug("Creating RestfulEnclaveClient with {}", config);
Optional<ServerConfig> enclaveServerConfig =
config.getServerConfigs().stream().filter(sc -> sc.getApp() == AppType.ENCLAVE).findAny();
final ClientFactory clientFactory = new ClientFactory();
LOGGER.debug("Creating server context from config");
ServerConfig serverConfig = enclaveServerConfig.get();
LOGGER.debug("Created server context from config");
Client client = clientFactory.buildFrom(serverConfig);
LOGGER.info("Creating remoted enclave for {}", serverConfig.getServerUri());
return new RestfulEnclaveClient(client, serverConfig.getServerUri());
}
}
<|start_filename|>tessera-recover/src/test/java/com/quorum/tessera/recovery/workflow/internal/BatchWorkflowFactoryImplTest.java<|end_filename|>
package com.quorum.tessera.recovery.workflow.internal;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.data.EncryptedTransaction;
import com.quorum.tessera.discovery.Discovery;
import com.quorum.tessera.enclave.Enclave;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.partyinfo.node.NodeInfo;
import com.quorum.tessera.partyinfo.node.Recipient;
import com.quorum.tessera.recovery.resend.ResendBatchPublisher;
import com.quorum.tessera.recovery.workflow.BatchWorkflow;
import com.quorum.tessera.recovery.workflow.BatchWorkflowContext;
import com.quorum.tessera.recovery.workflow.BatchWorkflowFactory;
import com.quorum.tessera.service.Service;
import java.util.List;
import java.util.Set;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.MockedStatic;
public class BatchWorkflowFactoryImplTest {
private Enclave enclave = mock(Enclave.class);
private Discovery discovery = mock(Discovery.class);
private ResendBatchPublisher resendBatchPublisher = mock(ResendBatchPublisher.class);
private final MockedStatic<EncodedPayload.Builder> mockStaticPayloadBuilder =
mockStatic(EncodedPayload.Builder.class);
private final EncodedPayload.Builder mockPayloadBuilder = mock(EncodedPayload.Builder.class);
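// EncodedPayload.Builder.forRecipient(..) is mocked statically for every test; the static mock is
// verified and closed in onTearDown(), so each test must account for its recorded interactions.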
@Before
public void setUp() {
mockStaticPayloadBuilder
.when(() -> EncodedPayload.Builder.forRecipient(any(), any()))
.thenReturn(mockPayloadBuilder);
}
@After
public void onTearDown() {
verifyNoMoreInteractions(enclave, discovery, resendBatchPublisher, mockPayloadBuilder);
try {
mockStaticPayloadBuilder.verifyNoMoreInteractions();
} finally {
mockStaticPayloadBuilder.close();
}
}
@Test
public void loadMockBatchWorkflowFactory() {
BatchWorkflowFactory batchWorkflowFactory =
new BatchWorkflowFactoryImpl(enclave, discovery, resendBatchPublisher);
assertThat(batchWorkflowFactory).isExactlyInstanceOf(BatchWorkflowFactoryImpl.class);
}
@Test
public void createBatchWorkflowFactoryImplAndExecuteWorkflow() {
BatchWorkflowFactoryImpl batchWorkflowFactory =
new BatchWorkflowFactoryImpl(enclave, discovery, resendBatchPublisher);
BatchWorkflow batchWorkflow = batchWorkflowFactory.create(1L);
assertThat(batchWorkflow).isNotNull();
BatchWorkflowContext batchWorkflowContext = new BatchWorkflowContext();
PublicKey recipientKey = mock(PublicKey.class);
batchWorkflowContext.setRecipientKey(recipientKey);
PublicKey ownedKey = mock(PublicKey.class);
EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getSenderKey()).thenReturn(ownedKey);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipientKey));
EncryptedTransaction encryptedTransaction = mock(EncryptedTransaction.class);
when(encryptedTransaction.getPayload()).thenReturn(encodedPayload);
batchWorkflowContext.setEncryptedTransaction(encryptedTransaction);
batchWorkflowContext.setEncodedPayload(encodedPayload);
batchWorkflowContext.setBatchSize(100);
when(mockPayloadBuilder.build()).thenReturn(encodedPayload);
when(enclave.status()).thenReturn(Service.Status.STARTED);
when(enclave.getPublicKeys()).thenReturn(Set.of(ownedKey));
NodeInfo nodeInfo = mock(NodeInfo.class);
when(nodeInfo.getRecipients()).thenReturn(Set.of(Recipient.of(recipientKey, "url")));
when(discovery.getCurrent()).thenReturn(nodeInfo);
assertThat(batchWorkflow.execute(batchWorkflowContext)).isTrue();
assertThat(batchWorkflow.getPublishedMessageCount()).isOne();
verify(enclave).status();
verify(enclave, times(2)).getPublicKeys();
mockStaticPayloadBuilder.verify(() -> EncodedPayload.Builder.forRecipient(any(), any()));
verify(mockPayloadBuilder).build();
verify(discovery).getCurrent();
verify(resendBatchPublisher).publishBatch(any(), any());
}
@Test
public void workflowExecutedReturnFalse() {
BatchWorkflowFactoryImpl batchWorkflowFactory =
new BatchWorkflowFactoryImpl(enclave, discovery, resendBatchPublisher);
BatchWorkflow batchWorkflow = batchWorkflowFactory.create(999L);
assertThat(batchWorkflow).isNotNull();
BatchWorkflowContext batchWorkflowContext = new BatchWorkflowContext();
PublicKey publicKey = mock(PublicKey.class);
batchWorkflowContext.setRecipientKey(publicKey);
EncryptedTransaction encryptedTransaction = mock(EncryptedTransaction.class);
batchWorkflowContext.setEncryptedTransaction(encryptedTransaction);
batchWorkflowContext.setEncodedPayload(mock(EncodedPayload.class));
when(enclave.status()).thenReturn(Service.Status.STARTED);
assertThat(batchWorkflow.execute(batchWorkflowContext)).isFalse();
assertThat(batchWorkflow.getPublishedMessageCount()).isZero();
verify(enclave).status();
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/test/java/com/quorum/tessera/api/exception/MandatoryRecipientsNotAvailableExceptionMapperTest.java<|end_filename|>
package com.quorum.tessera.api.exception;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.transaction.exception.MandatoryRecipientsNotAvailableException;
import jakarta.ws.rs.core.Response;
import org.junit.Test;
public class MandatoryRecipientsNotAvailableExceptionMapperTest {
private MandatoryRecipientsNotAvailableExceptionMapper instance =
new MandatoryRecipientsNotAvailableExceptionMapper();
@Test
public void toResponse() {
final MandatoryRecipientsNotAvailableException ex =
new MandatoryRecipientsNotAvailableException("OUCH");
final Response result = instance.toResponse(ex);
assertThat(result).isNotNull();
final String message = (String) result.getEntity();
assertThat(message).isEqualTo("OUCH");
assertThat(result.getStatus()).isEqualTo(400);
}
}
<|start_filename|>tessera-jaxrs/transaction-jaxrs/src/main/java/com/quorum/tessera/q2t/internal/PrivacyGroupPublisherProvider.java<|end_filename|>
package com.quorum.tessera.q2t.internal;
import com.quorum.tessera.config.Config;
import com.quorum.tessera.config.ConfigFactory;
import com.quorum.tessera.discovery.Discovery;
import com.quorum.tessera.jaxrs.client.ClientFactory;
import com.quorum.tessera.privacygroup.publish.PrivacyGroupPublisher;
import jakarta.ws.rs.client.Client;
public class PrivacyGroupPublisherProvider {
public static PrivacyGroupPublisher provider() {
Discovery discovery = Discovery.create();
Config config = ConfigFactory.create().getConfig();
Client client = new ClientFactory().buildFrom(config.getP2PServerConfig());
return new RestPrivacyGroupPublisher(discovery, client);
}
}
<|start_filename|>tessera-data/src/test/java/com/quorum/tessera/data/staging/internal/StagingEntityDAOTest.java<|end_filename|>
package com.quorum.tessera.data.staging.internal;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.data.TestConfig;
import com.quorum.tessera.data.Utils;
import com.quorum.tessera.data.staging.StagingAffectedTransaction;
import com.quorum.tessera.data.staging.StagingEntityDAO;
import com.quorum.tessera.data.staging.StagingTransaction;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.enclave.PrivacyMode;
import jakarta.persistence.*;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Root;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.MockedStatic;
@RunWith(Parameterized.class)
public class StagingEntityDAOTest {
private EntityManagerFactory entityManagerFactory;
private StagingEntityDAO stagingEntityDAO;
private Map<String, StagingTransaction> transactions;
private TestConfig testConfig;
private MockedStatic<PayloadEncoder> mockedStaticPayloadEncoder;
private PayloadEncoder payloadEncoder;
private byte[] payloadData;
private EncodedPayload encodedPayload;
final EncodedPayloadCodec CODEC = EncodedPayloadCodec.current();
public StagingEntityDAOTest(TestConfig testConfig) {
this.testConfig = testConfig;
}
@Before
public void beforeTest() throws Exception {
mockedStaticPayloadEncoder = mockStatic(PayloadEncoder.class);
payloadData = "I LOve Sparrows".getBytes();
encodedPayload = mock(EncodedPayload.class);
payloadEncoder = mock(PayloadEncoder.class);
mockedStaticPayloadEncoder
.when(() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)))
.thenReturn(payloadEncoder);
when(payloadEncoder.decode(payloadData)).thenReturn(encodedPayload);
when(payloadEncoder.encode(encodedPayload)).thenReturn(payloadData);
Map properties = new HashMap();
properties.put("jakarta.persistence.jdbc.url", testConfig.getUrl());
properties.put("jakarta.persistence.jdbc.user", "junit");
properties.put("jakarta.persistence.jdbc.password", "");
properties.put(
"eclipselink.logging.logger", "org.eclipse.persistence.logging.slf4j.SLF4JLogger");
properties.put("eclipselink.logging.level", "FINE");
properties.put("eclipselink.logging.parameters", "true");
properties.put("eclipselink.logging.level.sql", "FINE");
properties.put("jakarta.persistence.schema-generation.database.action", "drop-and-create");
properties.put("eclipselink.cache.shared.default", "false");
properties.put(
"eclipselink.session.customizer", "com.quorum.tessera.eclipselink.AtomicLongSequence");
entityManagerFactory = Persistence.createEntityManagerFactory("tessera-recover", properties);
stagingEntityDAO = new StagingEntityDAOImpl(entityManagerFactory);
transactions = createFixtures();
}
@After
public void afterTest() throws Exception {
EntityManager entityManager = entityManagerFactory.createEntityManager();
entityManager.getTransaction().begin();
// entityManager.createQuery("delete from StagingTransactionVersion").executeUpdate();
entityManager.createQuery("delete from StagingAffectedTransaction").executeUpdate();
// entityManager.createQuery("delete from StagingRecipient").executeUpdate();
entityManager.createQuery("delete from StagingTransaction").executeUpdate();
entityManager.getTransaction().commit();
transactions.clear();
mockedStaticPayloadEncoder.close();
}
@Test
public void updateStageForBatch() {
final long validationStage = new Random().nextLong();
final int batchSize = 1;
int results = stagingEntityDAO.updateStageForBatch(batchSize, validationStage);
assertThat(results).isEqualTo(batchSize);
EntityManager entityManager = entityManagerFactory.createEntityManager();
CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
Root<StagingTransaction> root = criteriaQuery.from(StagingTransaction.class);
criteriaQuery
.select(criteriaBuilder.count(root))
.where(criteriaBuilder.equal(root.get("validationStage"), validationStage));
Long countPending =
entityManager
.createQuery(criteriaQuery)
.setParameter("stage", validationStage)
.getSingleResult();
assertThat(countPending).isEqualTo((long) batchSize);
}
@Test
public void testStagingQuery() {
final List<StagingTransaction> preStaging =
stagingEntityDAO.retrieveTransactionBatchOrderByStageAndHash(0, Integer.MAX_VALUE);
final AtomicLong stage = new AtomicLong(0);
final int batchSize = 10;
assertThat(preStaging.size()).isEqualTo(7);
preStaging.forEach(
stagingTransaction -> {
assertThat(stagingTransaction.getValidationStage()).isNull();
});
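// Repeatedly stage batches, bumping the stage counter each pass, until nothing more can be
// staged; a transaction is only staged once all of its affected transactions have been staged.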
while (stagingEntityDAO.updateStageForBatch(batchSize, stage.incrementAndGet()) != 0) {}
final List<StagingTransaction> verifiedTransactions =
stagingEntityDAO.retrieveTransactionBatchOrderByStageAndHash(0, Integer.MAX_VALUE);
// First tx to process will need to be Tx1
assertThat(verifiedTransactions.get(0).getValidationStage()).isEqualTo(1L);
assertThat(verifiedTransactions.get(0).getId()).isEqualTo(1L);
// Then tx2 (2 versions) and 3
assertThat(verifiedTransactions.get(1).getValidationStage()).isEqualTo(2L);
assertThat(verifiedTransactions.get(2).getValidationStage()).isEqualTo(2L);
assertThat(verifiedTransactions.get(3).getValidationStage()).isEqualTo(2L);
// Then transaction 4 as its affected tx (3) had been validated
assertThat(verifiedTransactions.get(4).getValidationStage()).isEqualTo(3L);
assertThat(verifiedTransactions.get(4).getId()).isEqualTo(4L);
// Then transaction 7 as all of its affected txs (1 and 4) had been validated
assertThat(verifiedTransactions.get(5).getValidationStage()).isEqualTo(4L);
assertThat(verifiedTransactions.get(5).getId()).isEqualTo(7L);
// Transaction 5 can never be validated as it depends on an unknown tx6
assertThat(verifiedTransactions.get(6).getValidationStage()).isNull();
assertThat(verifiedTransactions.get(6).getId()).isEqualTo(5L);
final List<StagingTransaction> allTransactions =
stagingEntityDAO.retrieveTransactionBatchOrderByStageAndHash(0, Integer.MAX_VALUE);
assertThat(allTransactions.stream().filter(et -> et.getValidationStage() == null).count())
.isEqualTo(1);
assertThat(stagingEntityDAO.countAll()).isEqualTo(7);
assertThat(stagingEntityDAO.countStaged()).isEqualTo(6);
assertThat(stagingEntityDAO.countAllAffected()).isEqualTo(7);
}
@Test
public void paginationCanCauseDifferentStagingValueButOrderShouldBeMaintained() {
final List<StagingTransaction> preStaging =
stagingEntityDAO.retrieveTransactionBatchOrderByStageAndHash(0, Integer.MAX_VALUE);
final AtomicLong stage = new AtomicLong(0);
final int batchSize = 1;
assertThat(preStaging.size()).isEqualTo(7);
preStaging.forEach(
stagingTransaction -> {
assertThat(stagingTransaction.getValidationStage()).isNull();
});
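// With a batch size of 1 each pass stages at most one transaction, so every resolvable
// transaction ends up on its own validation stage while the dependency ordering is preserved.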
while (stagingEntityDAO.updateStageForBatch(batchSize, stage.incrementAndGet()) != 0) {}
final List<StagingTransaction> verifiedTransactions =
stagingEntityDAO.retrieveTransactionBatchOrderByStageAndHash(0, Integer.MAX_VALUE);
// With a batch size of 1, the validation stage increases by one for each staged transaction
assertThat(verifiedTransactions.get(0).getValidationStage()).isEqualTo(1L);
assertThat(verifiedTransactions.get(1).getValidationStage()).isEqualTo(2L);
assertThat(verifiedTransactions.get(2).getValidationStage()).isEqualTo(3L);
assertThat(verifiedTransactions.get(3).getValidationStage()).isEqualTo(4L);
assertThat(verifiedTransactions.get(4).getValidationStage()).isEqualTo(5L);
assertThat(verifiedTransactions.get(5).getValidationStage()).isEqualTo(6L);
assertThat(verifiedTransactions.get(6).getValidationStage()).isNull();
final List<String> possibleOrdering =
Arrays.asList("1,21,22,3,4,7,5", "1,3,21,22,4,7,5", "1,3,4,21,22,7,5", "1,3,4,7,21,22,5");
final String order =
verifiedTransactions.stream()
.map(StagingTransaction::getId)
.map(String::valueOf)
.collect(Collectors.joining(","));
assertThat(possibleOrdering).contains(order);
final List<StagingTransaction> allTransactions =
stagingEntityDAO.retrieveTransactionBatchOrderByStageAndHash(0, Integer.MAX_VALUE);
assertThat(allTransactions.stream().filter(et -> et.getValidationStage() == null).count())
.isEqualTo(1);
assertThat(stagingEntityDAO.countAll()).isEqualTo(7);
assertThat(stagingEntityDAO.countStaged()).isEqualTo(6);
assertThat(stagingEntityDAO.countAllAffected()).isEqualTo(7);
}
@Test
public void testRetrieveTransactionByHash() {
final String txnHash7 = transactions.get("TXN7").getHash();
final Optional<StagingTransaction> stagingTransaction =
stagingEntityDAO.retrieveByHash(txnHash7);
assertThat(stagingTransaction).isPresent();
assertThat(stagingTransaction.get().getAffectedContractTransactions()).hasSize(2);
}
@Test
public void testUpdate() {
final String txnHash7 = transactions.get("TXN7").getHash();
final Optional<StagingTransaction> stagingTransaction =
stagingEntityDAO.retrieveByHash(txnHash7);
assertThat(stagingTransaction).isPresent();
StagingTransaction st = stagingTransaction.get();
st.setValidationStage(123L);
stagingEntityDAO.update(st);
final Optional<StagingTransaction> stagingTransactionAfterUpdate =
stagingEntityDAO.retrieveByHash(txnHash7);
assertThat(stagingTransactionAfterUpdate).isPresent();
assertThat(stagingTransactionAfterUpdate.get().getValidationStage()).isEqualTo(123L);
}
@Test
public void testSave() {
String txHash = Utils.createHashStr();
final StagingTransaction stagingTransaction = new StagingTransaction();
stagingTransaction.setHash(txHash);
stagingTransaction.setPrivacyMode(PrivacyMode.STANDARD_PRIVATE);
stagingTransaction.setEncodedPayloadCodec(CODEC);
stagingTransaction.setPayload(payloadData);
final StagingAffectedTransaction affected1 = new StagingAffectedTransaction();
affected1.setSourceTransaction(stagingTransaction);
affected1.setHash("affected1");
final StagingAffectedTransaction affected2 = new StagingAffectedTransaction();
affected2.setId(123L);
affected2.setSourceTransaction(stagingTransaction);
affected2.setHash("affected2");
stagingTransaction.setAffectedContractTransactions(
Stream.of(affected1, affected2).collect(Collectors.toSet()));
stagingEntityDAO.save(stagingTransaction);
assertThat(stagingEntityDAO.retrieveByHash(txHash)).isPresent();
final StagingTransaction retrieved = stagingEntityDAO.retrieveByHash(txHash).get();
assertThat(retrieved).isEqualTo(stagingTransaction);
assertThat(retrieved.getValidationStage()).isNull();
assertThat(retrieved.getPrivacyMode()).isEqualTo(PrivacyMode.STANDARD_PRIVATE);
assertThat(retrieved.getAffectedContractTransactions())
.containsExactlyInAnyOrder(affected1, affected2);
retrieved.getAffectedContractTransactions().forEach(a -> assertThat(a.getId()).isNotNull());
}
public Map<String, StagingTransaction> createFixtures() {
final EntityManager entityManager = entityManagerFactory.createEntityManager();
entityManager.getTransaction().begin();
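// Fixture graph: TXN1 stands alone; TXN2 has two versions (ids 21 and 22) depending on TXN1;
// TXN3 depends on TXN1; TXN4 depends on the not-yet-received TXN3; TXN5 depends on the
// never-received TXN6; TXN7 depends on TXN1 and TXN4.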
final String txnHash1 = Utils.createHashStr();
final StagingTransaction stTransaction1 = new StagingTransaction();
stTransaction1.setId(1L);
stTransaction1.setHash(txnHash1);
stTransaction1.setEncodedPayloadCodec(CODEC);
entityManager.persist(stTransaction1);
final String txnHash2 = Utils.createHashStr();
final StagingTransaction stTransaction2a = new StagingTransaction();
stTransaction2a.setId(21L);
stTransaction2a.setHash(txnHash2);
stTransaction2a.setEncodedPayloadCodec(CODEC);
StagingAffectedTransaction stAffectedContractTransaction21a = new StagingAffectedTransaction();
stAffectedContractTransaction21a.setHash(txnHash1);
stAffectedContractTransaction21a.setSourceTransaction(stTransaction2a);
stTransaction2a.getAffectedContractTransactions().add(stAffectedContractTransaction21a);
entityManager.persist(stTransaction2a);
// Another version of transaction 2
final StagingTransaction stTransaction2b = new StagingTransaction();
stTransaction2b.setId(22L);
stTransaction2b.setHash(txnHash2);
stTransaction2b.setEncodedPayloadCodec(CODEC);
StagingAffectedTransaction stAffectedContractTransaction21b = new StagingAffectedTransaction();
stAffectedContractTransaction21b.setHash(txnHash1);
stAffectedContractTransaction21b.setSourceTransaction(stTransaction2b);
stTransaction2b.getAffectedContractTransactions().add(stAffectedContractTransaction21b);
entityManager.persist(stTransaction2b);
final String txnHash4 = Utils.createHashStr();
// we are storing a transaction TXN4 which depends on another transaction TXN3 (which has not
// been received yet)
final String txnHash3 = Utils.createHashStr();
final StagingTransaction stTransaction4 = new StagingTransaction();
stTransaction4.setId(4L);
stTransaction4.setHash(txnHash4);
stTransaction4.setEncodedPayloadCodec(CODEC);
StagingAffectedTransaction stAffectedContractTransaction43 = new StagingAffectedTransaction();
stAffectedContractTransaction43.setHash(txnHash3);
stAffectedContractTransaction43.setSourceTransaction(stTransaction4);
stTransaction4.getAffectedContractTransactions().add(stAffectedContractTransaction43);
entityManager.persist(stTransaction4);
final StagingTransaction stTransaction3 = new StagingTransaction();
stTransaction3.setHash(txnHash3);
stTransaction3.setId(3L);
stTransaction3.setEncodedPayloadCodec(CODEC);
StagingAffectedTransaction stAffectedContractTransaction31 = new StagingAffectedTransaction();
stAffectedContractTransaction31.setHash(txnHash1);
stAffectedContractTransaction31.setSourceTransaction(stTransaction3);
stTransaction3.getAffectedContractTransactions().add(stAffectedContractTransaction31);
entityManager.persist(stTransaction3);
final String txnHash5 = Utils.createHashStr();
// TXN5 is an unresolvable transaction as it depends on TXN6, which is never received
final String txnHash6 = Utils.createHashStr();
final StagingTransaction stTransaction5 = new StagingTransaction();
stTransaction5.setHash(txnHash5);
stTransaction5.setId(5L);
stTransaction5.setEncodedPayloadCodec(CODEC);
StagingAffectedTransaction stAffectedContractTransaction56 = new StagingAffectedTransaction();
stAffectedContractTransaction56.setHash(txnHash6);
stAffectedContractTransaction56.setSourceTransaction(stTransaction5);
stTransaction5.getAffectedContractTransactions().add(stAffectedContractTransaction56);
entityManager.persist(stTransaction5);
final String txnHash7 = Utils.createHashStr();
// TXN7 depends on TXN1 and TXN4
final StagingTransaction stTransaction7 = new StagingTransaction();
stTransaction7.setHash(txnHash7);
stTransaction7.setId(7L);
stTransaction7.setEncodedPayloadCodec(CODEC);
StagingAffectedTransaction stAffectedContractTransaction71 = new StagingAffectedTransaction();
stAffectedContractTransaction71.setHash(txnHash1);
stAffectedContractTransaction71.setSourceTransaction(stTransaction7);
stTransaction7.getAffectedContractTransactions().add(stAffectedContractTransaction71);
StagingAffectedTransaction stAffectedContractTransaction74 = new StagingAffectedTransaction();
stAffectedContractTransaction74.setHash(txnHash4);
stAffectedContractTransaction74.setSourceTransaction(stTransaction7);
stTransaction7.getAffectedContractTransactions().add(stAffectedContractTransaction74);
entityManager.persist(stTransaction7);
entityManager.getTransaction().commit();
Map<String, StagingTransaction> transactions = new HashMap<>();
transactions.put("TXN1", stTransaction1);
transactions.put("TXN2A", stTransaction2a);
transactions.put("TXN2B", stTransaction2b);
transactions.put("TXN3", stTransaction3);
transactions.put("TXN4", stTransaction4);
transactions.put("TXN5", stTransaction5);
transactions.put("TXN7", stTransaction7);
return transactions;
}
@Test
public void createStagingEntityDAOFromServiceLoader() {
try (var mockedServiceLoader = mockStatic(ServiceLoader.class)) {
ServiceLoader serviceLoader = mock(ServiceLoader.class);
when(serviceLoader.findFirst()).thenReturn(Optional.of(mock(StagingEntityDAO.class)));
mockedServiceLoader
.when(() -> ServiceLoader.load(StagingEntityDAO.class))
.thenReturn(serviceLoader);
StagingEntityDAO.create();
verify(serviceLoader).findFirst();
verifyNoMoreInteractions(serviceLoader);
mockedServiceLoader.verify(() -> ServiceLoader.load(StagingEntityDAO.class));
mockedServiceLoader.verifyNoMoreInteractions();
}
}
@Parameterized.Parameters(name = "DB {0}")
public static Collection<TestConfig> connectionDetails() {
return List.of(TestConfig.values());
}
}
<|start_filename|>cli/config-cli/src/main/java/module-info.java<|end_filename|>
module tessera.cli.config {
requires java.management;
requires jakarta.validation;
requires jakarta.xml.bind;
requires info.picocli;
requires org.slf4j;
requires tessera.cli.api;
requires tessera.config;
requires tessera.encryption.api;
requires tessera.keygeneration;
requires tessera.shared;
uses com.quorum.tessera.cli.keypassresolver.KeyPasswordResolver;
uses com.quorum.tessera.passwords.PasswordReaderFactory;
uses com.quorum.tessera.key.generation.KeyGeneratorFactory;
uses com.quorum.tessera.config.cli.KeyDataMarshaller;
opens com.quorum.tessera.config.cli to
info.picocli;
exports com.quorum.tessera.config.cli;
provides com.quorum.tessera.config.cli.KeyDataMarshaller with
com.quorum.tessera.config.cli.DefaultKeyDataMarshaller;
provides com.quorum.tessera.config.cli.KeyVaultHandler with
com.quorum.tessera.config.cli.DispatchingKeyVaultHandler;
}
<|start_filename|>tests/acceptance-test/src/test/java/com/quorum/tessera/test/rest/MetricsIT.java<|end_filename|>
package com.quorum.tessera.test.rest;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.config.Config;
import com.quorum.tessera.config.ServerConfig;
import com.quorum.tessera.jaxrs.client.ClientFactory;
import com.quorum.tessera.test.Party;
import com.quorum.tessera.test.PartyHelper;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.core.Response;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.Test;
public class MetricsIT {
@Test
public void metrics() {
final PartyHelper partyHelper = PartyHelper.create();
Set<ServerConfig> serverConfigs =
partyHelper
.getParties()
.map(Party::getConfig)
.map(Config::getServerConfigs)
.flatMap(List::stream)
.collect(Collectors.toUnmodifiableSet());
ClientFactory clientFactory = new ClientFactory();
for (ServerConfig serverConfig : serverConfigs) {
Client c = clientFactory.buildFrom(serverConfig);
Response response = c.target(serverConfig.getServerUri()).path("metrics").request().get();
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
}
}
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/test/java/com/quorum/tessera/p2p/TransactionResourceTest.java<|end_filename|>
package com.quorum.tessera.p2p;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.p2p.recovery.ResendBatchRequest;
import com.quorum.tessera.p2p.resend.ResendRequest;
import com.quorum.tessera.recovery.resend.ResendBatchResponse;
import com.quorum.tessera.recovery.workflow.BatchResendManager;
import com.quorum.tessera.recovery.workflow.LegacyResendManager;
import com.quorum.tessera.transaction.TransactionManager;
import jakarta.ws.rs.core.Response;
import java.util.Base64;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.MockedStatic;
public class TransactionResourceTest {
private TransactionResource transactionResource;
private TransactionManager transactionManager;
private BatchResendManager batchResendManager;
private PayloadEncoder payloadEncoder;
private LegacyResendManager legacyResendManager;
private final MockedStatic<PayloadEncoder> payloadEncoderFactoryFunction =
mockStatic(PayloadEncoder.class);
@Before
public void onSetup() {
transactionManager = mock(TransactionManager.class);
batchResendManager = mock(BatchResendManager.class);
payloadEncoder = mock(PayloadEncoder.class);
legacyResendManager = mock(LegacyResendManager.class);
payloadEncoderFactoryFunction
.when(() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)))
.thenReturn(payloadEncoder);
transactionResource =
new TransactionResource(transactionManager, batchResendManager, legacyResendManager);
}
@After
public void onTearDown() {
try {
verifyNoMoreInteractions(
transactionManager, batchResendManager, payloadEncoder, legacyResendManager);
payloadEncoderFactoryFunction.verifyNoMoreInteractions();
} finally {
payloadEncoderFactoryFunction.close();
}
}
@Test
public void push() {
final byte[] someData = "SomeData".getBytes();
final EncodedPayload payload = mock(EncodedPayload.class);
when(payloadEncoder.decode(someData)).thenReturn(payload);
final Response result = transactionResource.push(someData, List.of("4.0,5.0"));
assertThat(result.getStatus()).isEqualTo(201);
assertThat(result.hasEntity()).isTrue();
verify(transactionManager).storePayload(payload);
verify(payloadEncoder).decode(someData);
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
}
@Test
public void resend() {
ResendRequest resendRequest = new ResendRequest();
resendRequest.setType("ALL");
resendRequest.setPublicKey(Base64.getEncoder().encodeToString("JUNIT".getBytes()));
EncodedPayload payload = mock(EncodedPayload.class);
com.quorum.tessera.recovery.resend.ResendResponse resendResponse =
mock(com.quorum.tessera.recovery.resend.ResendResponse.class);
when(resendResponse.getPayload()).thenReturn(payload);
when(legacyResendManager.resend(any(com.quorum.tessera.recovery.resend.ResendRequest.class)))
.thenReturn(resendResponse);
when(payloadEncoder.encode(payload)).thenReturn("SUCCESS".getBytes());
Response result = transactionResource.resend(resendRequest);
assertThat(result.getStatus()).isEqualTo(200);
assertThat(result.getEntity()).isEqualTo("SUCCESS".getBytes());
verify(payloadEncoder).encode(payload);
verify(legacyResendManager).resend(any(com.quorum.tessera.recovery.resend.ResendRequest.class));
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
}
@Test
public void resendBatch() {
ResendBatchRequest incoming = new ResendBatchRequest();
incoming.setPublicKey("someKey");
incoming.setBatchSize(1);
ResendBatchResponse resendResponse = ResendBatchResponse.from(1);
when(batchResendManager.resendBatch(any())).thenReturn(resendResponse);
Response result = transactionResource.resendBatch(incoming);
assertThat(result.getStatus()).isEqualTo(200);
com.quorum.tessera.p2p.recovery.ResendBatchResponse convertedResponse =
(com.quorum.tessera.p2p.recovery.ResendBatchResponse) result.getEntity();
assertThat(convertedResponse.getTotal()).isEqualTo(1);
ArgumentCaptor<com.quorum.tessera.recovery.resend.ResendBatchRequest> captor =
ArgumentCaptor.forClass(com.quorum.tessera.recovery.resend.ResendBatchRequest.class);
verify(batchResendManager).resendBatch(captor.capture());
com.quorum.tessera.recovery.resend.ResendBatchRequest convertedRequest = captor.getValue();
assertThat(convertedRequest.getPublicKey()).isEqualTo("someKey");
assertThat(convertedRequest.getBatchSize()).isEqualTo(1);
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/main/java/com/quorum/tessera/api/BesuReceiveResponse.java<|end_filename|>
package com.quorum.tessera.api;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.xml.bind.annotation.XmlMimeType;
/**
* Model representation of a JSON body on outgoing HTTP responses
*
* <p>Contains a Base64 encoded string that is the decrypted payload of a transaction
*/
public class BesuReceiveResponse {
@Schema(description = "decrypted ciphertext payload", type = "string", format = "base64")
@XmlMimeType("base64Binary")
private byte[] payload;
@Schema(description = "public key of the transaction sender", format = "base64")
private String senderKey;
@Schema(description = "privacy group id of the transaction", format = "base64")
private String privacyGroupId;
public BesuReceiveResponse() {}
public byte[] getPayload() {
return payload;
}
public void setPayload(final byte[] payload) {
this.payload = payload;
}
public String getSenderKey() {
return senderKey;
}
public void setSenderKey(String senderKey) {
this.senderKey = senderKey;
}
public String getPrivacyGroupId() {
return privacyGroupId;
}
public void setPrivacyGroupId(String privacyGroupId) {
this.privacyGroupId = privacyGroupId;
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/main/java/com/quorum/tessera/api/common/BaseResource.java<|end_filename|>
package com.quorum.tessera.api.common;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Returns a 200 for GET requests made to http://server and http://server/
*
* <p>This is required by Kubernetes Ingress load balancers, which require '/' to return 200 for their
* health-checks. https://github.com/jpmorganchase/tessera/issues/1064
*/
@io.swagger.v3.oas.annotations.Hidden
@Path("/")
public class BaseResource {
private static final Logger LOGGER = LoggerFactory.getLogger(BaseResource.class);
@GET
@Produces(MediaType.TEXT_PLAIN)
public Response get() {
LOGGER.debug("GET /");
return Response.ok().build();
}
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/main/java/com/quorum/tessera/p2p/recovery/RestRecoveryClient.java<|end_filename|>
package com.quorum.tessera.p2p.recovery;
import com.quorum.tessera.p2p.resend.ResendRequest;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.util.Objects;
public class RestRecoveryClient implements RecoveryClient {
private final Client client;
public RestRecoveryClient(final Client client) {
this.client = Objects.requireNonNull(client);
}
@Override
public boolean makeResendRequest(final String targetUrl, final ResendRequest request) {
final Entity<ResendRequest> outboundEntity = Entity.entity(request, MediaType.APPLICATION_JSON);
try (Response response =
client.target(targetUrl).path("/resend").request().post(outboundEntity)) {
return Response.Status.OK.getStatusCode() == response.getStatus();
}
}
@Override
public boolean pushBatch(String targetUrl, PushBatchRequest pushBatchRequest) {
final Response response =
client
.target(targetUrl)
.path("/pushBatch")
.request()
.post(Entity.entity(pushBatchRequest, MediaType.APPLICATION_JSON));
return Response.Status.OK.getStatusCode() == response.getStatus();
}
@Override
public ResendBatchResponse makeBatchResendRequest(String targetUrl, ResendBatchRequest request) {
final Response response =
client
.target(targetUrl)
.path("/resendBatch")
.request()
.post(Entity.entity(request, MediaType.APPLICATION_JSON));
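// Any non-OK status is surfaced to the caller as a null response rather than an exception.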
if (Response.Status.OK.getStatusCode() == response.getStatus()) {
return response.readEntity(ResendBatchResponse.class);
}
return null;
}
}
<|start_filename|>shared/src/main/java/com/quorum/tessera/version/CBORSupportVersion.java<|end_filename|>
package com.quorum.tessera.version;
public class CBORSupportVersion implements ApiVersion {
public static final String API_VERSION_5 = "5.0";
@Override
public String getVersion() {
return API_VERSION_5;
}
}
<|start_filename|>tessera-recover/src/test/java/com/quorum/tessera/recovery/workflow/internal/LegacyResendManagerProviderTest.java<|end_filename|>
package com.quorum.tessera.recovery.workflow.internal;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.mockStatic;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.discovery.Discovery;
import com.quorum.tessera.enclave.Enclave;
import com.quorum.tessera.recovery.workflow.LegacyResendManager;
import com.quorum.tessera.transaction.publish.PayloadPublisher;
import org.junit.Test;
public class LegacyResendManagerProviderTest {
@Test
public void provider() {
try (var enclaveMockedStatic = mockStatic(Enclave.class);
var encryptedTransactionDAOMockedStatic = mockStatic(EncryptedTransactionDAO.class);
var payloadPublisherMockedStatic = mockStatic(PayloadPublisher.class);
var discoveryMockedStatic = mockStatic(Discovery.class)) {
enclaveMockedStatic.when(Enclave::create).thenReturn(mock(Enclave.class));
encryptedTransactionDAOMockedStatic
.when(EncryptedTransactionDAO::create)
.thenReturn(mock(EncryptedTransactionDAO.class));
payloadPublisherMockedStatic
.when(PayloadPublisher::create)
.thenReturn(mock(PayloadPublisher.class));
discoveryMockedStatic.when(Discovery::create).thenReturn(mock(Discovery.class));
LegacyResendManager legacyResendManager = LegacyResendManagerProvider.provider();
assertThat(legacyResendManager).isNotNull();
enclaveMockedStatic.verify(Enclave::create);
enclaveMockedStatic.verifyNoMoreInteractions();
encryptedTransactionDAOMockedStatic.verify(EncryptedTransactionDAO::create);
encryptedTransactionDAOMockedStatic.verifyNoMoreInteractions();
payloadPublisherMockedStatic.verify(PayloadPublisher::create);
discoveryMockedStatic.verify(Discovery::create);
discoveryMockedStatic.verifyNoMoreInteractions();
}
}
@Test
public void defaultConstructorForCoverage() {
assertThat(new LegacyResendManagerProvider()).isNotNull();
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/test/java/com/quorum/tessera/api/constraint/RequestPrivacyValidatorTest.java<|end_filename|>
package com.quorum.tessera.api.constraint;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import com.quorum.tessera.api.SendRequest;
import com.quorum.tessera.api.SendSignedRequest;
import jakarta.validation.ConstraintValidatorContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class RequestPrivacyValidatorTest {
private RequestPrivacyValidator validator = new RequestPrivacyValidator();
private ConstraintValidatorContext context;
private ConstraintValidatorContext.ConstraintViolationBuilder builder;
@Before
public void init() {
context = mock(ConstraintValidatorContext.class);
builder = mock(ConstraintValidatorContext.ConstraintViolationBuilder.class);
when(builder.addConstraintViolation()).thenReturn(context);
when(context.buildConstraintViolationWithTemplate(any())).thenReturn(builder);
}
@After
public void tearDown() {
verifyNoMoreInteractions(context);
verifyNoMoreInteractions(builder);
}
@Test
public void testPrivacyValidationOnSendRequest() {
SendRequest request = new SendRequest();
request.setPrivacyFlag(-1000);
assertThat(validator.isValid(request, context)).isTrue();
request.setPrivacyFlag(0);
assertThat(validator.isValid(request, context)).isTrue();
request.setPrivacyFlag(1);
assertThat(validator.isValid(request, context)).isTrue();
request.setPrivacyFlag(2);
assertThat(validator.isValid(request, context)).isTrue();
request.setPrivacyFlag(3);
assertThat(validator.isValid(request, context)).isFalse();
verify(context).buildConstraintViolationWithTemplate("Exec hash missing");
verify(builder).addConstraintViolation();
request.setExecHash("execHash");
assertThat(validator.isValid(request, context)).isTrue();
}
@Test
public void testPrivacyValidationOnSendSignedRequest() {
SendSignedRequest request = new SendSignedRequest();
request.setPrivacyFlag(-1000);
assertThat(validator.isValid(request, context)).isTrue();
request.setPrivacyFlag(0);
assertThat(validator.isValid(request, context)).isTrue();
request.setPrivacyFlag(1);
assertThat(validator.isValid(request, context)).isTrue();
request.setPrivacyFlag(2);
assertThat(validator.isValid(request, context)).isTrue();
request.setPrivacyFlag(3);
assertThat(validator.isValid(request, context)).isFalse();
verify(context).buildConstraintViolationWithTemplate("Exec hash missing");
verify(builder).addConstraintViolation();
request.setExecHash("execHash");
assertThat(validator.isValid(request, context)).isTrue();
}
@Test
public void testWrongUsage() {
Object someObject = new Object();
assertThat(validator.isValid(someObject, context)).isFalse();
verify(context)
.buildConstraintViolationWithTemplate(
"Invalid usage. This validator can only be apply to SendRequest or SendSignedRequest");
verify(builder).addConstraintViolation();
}
}
<|start_filename|>config/src/test/java/com/quorum/tessera/config/ObjectFactoryTest.java<|end_filename|>
package com.quorum.tessera.config;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import jakarta.xml.bind.JAXBElement;
import org.junit.Before;
import org.junit.Test;
public class ObjectFactoryTest {
private ObjectFactory objectFactory;
@Before
public void setUp() {
this.objectFactory = new ObjectFactory();
}
@Test
public void createConfiguration() {
final Config configuration = mock(Config.class);
final JAXBElement<Config> element = objectFactory.createConfiguration(configuration);
assertThat(element).isNotNull();
assertThat(element.getValue()).isSameAs(configuration);
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/keypairs/FilesystemKeyPair.java<|end_filename|>
package com.quorum.tessera.config.keypairs;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.quorum.tessera.config.KeyDataConfig;
import com.quorum.tessera.config.adapters.PathAdapter;
import com.quorum.tessera.config.constraints.ValidBase64;
import com.quorum.tessera.config.constraints.ValidContent;
import com.quorum.tessera.config.constraints.ValidPath;
import com.quorum.tessera.config.keys.KeyEncryptor;
import com.quorum.tessera.config.util.JaxbUtil;
import com.quorum.tessera.io.IOCallback;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import jakarta.validation.constraints.Size;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import java.nio.file.Files;
import java.nio.file.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FilesystemKeyPair implements ConfigKeyPair {
private static final Logger LOGGER = LoggerFactory.getLogger(FilesystemKeyPair.class);
@ValidContent(
minLines = 1,
maxLines = 1,
message = "file expected to contain a single non empty value")
@NotNull
@ValidPath(checkExists = true, message = "File does not exist")
@XmlElement
@XmlJavaTypeAdapter(PathAdapter.class)
private final Path publicKeyPath;
@ValidContent(minLines = 1, message = "file expected to contain at least one line")
@NotNull
@ValidPath(checkExists = true, message = "File does not exist")
@XmlElement
@XmlJavaTypeAdapter(PathAdapter.class)
private final Path privateKeyPath;
private InlineKeypair inlineKeypair;
private char[] password;
private final KeyEncryptor keyEncryptor;
// public FilesystemKeyPair(final Path publicKeyPath, final Path privateKeyPath) {
// this(
// publicKeyPath,
// privateKeyPath,
// KeyEncryptorFactory.newFactory()
// .create(
// new EncryptorConfig() {
// {
// setType(EncryptorType.NACL);
// }
// }));
// }
public FilesystemKeyPair(
final Path publicKeyPath, final Path privateKeyPath, final KeyEncryptor keyEncryptor) {
this.publicKeyPath = publicKeyPath;
this.privateKeyPath = privateKeyPath;
this.keyEncryptor = keyEncryptor;
try {
loadKeys();
} catch (final Exception ex) {
// silently discard errors as these get picked up by the validator
LOGGER.debug("Unable to read key files", ex);
}
}
@Override
@Size(min = 1)
@ValidBase64(message = "Invalid Base64 key provided")
public String getPublicKey() {
if (this.inlineKeypair == null) {
return null;
}
return this.inlineKeypair.getPublicKey();
}
@Override
@Size(min = 1)
@ValidBase64(message = "Invalid Base64 key provided")
@Pattern(
regexp = "^((?!NACL_FAILURE).)*$",
message =
"Could not decrypt the private key with the provided password, please double check the passwords provided")
public String getPrivateKey() {
if (this.inlineKeypair == null) {
return null;
}
return this.inlineKeypair.getPrivateKey();
}
@Override
public void withPassword(final char[] password) {
this.password = password;
if (this.inlineKeypair != null) {
this.inlineKeypair.withPassword(this.password);
}
}
@Override
public char[] getPassword() {
return this.password;
}
public Path getPublicKeyPath() {
return publicKeyPath;
}
public Path getPrivateKeyPath() {
return privateKeyPath;
}
public InlineKeypair getInlineKeypair() {
return inlineKeypair;
}
private void loadKeys() {
this.inlineKeypair =
new InlineKeypair(
IOCallback.execute(() -> new String(Files.readAllBytes(this.publicKeyPath), UTF_8)),
JaxbUtil.unmarshal(
IOCallback.execute(() -> Files.newInputStream(privateKeyPath)),
KeyDataConfig.class),
keyEncryptor);
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/util/jaxb/MarshallerBuilder.java<|end_filename|>
package com.quorum.tessera.config.util.jaxb;
import com.quorum.tessera.config.util.JaxbUtil;
import jakarta.xml.bind.JAXBContext;
import jakarta.xml.bind.Marshaller;
public class MarshallerBuilder {
private MarshallerBuilder() {}
public static MarshallerBuilder create() {
return new MarshallerBuilder();
}
private boolean beanvalidation = true;
private MediaType mediaType = MediaType.JSON;
public MarshallerBuilder withoutBeanValidation() {
this.beanvalidation = false;
return this;
}
public MarshallerBuilder withXmlMediaType() {
this.mediaType = MediaType.XML;
return this;
}
public Marshaller build() {
return JaxbCallback.execute(
() -> {
JAXBContext jAXBContext = JAXBContext.newInstance(JaxbUtil.JAXB_CLASSES);
Marshaller marshaller = jAXBContext.createMarshaller();
final Class<Enum> beanValidationModeType =
Class.class.cast(
marshaller.getProperty("eclipselink.beanvalidation.mode").getClass());
final Enum enu = Enum.valueOf(beanValidationModeType, beanvalidation ? "AUTO" : "NONE");
marshaller.setProperty("eclipselink.beanvalidation.mode", enu);
marshaller.setProperty("eclipselink.media-type", mediaType.getValue());
marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
if (mediaType == MediaType.JSON) {
marshaller.setProperty("eclipselink.json.include-root", false);
marshaller.setProperty("eclipselink.json.reduce-any-arrays", true);
}
return marshaller;
});
}
}
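A minimal usage sketch (an assumption, not part of the original file): building a JSON marshaller with bean validation switched off and writing a Config instance to stdout. It assumes Config is a JAXB root type that the EclipseLink MOXy marshaller can write directly, as JaxbUtil does elsewhere in this module.

import com.quorum.tessera.config.Config;
import com.quorum.tessera.config.util.jaxb.MarshallerBuilder;
import jakarta.xml.bind.Marshaller;

public class MarshallerBuilderUsageExample {
  public static void main(String[] args) throws Exception {
    // bean validation is disabled so a partially populated Config can still be written out;
    // the media type defaults to JSON unless withXmlMediaType() is called
    Marshaller marshaller = MarshallerBuilder.create().withoutBeanValidation().build();
    marshaller.marshal(new Config(), System.out);
  }
}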
<|start_filename|>tessera-jaxrs/common-jaxrs/src/main/java/com/quorum/tessera/api/StoreRawResponse.java<|end_filename|>
package com.quorum.tessera.api;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.xml.bind.annotation.XmlInlineBinaryData;
/**
 * Model representation of a JSON body returned in outgoing HTTP responses
 *
 * <p>A response to a {@link StoreRawRequest} after the raw transaction has been saved
 */
public class StoreRawResponse {
@Schema(type = "string", format = "base64", description = "hash of encrypted payload")
@XmlInlineBinaryData
private byte[] key;
public StoreRawResponse(byte[] key) {
this.key = key;
}
public StoreRawResponse() {}
public byte[] getKey() {
return key;
}
public void setKey(byte[] key) {
this.key = key;
}
}
<|start_filename|>tests/acceptance-test/src/test/java/com/quorum/tessera/test/rest/PrivacyGroupTestUtil.java<|end_filename|>
package com.quorum.tessera.test.rest;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.test.Party;
import com.quorum.tessera.test.PartyHelper;
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObject;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.util.Arrays;
import java.util.stream.Stream;
public class PrivacyGroupTestUtil {
private PartyHelper partyHelper = PartyHelper.create();
public String create(String... aliases) {
JsonArrayBuilder members = Json.createArrayBuilder();
Stream.of(aliases).map(partyHelper::findByAlias).map(Party::getPublicKey).forEach(members::add);
Party sender = partyHelper.findByAlias(aliases[0]);
JsonObject json =
Json.createObjectBuilder()
.add("addresses", members)
.add("from", sender.getPublicKey())
.add("name", "Organisation " + Arrays.toString(aliases))
.add("description", "Contains members of Organisation " + Arrays.toString(aliases))
.build();
final Response response =
sender
.getRestClient()
.target(sender.getQ2TUri())
.path("/createPrivacyGroup")
.request()
.post(Entity.entity(json, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(200);
return response.readEntity(String.class);
}
public String retrieve(String targetNode, String groupId) {
JsonObject reqJson = Json.createObjectBuilder().add("privacyGroupId", groupId).build();
Party node = partyHelper.findByAlias(targetNode);
final Response response =
node.getRestClient()
.target(node.getQ2TUri())
.path("/retrievePrivacyGroup")
.request()
.post(Entity.entity(reqJson, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(200);
return response.readEntity(String.class);
}
public String find(String targetNode, String... aliases) {
JsonArrayBuilder members = Json.createArrayBuilder();
Stream.of(aliases).map(partyHelper::findByAlias).map(Party::getPublicKey).forEach(members::add);
JsonObject json = Json.createObjectBuilder().add("addresses", members).build();
Party node = partyHelper.findByAlias(targetNode);
final Response response =
node.getRestClient()
.target(node.getQ2TUri())
.path("/findPrivacyGroup")
.request()
.post(Entity.entity(json, MediaType.APPLICATION_JSON));
assertThat(response.getStatus()).isEqualTo(200);
return response.readEntity(String.class);
}
}
<|start_filename|>enclave/enclave-api/src/main/java/com/quorum/tessera/enclave/PayloadEncoder.java<|end_filename|>
package com.quorum.tessera.enclave;
import java.util.*;
/** Encodes and decodes an {@link EncodedPayload} to and from its binary representation */
public interface PayloadEncoder {
/**
* Encodes the payload to a byte array
*
* @param payload the payload to encode
* @return the byte array representing the encoded payload
*/
byte[] encode(EncodedPayload payload);
/**
* Decodes a byte array back into an encrypted payload
*
* @param input The byte array to decode into an EncodedPayload
* @return the decoded payload
*/
EncodedPayload decode(byte[] input);
EncodedPayloadCodec encodedPayloadCodec();
static PayloadEncoder create(EncodedPayloadCodec encodedPayloadCodec) {
return ServiceLoader.load(PayloadEncoder.class).stream()
.map(ServiceLoader.Provider::get)
.filter(e -> e.encodedPayloadCodec() == encodedPayloadCodec)
.reduce(
(l, r) -> {
throw new IllegalStateException(
"Resolved multiple encoders for codec " + encodedPayloadCodec);
})
.orElseThrow(
() -> new IllegalStateException("No encoder found for " + encodedPayloadCodec));
}
}
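An illustrative round-trip sketch, not from the original file: it resolves the CBOR implementation through the ServiceLoader-backed create factory above and checks that decoding an encoded payload yields an equal object. The builder calls mirror those used in CBOREncoderTest later in this document; the key and nonce values are placeholders.

import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.encryption.Nonce;
import com.quorum.tessera.encryption.PublicKey;
import java.util.List;

public class PayloadEncoderRoundTripExample {
  public static void main(String[] args) {
    // resolve the encoder registered for the CBOR codec via ServiceLoader
    PayloadEncoder encoder = PayloadEncoder.create(EncodedPayloadCodec.CBOR);

    EncodedPayload payload =
        EncodedPayload.Builder.create()
            .withSenderKey(PublicKey.from("sender".getBytes()))
            .withCipherText("text".getBytes())
            .withCipherTextNonce(new Nonce("cipherTextNonce".getBytes()))
            .withRecipientNonce(new Nonce("recipientNonce".getBytes()))
            .withRecipientBoxes(List.of("box1".getBytes()))
            .withRecipientKeys(List.of(PublicKey.from("recipient1".getBytes())))
            .build();

    byte[] encoded = encoder.encode(payload);
    EncodedPayload decoded = encoder.decode(encoded);
    System.out.println(decoded.equals(payload)); // expected: true
  }
}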
<|start_filename|>tessera-core/src/test/java/com/quorum/tessera/transaction/internal/PrivacyHelperTest.java<|end_filename|>
package com.quorum.tessera.transaction.internal;
import static java.util.Collections.*;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.ArgumentMatchers.anyCollection;
import static org.mockito.Mockito.*;
import com.quorum.tessera.data.EncryptedTransaction;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.data.MessageHash;
import com.quorum.tessera.enclave.*;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.transaction.PrivacyHelper;
import com.quorum.tessera.transaction.exception.EnhancedPrivacyNotSupportedException;
import com.quorum.tessera.transaction.exception.PrivacyViolationException;
import java.util.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class PrivacyHelperTest {
private PrivacyHelper privacyHelper;
private EncryptedTransactionDAO encryptedTransactionDAO;
@Before
public void setUp() {
encryptedTransactionDAO = mock(EncryptedTransactionDAO.class);
privacyHelper = new PrivacyHelperImpl(encryptedTransactionDAO, true);
}
@After
public void onTearDown() {
verifyNoMoreInteractions(encryptedTransactionDAO);
}
@Test
public void create() {
try (var mockedServiceLoader = mockStatic(ServiceLoader.class)) {
PrivacyHelper privacyHelper = mock(PrivacyHelper.class);
ServiceLoader serviceLoader = mock(ServiceLoader.class);
when(serviceLoader.findFirst()).thenReturn(Optional.of(privacyHelper));
mockedServiceLoader
.when(() -> ServiceLoader.load(PrivacyHelper.class))
.thenReturn(serviceLoader);
PrivacyHelper.create();
mockedServiceLoader.verify(() -> ServiceLoader.load(PrivacyHelper.class));
verify(serviceLoader).findFirst();
mockedServiceLoader.verifyNoMoreInteractions();
verifyNoMoreInteractions(serviceLoader);
verifyNoInteractions(privacyHelper);
}
}
@Test
public void findAffectedContractTransactionsFromSendRequestFound() {
final MessageHash hash1 = mock(MessageHash.class);
final MessageHash hash2 = mock(MessageHash.class);
EncryptedTransaction et1 = mock(EncryptedTransaction.class);
when(et1.getEncodedPayload()).thenReturn("payload1".getBytes());
when(et1.getHash()).thenReturn(hash1);
when(et1.getPayload()).thenReturn(mock(EncodedPayload.class));
EncryptedTransaction et2 = mock(EncryptedTransaction.class);
when(et2.getEncodedPayload()).thenReturn("payload2".getBytes());
when(et2.getHash()).thenReturn(hash2);
when(et2.getPayload()).thenReturn(mock(EncodedPayload.class));
when(encryptedTransactionDAO.findByHashes(anyCollection())).thenReturn(List.of(et1, et2));
List<AffectedTransaction> affectedTransactions =
privacyHelper.findAffectedContractTransactionsFromSendRequest(Set.of(hash1, hash2));
assertThat(affectedTransactions).isNotNull();
assertThat(affectedTransactions.size()).isEqualTo(2);
verify(encryptedTransactionDAO).findByHashes(any());
}
@Test
public void findAffectedContractTransactionsFromSendRequestNotFound() {
final MessageHash hash1 = mock(MessageHash.class);
final MessageHash hash2 = mock(MessageHash.class);
EncryptedTransaction et1 = mock(EncryptedTransaction.class);
when(et1.getEncodedPayload()).thenReturn("payload1".getBytes());
when(et1.getHash()).thenReturn(new MessageHash("hash1".getBytes()));
when(encryptedTransactionDAO.findByHashes(anyCollection())).thenReturn(List.of(et1));
assertThatExceptionOfType(PrivacyViolationException.class)
.isThrownBy(
() -> {
privacyHelper.findAffectedContractTransactionsFromSendRequest(Set.of(hash1, hash2));
failBecauseExceptionWasNotThrown(Exception.class);
})
.withMessageContaining("Unable to find affectedContractTransaction");
verify(encryptedTransactionDAO).findByHashes(any());
}
@Test
public void testValidateSendPartyProtection() {
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getPrivacyMode()).thenReturn(PrivacyMode.PARTY_PROTECTION);
final AffectedTransaction affectedTransaction = mock(AffectedTransaction.class);
when(affectedTransaction.getPayload()).thenReturn(encodedPayload);
boolean isValid =
privacyHelper.validateSendRequest(
PrivacyMode.PARTY_PROTECTION,
Collections.emptyList(),
singletonList(affectedTransaction),
emptySet());
assertThat(isValid).isTrue();
}
@Test
public void testValidateSendPartyProtectionFlagMismatched() {
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getPrivacyMode()).thenReturn(PrivacyMode.PARTY_PROTECTION);
final AffectedTransaction affectedTransaction = mock(AffectedTransaction.class);
when(affectedTransaction.getPayload()).thenReturn(encodedPayload);
final TxHash hash = TxHash.from("someHash".getBytes());
when(affectedTransaction.getHash()).thenReturn(hash);
assertThatExceptionOfType(PrivacyViolationException.class)
.isThrownBy(
() ->
privacyHelper.validateSendRequest(
PrivacyMode.PRIVATE_STATE_VALIDATION,
Collections.emptyList(),
singletonList(affectedTransaction),
emptySet()))
.withMessage("Privacy metadata mismatched with Affected Txn " + hash.encodeToBase64());
}
@Test
public void testValidateSendPsv() {
PublicKey recipient1 = mock(PublicKey.class);
PublicKey recipient2 = mock(PublicKey.class);
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
final AffectedTransaction affectedTransaction = mock(AffectedTransaction.class);
when(affectedTransaction.getPayload()).thenReturn(encodedPayload);
final TxHash hash = TxHash.from("someHash".getBytes());
when(affectedTransaction.getHash()).thenReturn(hash);
boolean isValid =
privacyHelper.validateSendRequest(
PrivacyMode.PRIVATE_STATE_VALIDATION,
List.of(recipient1, recipient2),
singletonList(affectedTransaction),
emptySet());
assertThat(isValid).isTrue();
}
@Test
public void testValidateSendPsvMoreRecipientsAffected() {
PublicKey recipient1 = mock(PublicKey.class);
PublicKey recipient2 = mock(PublicKey.class);
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
final AffectedTransaction affectedTransaction = mock(AffectedTransaction.class);
when(affectedTransaction.getPayload()).thenReturn(encodedPayload);
final TxHash hash = TxHash.from("someHash".getBytes());
when(affectedTransaction.getHash()).thenReturn(hash);
assertThatExceptionOfType(PrivacyViolationException.class)
.isThrownBy(
() ->
privacyHelper.validateSendRequest(
PrivacyMode.PRIVATE_STATE_VALIDATION,
List.of(recipient1),
singletonList(affectedTransaction),
emptySet()))
.withMessage("Recipients mismatched for Affected Txn " + hash.encodeToBase64());
}
@Test
public void testValidateSendPsvLessRecipientsAffected() {
PublicKey recipient1 = mock(PublicKey.class);
PublicKey recipient2 = mock(PublicKey.class);
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipient1));
final AffectedTransaction affectedTransaction = mock(AffectedTransaction.class);
when(affectedTransaction.getPayload()).thenReturn(encodedPayload);
final TxHash hash = TxHash.from("someHash".getBytes());
when(affectedTransaction.getHash()).thenReturn(hash);
assertThatExceptionOfType(PrivacyViolationException.class)
.isThrownBy(
() ->
privacyHelper.validateSendRequest(
PrivacyMode.PRIVATE_STATE_VALIDATION,
List.of(recipient1, recipient2),
singletonList(affectedTransaction),
emptySet()))
.withMessage("Recipients mismatched for Affected Txn " + hash.encodeToBase64());
}
@Test
public void findAffectedContractTransactionsFromPayload() {
final EncodedPayload payload = mock(EncodedPayload.class);
Map<TxHash, SecurityHash> affected = new HashMap<>();
affected.put(TxHash.from("Hash1".getBytes()), SecurityHash.from("secHash1".getBytes()));
affected.put(TxHash.from("Hash2".getBytes()), SecurityHash.from("secHash2".getBytes()));
EncryptedTransaction et1 = mock(EncryptedTransaction.class);
when(et1.getEncodedPayload()).thenReturn("payload1".getBytes());
when(et1.getHash()).thenReturn(new MessageHash("Hash1".getBytes()));
when(et1.getPayload()).thenReturn(mock(EncodedPayload.class));
when(payload.getAffectedContractTransactions()).thenReturn(affected);
when(encryptedTransactionDAO.findByHashes(any())).thenReturn(singletonList(et1));
List<AffectedTransaction> result =
privacyHelper.findAffectedContractTransactionsFromPayload(payload);
assertThat(result).hasSize(1);
verify(encryptedTransactionDAO).findByHashes(any());
}
@Test
public void validatePayloadFlagMismatched() {
TxHash txHash = mock(TxHash.class);
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.STANDARD_PRIVATE);
EncodedPayload affectedPayload1 = mock(EncodedPayload.class);
when(affectedPayload1.getPrivacyMode()).thenReturn(PrivacyMode.STANDARD_PRIVATE);
AffectedTransaction affectedTransaction1 = mock(AffectedTransaction.class);
when(affectedTransaction1.getPayload()).thenReturn(affectedPayload1);
EncodedPayload affectedPayload2 = mock(EncodedPayload.class);
when(affectedPayload2.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
AffectedTransaction affectedTransaction2 = mock(AffectedTransaction.class);
when(affectedTransaction2.getPayload()).thenReturn(affectedPayload2);
boolean result =
privacyHelper.validatePayload(
txHash, payload, List.of(affectedTransaction1, affectedTransaction2));
assertThat(result).isFalse();
}
@Test
public void validatePayloadMandatoryRecipientsMismatched() {
TxHash txHash = mock(TxHash.class);
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.MANDATORY_RECIPIENTS);
PublicKey mandatory1 = mock(PublicKey.class);
PublicKey mandatory2 = mock(PublicKey.class);
when(payload.getMandatoryRecipients()).thenReturn(Set.of(mandatory1));
EncodedPayload affectedPayload1 = mock(EncodedPayload.class);
when(affectedPayload1.getPrivacyMode()).thenReturn(PrivacyMode.MANDATORY_RECIPIENTS);
when(affectedPayload1.getMandatoryRecipients()).thenReturn(Set.of(mandatory1, mandatory2));
AffectedTransaction affectedTransaction1 = mock(AffectedTransaction.class);
when(affectedTransaction1.getPayload()).thenReturn(affectedPayload1);
EncodedPayload affectedPayload2 = mock(EncodedPayload.class);
when(affectedPayload2.getPrivacyMode()).thenReturn(PrivacyMode.MANDATORY_RECIPIENTS);
when(affectedPayload2.getMandatoryRecipients()).thenReturn(Set.of(mandatory1, mandatory2));
AffectedTransaction affectedTransaction2 = mock(AffectedTransaction.class);
when(affectedTransaction2.getPayload()).thenReturn(affectedPayload2);
boolean result =
privacyHelper.validatePayload(
txHash, payload, List.of(affectedTransaction1, affectedTransaction2));
assertThat(result).isFalse();
}
@Test
public void validPayloadMandatoryRecipients() {
TxHash txHash = mock(TxHash.class);
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.MANDATORY_RECIPIENTS);
PublicKey mandatory1 = mock(PublicKey.class);
PublicKey mandatory2 = mock(PublicKey.class);
when(payload.getMandatoryRecipients()).thenReturn(Set.of(mandatory1, mandatory2));
EncodedPayload affectedPayload1 = mock(EncodedPayload.class);
when(affectedPayload1.getPrivacyMode()).thenReturn(PrivacyMode.MANDATORY_RECIPIENTS);
when(affectedPayload1.getMandatoryRecipients()).thenReturn(Set.of(mandatory1));
AffectedTransaction affectedTransaction1 = mock(AffectedTransaction.class);
when(affectedTransaction1.getPayload()).thenReturn(affectedPayload1);
EncodedPayload affectedPayload2 = mock(EncodedPayload.class);
when(affectedPayload2.getPrivacyMode()).thenReturn(PrivacyMode.MANDATORY_RECIPIENTS);
when(affectedPayload2.getMandatoryRecipients()).thenReturn(Set.of(mandatory2));
AffectedTransaction affectedTransaction2 = mock(AffectedTransaction.class);
when(affectedTransaction2.getPayload()).thenReturn(affectedPayload2);
boolean result =
privacyHelper.validatePayload(
txHash, payload, List.of(affectedTransaction1, affectedTransaction2));
assertThat(result).isTrue();
}
@Test
public void validatePsvPayloadWithMissingAffectedTxs() {
final TxHash txHash = mock(TxHash.class);
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
Map<TxHash, SecurityHash> affected = new HashMap<>();
affected.put(TxHash.from("Hash1".getBytes()), SecurityHash.from("secHash1".getBytes()));
affected.put(TxHash.from("Hash2".getBytes()), SecurityHash.from("secHash2".getBytes()));
when(payload.getAffectedContractTransactions()).thenReturn(affected);
EncodedPayload affectedPayload = mock(EncodedPayload.class);
when(affectedPayload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
AffectedTransaction affectedTransaction = mock(AffectedTransaction.class);
when(affectedTransaction.getPayload()).thenReturn(affectedPayload);
boolean result =
privacyHelper.validatePayload(txHash, payload, singletonList(affectedTransaction));
assertThat(result).isFalse();
}
@Test
public void validatePayloadPsvFakeSender() {
final PublicKey recipient1 = PublicKey.from("Recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("Recipient2".getBytes());
final PublicKey fakeSender = PublicKey.from("someone".getBytes());
final TxHash txHash = TxHash.from("someHash".getBytes());
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(payload.getSenderKey()).thenReturn(fakeSender);
Map<TxHash, SecurityHash> affected = new HashMap<>();
affected.put(TxHash.from("Hash1".getBytes()), SecurityHash.from("secHash1".getBytes()));
affected.put(TxHash.from("Hash2".getBytes()), SecurityHash.from("secHash2".getBytes()));
when(payload.getAffectedContractTransactions()).thenReturn(affected);
EncodedPayload affectedPayload1 = mock(EncodedPayload.class);
when(affectedPayload1.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(affectedPayload1.getRecipientKeys()).thenReturn(List.of(recipient1, fakeSender));
AffectedTransaction affectedTransaction1 = mock(AffectedTransaction.class);
when(affectedTransaction1.getPayload()).thenReturn(affectedPayload1);
when(affectedTransaction1.getHash()).thenReturn(TxHash.from("hash1".getBytes()));
EncodedPayload affectedPayload2 = mock(EncodedPayload.class);
when(affectedPayload2.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(affectedPayload2.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
AffectedTransaction affectedTransaction2 = mock(AffectedTransaction.class);
when(affectedTransaction2.getPayload()).thenReturn(affectedPayload2);
when(affectedTransaction2.getHash()).thenReturn(TxHash.from("hash2".getBytes()));
boolean result =
privacyHelper.validatePayload(
txHash, payload, List.of(affectedTransaction1, affectedTransaction2));
assertThat(result).isFalse();
}
@Test
public void validatePsvPayloadRecipientsMismatched() {
PublicKey recipient1 = PublicKey.from("Recipient1".getBytes());
PublicKey recipient2 = PublicKey.from("Recipient2".getBytes());
final TxHash txHash = TxHash.from("someHash".getBytes());
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(payload.getSenderKey()).thenReturn(recipient1);
when(payload.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
Map<TxHash, SecurityHash> affected = new HashMap<>();
affected.put(TxHash.from("Hash1".getBytes()), SecurityHash.from("secHash1".getBytes()));
affected.put(TxHash.from("Hash2".getBytes()), SecurityHash.from("secHash2".getBytes()));
when(payload.getAffectedContractTransactions()).thenReturn(affected);
EncodedPayload affectedPayload1 = mock(EncodedPayload.class);
when(affectedPayload1.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(affectedPayload1.getRecipientKeys()).thenReturn(singletonList(recipient1));
AffectedTransaction affectedTransaction1 = mock(AffectedTransaction.class);
when(affectedTransaction1.getPayload()).thenReturn(affectedPayload1);
when(affectedTransaction1.getHash()).thenReturn(TxHash.from("hash1".getBytes()));
EncodedPayload affectedPayload2 = mock(EncodedPayload.class);
when(affectedPayload2.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(affectedPayload2.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
AffectedTransaction affectedTransaction2 = mock(AffectedTransaction.class);
when(affectedTransaction2.getPayload()).thenReturn(affectedPayload2);
when(affectedTransaction2.getHash()).thenReturn(TxHash.from("hash2".getBytes()));
assertThatExceptionOfType(PrivacyViolationException.class)
.isThrownBy(
() ->
privacyHelper.validatePayload(
txHash, payload, List.of(affectedTransaction1, affectedTransaction2)))
.withMessage(
"Recipients mismatched for Affected Txn "
+ TxHash.from("hash1".getBytes()).encodeToBase64());
}
@Test
public void validPayload() {
PublicKey recipient1 = PublicKey.from("Recipient1".getBytes());
PublicKey recipient2 = PublicKey.from("Recipient2".getBytes());
final TxHash txHash = TxHash.from("someHash".getBytes());
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(payload.getSenderKey()).thenReturn(recipient1);
when(payload.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
Map<TxHash, SecurityHash> affected = new HashMap<>();
affected.put(TxHash.from("Hash1".getBytes()), SecurityHash.from("secHash1".getBytes()));
affected.put(TxHash.from("Hash2".getBytes()), SecurityHash.from("secHash2".getBytes()));
when(payload.getAffectedContractTransactions()).thenReturn(affected);
EncodedPayload affectedPayload1 = mock(EncodedPayload.class);
when(affectedPayload1.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(affectedPayload1.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
AffectedTransaction affectedTransaction1 = mock(AffectedTransaction.class);
when(affectedTransaction1.getPayload()).thenReturn(affectedPayload1);
when(affectedTransaction1.getHash()).thenReturn(TxHash.from("hash1".getBytes()));
EncodedPayload affectedPayload2 = mock(EncodedPayload.class);
when(affectedPayload2.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(affectedPayload2.getRecipientKeys()).thenReturn(List.of(recipient2, recipient1));
AffectedTransaction affectedTransaction2 = mock(AffectedTransaction.class);
when(affectedTransaction2.getPayload()).thenReturn(affectedPayload2);
when(affectedTransaction2.getHash()).thenReturn(TxHash.from("hash2".getBytes()));
boolean result =
privacyHelper.validatePayload(
txHash, payload, List.of(affectedTransaction1, affectedTransaction2));
assertThat(result).isTrue();
}
@Test
public void psvTransactionCannotHaveInvalidHashes() {
TxHash txHash = TxHash.from("Hash1".getBytes());
TxHash invalid = TxHash.from("InvalidHash".getBytes());
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
Set<TxHash> invalidHashes = Set.of(invalid);
assertThatExceptionOfType(PrivacyViolationException.class)
.isThrownBy(() -> privacyHelper.sanitisePrivacyPayload(txHash, payload, invalidHashes))
.withMessage(
"Invalid security hashes identified for PSC TX "
+ txHash
+ ". Invalid ACOTHs: "
+ invalid.encodeToBase64());
}
@Test
public void sanitisedInputForPartyProtection() {
final TxHash txHash = TxHash.from("Hash1".getBytes());
TxHash invalid = TxHash.from("InvalidHash".getBytes());
Map<TxHash, byte[]> affected = new HashMap<>();
affected.put(TxHash.from("Hash1".getBytes()), "secHash1".getBytes());
affected.put(invalid, "secHash2".getBytes());
EncodedPayload payload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PARTY_PROTECTION)
.withAffectedContractTransactions(affected)
.build();
assertThat(payload.getAffectedContractTransactions()).hasSize(2);
Set<TxHash> invalidHashes = Set.of(invalid);
final EncodedPayload updatedPayload =
privacyHelper.sanitisePrivacyPayload(txHash, payload, invalidHashes);
assertThat(updatedPayload.getAffectedContractTransactions()).hasSize(1);
}
@Test
public void returnsEmptyList() {
assertThat(privacyHelper.findAffectedContractTransactionsFromSendRequest(null)).hasSize(0);
assertThat(privacyHelper.findAffectedContractTransactionsFromSendRequest(Collections.EMPTY_SET))
.hasSize(0);
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getAffectedContractTransactions()).thenReturn(emptyMap());
assertThat(privacyHelper.findAffectedContractTransactionsFromPayload(payload)).hasSize(0);
}
@Test
public void throwExceptionForSendRequestWhenPrivacyNotEnabled() {
final PrivacyHelper anotherHelper = new PrivacyHelperImpl(encryptedTransactionDAO, false);
assertThatExceptionOfType(EnhancedPrivacyNotSupportedException.class)
.isThrownBy(
() ->
anotherHelper.validateSendRequest(
PrivacyMode.PRIVATE_STATE_VALIDATION, emptyList(), emptyList(), emptySet()));
}
@Test
public void throwExceptionForPayloadWhenPrivacyNotEnabled() {
final PrivacyHelper anotherHelper = new PrivacyHelperImpl(encryptedTransactionDAO, false);
EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.PARTY_PROTECTION);
assertThatExceptionOfType(EnhancedPrivacyNotSupportedException.class)
.isThrownBy(() -> anotherHelper.validatePayload(mock(TxHash.class), payload, emptyList()));
}
@Test
public void testValidateSendMandatoryRecipientsInvalid() {
PublicKey recipient1 = mock(PublicKey.class);
PublicKey recipient2 = mock(PublicKey.class);
assertThatExceptionOfType(PrivacyViolationException.class)
.isThrownBy(
() ->
privacyHelper.validateSendRequest(
PrivacyMode.MANDATORY_RECIPIENTS,
List.of(recipient1, recipient2),
emptyList(),
Set.of(mock(PublicKey.class))))
.withMessageContaining(
"One or more mandatory recipients not included in the participant list");
}
@Test
public void testValidateSendMandatoryRecipientsMismatched() {
PublicKey recipient1 = mock(PublicKey.class);
PublicKey recipient2 = mock(PublicKey.class);
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getPrivacyMode()).thenReturn(PrivacyMode.MANDATORY_RECIPIENTS);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
when(encodedPayload.getMandatoryRecipients()).thenReturn(Set.of(mock(PublicKey.class)));
final AffectedTransaction affectedTransaction = mock(AffectedTransaction.class);
when(affectedTransaction.getPayload()).thenReturn(encodedPayload);
final TxHash hash = TxHash.from("someHash".getBytes());
when(affectedTransaction.getHash()).thenReturn(hash);
assertThatExceptionOfType(PrivacyViolationException.class)
.isThrownBy(
() ->
privacyHelper.validateSendRequest(
PrivacyMode.MANDATORY_RECIPIENTS,
List.of(recipient1, recipient2),
singletonList(affectedTransaction),
Set.of(recipient1)))
.withMessageContaining("Privacy metadata mismatched");
}
@Test
public void testValidSendMandatoryRecipients() {
PublicKey recipient1 = mock(PublicKey.class);
PublicKey recipient2 = mock(PublicKey.class);
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getPrivacyMode()).thenReturn(PrivacyMode.MANDATORY_RECIPIENTS);
when(encodedPayload.getRecipientKeys()).thenReturn(List.of(recipient1));
when(encodedPayload.getMandatoryRecipients()).thenReturn(Set.of(recipient1));
final AffectedTransaction affectedTransaction = mock(AffectedTransaction.class);
when(affectedTransaction.getPayload()).thenReturn(encodedPayload);
final TxHash hash = TxHash.from("someHash".getBytes());
when(affectedTransaction.getHash()).thenReturn(hash);
boolean valid =
privacyHelper.validateSendRequest(
PrivacyMode.MANDATORY_RECIPIENTS,
List.of(recipient1, recipient2),
singletonList(affectedTransaction),
Set.of(recipient1, recipient2));
assertThat(valid).isTrue();
}
}
<|start_filename|>tests/acceptance-test/src/test/java/com/quorum/tessera/test/vault/azure/AzureKeyVaultHttpHandler.java<|end_filename|>
package com.quorum.tessera.test.vault.azure;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import jakarta.json.Json;
import jakarta.json.JsonObject;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AzureKeyVaultHttpHandler implements HttpHandler {
private static final Logger LOGGER = LoggerFactory.getLogger(AzureKeyVaultHttpHandler.class);
private AtomicInteger counter = new AtomicInteger(0);
private final String publicKey = "<KEY>";
private final String keyVaultUrl;
public AzureKeyVaultHttpHandler(String keyVaultUrl) {
this.keyVaultUrl = keyVaultUrl;
}
@Override
public void handle(HttpExchange exchange) throws IOException {
LOGGER.info("HttpExchange getRequestMethod {}", exchange.getRequestMethod());
LOGGER.info("HttpExchange getRequestURI {}", exchange.getRequestURI());
LOGGER.info("HttpExchange content type {}", exchange.getRequestHeaders().get("Content-type"));
counter.incrementAndGet();
exchange
.getRequestHeaders()
.entrySet()
.forEach(
e -> {
LOGGER.info("HttpExchange Header: {} = {}", e.getKey(), e.getValue());
exchange.getResponseHeaders().add(e.getKey(), String.join(",", e.getValue()));
});
// exchange.getResponseHeaders().add("WWW-Authenticate",
// String.format("Bearer authorization=%s/auth, resource=%s",keyVaultUrl));
if (exchange.getRequestURI().toString().startsWith("/secrets/Pub/")) {
JsonObject jsonObject = Json.createObjectBuilder().add("value", publicKey).build();
byte[] response = jsonObject.toString().getBytes();
exchange.sendResponseHeaders(200, response.length);
exchange.getResponseBody().write(response);
LOGGER.info("response send {}", new String(response));
exchange.close();
} else {
exchange.sendResponseHeaders(200, 0);
exchange.close();
}
}
public int getCounter() {
return counter.intValue();
}
}
<|start_filename|>config/src/test/java/com/quorum/tessera/config/constraints/KeyConfigurationValidatorTest.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import static java.util.Collections.emptyList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import com.quorum.tessera.config.KeyConfiguration;
import jakarta.validation.ConstraintValidatorContext;
import java.nio.file.Paths;
import org.junit.Test;
public class KeyConfigurationValidatorTest {
private KeyConfigurationValidator validator = new KeyConfigurationValidator();
@Test
public void bothNotSetIsValid() {
final KeyConfiguration configuration = new KeyConfiguration(null, null, null, null, null);
assertThat(validator.isValid(configuration, mock(ConstraintValidatorContext.class))).isTrue();
}
@Test
public void fileSetIsValid() {
final KeyConfiguration configuration =
new KeyConfiguration(Paths.get("anything"), null, null, null, null);
assertThat(validator.isValid(configuration, mock(ConstraintValidatorContext.class))).isTrue();
}
@Test
public void inlineSetIsValid() {
final KeyConfiguration configuration =
new KeyConfiguration(null, emptyList(), null, null, null);
assertThat(validator.isValid(configuration, mock(ConstraintValidatorContext.class))).isTrue();
}
@Test
public void bothSetIsInvalid() {
final KeyConfiguration configuration =
new KeyConfiguration(Paths.get("anything"), emptyList(), null, null, null);
assertThat(validator.isValid(configuration, mock(ConstraintValidatorContext.class))).isFalse();
}
@Test
public void nullConfigIsValid() {
assertThat(validator.isValid(null, mock(ConstraintValidatorContext.class))).isTrue();
}
}
<|start_filename|>enclave/enclave-api/src/test/java/com/quorum/tessera/enclave/CBOREncoderTest.java<|end_filename|>
package com.quorum.tessera.enclave;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.mockito.Mockito.mock;
import com.quorum.tessera.encryption.Nonce;
import com.quorum.tessera.encryption.PublicKey;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
public class CBOREncoderTest {
private final PayloadEncoder encoder = new CBOREncoder();
private EncodedPayload standardPayload;
@Before
public void setUp() {
standardPayload =
EncodedPayload.Builder.create()
.withSenderKey(PublicKey.from("sender".getBytes()))
.withCipherText("text".getBytes())
.withRecipientNonce(new Nonce("recipientNonce".getBytes()))
.withCipherTextNonce(new Nonce("cipherTextNonce".getBytes()))
.withRecipientBoxes(List.of("box1".getBytes(), "box2".getBytes()))
.withRecipientKeys(
List.of(
PublicKey.from("recipient1".getBytes()),
PublicKey.from("recipient2".getBytes())))
.build();
}
@Test
public void testEncodeDecodeStandard() {
final byte[] encoded = encoder.encode(standardPayload);
final EncodedPayload result = encoder.decode(encoded);
assertThat(result).isEqualTo(standardPayload);
assertThat(result.getPrivacyMode()).isEqualTo(PrivacyMode.STANDARD_PRIVATE);
assertThat(result.getAffectedContractTransactions()).isEmpty();
assertThat(result.getExecHash()).isEmpty();
assertThat(result.getMandatoryRecipients()).isEmpty();
assertThat(result.getPrivacyGroupId()).isEmpty();
}
@Test
public void testEncodeDecodePP() {
EncodedPayload payload =
EncodedPayload.Builder.from(standardPayload)
.withPrivacyMode(PrivacyMode.PARTY_PROTECTION)
.withAffectedContractTransactions(
Map.of(
TxHash.from("txHash1".getBytes()),
"securityHash1".getBytes(),
TxHash.from("txHash2".getBytes()),
"securityHash2".getBytes()))
.build();
final byte[] encoded = encoder.encode(payload);
final EncodedPayload result = encoder.decode(encoded);
assertThat(result).isEqualTo(payload);
assertThat(result.getPrivacyMode()).isEqualTo(PrivacyMode.PARTY_PROTECTION);
assertThat(result.getAffectedContractTransactions())
.isEqualTo(
Map.of(
TxHash.from("txHash1".getBytes()),
SecurityHash.from("securityHash1".getBytes()),
TxHash.from("txHash2".getBytes()),
SecurityHash.from("securityHash2".getBytes())));
assertThat(result.getExecHash()).isEmpty();
assertThat(result.getMandatoryRecipients()).isEmpty();
assertThat(result.getPrivacyGroupId()).isEmpty();
}
@Test
public void testEncodeDecodePSV() {
EncodedPayload payload =
EncodedPayload.Builder.from(standardPayload)
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withAffectedContractTransactions(
Map.of(
TxHash.from("txHash1".getBytes()),
"securityHash1".getBytes(),
TxHash.from("txHash2".getBytes()),
"securityHash2".getBytes()))
.withExecHash("execHash".getBytes())
.build();
final byte[] encoded = encoder.encode(payload);
final EncodedPayload result = encoder.decode(encoded);
assertThat(result).isEqualTo(payload);
assertThat(result.getPrivacyMode()).isEqualTo(PrivacyMode.PRIVATE_STATE_VALIDATION);
assertThat(result.getAffectedContractTransactions())
.isEqualTo(
Map.of(
TxHash.from("txHash1".getBytes()),
SecurityHash.from("securityHash1".getBytes()),
TxHash.from("txHash2".getBytes()),
SecurityHash.from("securityHash2".getBytes())));
assertThat(result.getExecHash()).isEqualTo("execHash".getBytes());
assertThat(result.getMandatoryRecipients()).isEmpty();
assertThat(result.getPrivacyGroupId()).isEmpty();
}
@Test
public void testEncodeDecodeMR() {
EncodedPayload payload =
EncodedPayload.Builder.from(standardPayload)
.withPrivacyMode(PrivacyMode.MANDATORY_RECIPIENTS)
.withAffectedContractTransactions(
Map.of(
TxHash.from("txHash1".getBytes()),
"securityHash1".getBytes(),
TxHash.from("txHash2".getBytes()),
"securityHash2".getBytes()))
.withMandatoryRecipients(
Set.of(
PublicKey.from("recipient1".getBytes()),
PublicKey.from("recipient2".getBytes())))
.build();
final byte[] encoded = encoder.encode(payload);
final EncodedPayload result = encoder.decode(encoded);
assertThat(result).isEqualTo(payload);
assertThat(result.getPrivacyMode()).isEqualTo(PrivacyMode.MANDATORY_RECIPIENTS);
assertThat(result.getAffectedContractTransactions())
.isEqualTo(
Map.of(
TxHash.from("txHash1".getBytes()),
SecurityHash.from("securityHash1".getBytes()),
TxHash.from("txHash2".getBytes()),
SecurityHash.from("securityHash2".getBytes())));
assertThat(result.getExecHash()).isEmpty();
assertThat(result.getMandatoryRecipients())
.isEqualTo(
Set.of(
PublicKey.from("recipient1".getBytes()), PublicKey.from("recipient2".getBytes())));
assertThat(result.getPrivacyGroupId()).isEmpty();
}
@Test
public void testEncodeDecodeWithPrivacyGroup() {
PrivacyGroup.Id groupId = PrivacyGroup.Id.fromBytes("group".getBytes());
EncodedPayload payload =
EncodedPayload.Builder.from(standardPayload)
.withPrivacyMode(PrivacyMode.MANDATORY_RECIPIENTS)
.withMandatoryRecipients(
Set.of(
PublicKey.from("recipient1".getBytes()),
PublicKey.from("recipient2".getBytes())))
.withPrivacyGroupId(groupId)
.build();
final byte[] encoded = encoder.encode(payload);
final EncodedPayload result = encoder.decode(encoded);
assertThat(result).isEqualTo(payload);
assertThat(result.getPrivacyMode()).isEqualTo(PrivacyMode.MANDATORY_RECIPIENTS);
assertThat(result.getAffectedContractTransactions()).isEmpty();
assertThat(result.getExecHash()).isEmpty();
assertThat(result.getMandatoryRecipients())
.isEqualTo(
Set.of(
PublicKey.from("recipient1".getBytes()), PublicKey.from("recipient2".getBytes())));
assertThat(result.getPrivacyGroupId()).isPresent().get().isEqualTo(groupId);
}
@Test
public void encodeError() {
EncodedPayload payload = mock(EncodedPayload.class);
assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() -> encoder.encode(payload))
.withMessageContaining("Unable to encode payload");
}
@Test
public void decodeError() {
String invalid = "oWZzZW5kZXKA";
byte[] raw = Base64.getDecoder().decode(invalid);
assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() -> encoder.decode(raw))
.withMessageContaining("Unable to decode payload data");
}
@Test
public void codec() {
assertThat(encoder.encodedPayloadCodec()).isEqualTo(EncodedPayloadCodec.CBOR);
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/package-info.java<|end_filename|>
@jakarta.xml.bind.annotation.XmlSchema(
namespace = "http://tessera.github.com/config",
elementFormDefault = jakarta.xml.bind.annotation.XmlNsForm.QUALIFIED,
attributeFormDefault = jakarta.xml.bind.annotation.XmlNsForm.UNQUALIFIED)
package com.quorum.tessera.config;
<|start_filename|>config/src/main/java/module-info.java<|end_filename|>
open module tessera.config {
requires jakarta.validation;
requires java.xml;
requires jasypt;
requires org.apache.commons.lang3;
requires org.slf4j;
requires tessera.argontwo;
requires tessera.encryption.api;
requires tessera.shared;
requires jakarta.xml.bind;
requires jakarta.json;
exports com.quorum.tessera.config;
exports com.quorum.tessera.config.apps;
exports com.quorum.tessera.config.keypairs;
exports com.quorum.tessera.config.keys;
exports com.quorum.tessera.config.util;
exports com.quorum.tessera.config.adapters;
exports com.quorum.tessera.config.constraints;
uses com.quorum.tessera.config.util.EnvironmentVariableProviderFactory;
uses com.quorum.tessera.config.ConfigFactory;
provides com.quorum.tessera.config.util.EnvironmentVariableProviderFactory with
com.quorum.tessera.config.util.EnvironmentVariableProviderFactoryImpl;
provides com.quorum.tessera.config.ConfigFactory with
com.quorum.tessera.config.internal.ConfigFactoryProvider;
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/test/java/com/quorum/tessera/p2p/recovery/RestRecoveryClientTest.java<|end_filename|>
package com.quorum.tessera.p2p.recovery;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.p2p.resend.ResendRequest;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.client.Invocation;
import jakarta.ws.rs.client.WebTarget;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class RestRecoveryClientTest {
private Response.Status expectedResponseStatus;
public RestRecoveryClientTest(Response.Status expectedResponseStatus) {
this.expectedResponseStatus = expectedResponseStatus;
}
@Test
public void makeResendRequest() {
try (var entityMockedStatic = mockStatic(Entity.class)) {
Entity<ResendRequest> outboundEntity = mock(Entity.class);
ResendRequest resendRequest = mock(ResendRequest.class);
entityMockedStatic
.when(() -> Entity.entity(resendRequest, MediaType.APPLICATION_JSON))
.thenReturn(outboundEntity);
String targetUrl = "targetUrl";
Client client = mock(Client.class);
WebTarget webTarget = mock(WebTarget.class);
when(client.target(targetUrl)).thenReturn(webTarget);
when(webTarget.path("/resend")).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
when(webTarget.request()).thenReturn(invocationBuilder);
Response response = mock(Response.class);
when(response.getStatus()).thenReturn(expectedResponseStatus.getStatusCode());
when(invocationBuilder.post(outboundEntity)).thenReturn(response);
RestRecoveryClient restRecoveryClient = new RestRecoveryClient(client);
boolean outcome = restRecoveryClient.makeResendRequest(targetUrl, resendRequest);
if (expectedResponseStatus == Response.Status.OK) {
assertThat(outcome).isTrue();
} else {
assertThat(outcome).isFalse();
}
entityMockedStatic.verify(() -> Entity.entity(resendRequest, MediaType.APPLICATION_JSON));
entityMockedStatic.verifyNoMoreInteractions();
verify(client).target(targetUrl);
verify(webTarget).path("/resend");
verify(webTarget).request();
verify(invocationBuilder).post(outboundEntity);
verifyNoMoreInteractions(outboundEntity, resendRequest, client, webTarget, invocationBuilder);
}
}
@Test
public void pushBatch() {
try (var entityMockedStatic = mockStatic(Entity.class)) {
Entity<PushBatchRequest> outboundEntity = mock(Entity.class);
PushBatchRequest pushBatchRequest = mock(PushBatchRequest.class);
entityMockedStatic
.when(() -> Entity.entity(pushBatchRequest, MediaType.APPLICATION_JSON))
.thenReturn(outboundEntity);
String targetUrl = "targetUrl";
Client client = mock(Client.class);
WebTarget webTarget = mock(WebTarget.class);
when(client.target(targetUrl)).thenReturn(webTarget);
when(webTarget.path("/pushBatch")).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
when(webTarget.request()).thenReturn(invocationBuilder);
Response response = mock(Response.class);
when(response.getStatus()).thenReturn(expectedResponseStatus.getStatusCode());
when(invocationBuilder.post(outboundEntity)).thenReturn(response);
RestRecoveryClient restRecoveryClient = new RestRecoveryClient(client);
boolean outcome = restRecoveryClient.pushBatch(targetUrl, pushBatchRequest);
if (expectedResponseStatus == Response.Status.OK) {
assertThat(outcome).isTrue();
} else {
assertThat(outcome).isFalse();
}
entityMockedStatic.verify(() -> Entity.entity(pushBatchRequest, MediaType.APPLICATION_JSON));
entityMockedStatic.verifyNoMoreInteractions();
verify(client).target(targetUrl);
verify(webTarget).path("/pushBatch");
verify(webTarget).request();
verify(invocationBuilder).post(outboundEntity);
verifyNoMoreInteractions(
outboundEntity, pushBatchRequest, client, webTarget, invocationBuilder);
}
}
@Test
public void makeBatchResendRequest() {
try (var entityMockedStatic = mockStatic(Entity.class)) {
Entity<PushBatchRequest> outboundEntity = mock(Entity.class);
ResendBatchRequest pushBatchRequest = mock(ResendBatchRequest.class);
entityMockedStatic
.when(() -> Entity.entity(pushBatchRequest, MediaType.APPLICATION_JSON))
.thenReturn(outboundEntity);
String targetUrl = "targetUrl";
Client client = mock(Client.class);
WebTarget webTarget = mock(WebTarget.class);
when(client.target(targetUrl)).thenReturn(webTarget);
when(webTarget.path("/resendBatch")).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
when(webTarget.request()).thenReturn(invocationBuilder);
Response response = mock(Response.class);
when(response.getStatus()).thenReturn(expectedResponseStatus.getStatusCode());
ResendBatchResponse resendBatchResponse = mock(ResendBatchResponse.class);
when(response.readEntity(ResendBatchResponse.class)).thenReturn(resendBatchResponse);
when(invocationBuilder.post(outboundEntity)).thenReturn(response);
RestRecoveryClient restRecoveryClient = new RestRecoveryClient(client);
ResendBatchResponse outcome =
restRecoveryClient.makeBatchResendRequest(targetUrl, pushBatchRequest);
if (expectedResponseStatus == Response.Status.OK) {
verify(response).readEntity(ResendBatchResponse.class);
assertThat(outcome).isSameAs(resendBatchResponse);
} else {
assertThat(outcome).isNull();
}
entityMockedStatic.verify(() -> Entity.entity(pushBatchRequest, MediaType.APPLICATION_JSON));
entityMockedStatic.verifyNoMoreInteractions();
verify(client).target(targetUrl);
verify(webTarget).path("/resendBatch");
verify(webTarget).request();
verify(invocationBuilder).post(outboundEntity);
verifyNoMoreInteractions(
outboundEntity, pushBatchRequest, client, webTarget, invocationBuilder);
}
}
@Parameterized.Parameters(name = "ResponseStatus {0}")
public static Collection<Response.Status> statuses() {
return Arrays.asList(Response.Status.values());
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/ConfigItem.java<|end_filename|>
package com.quorum.tessera.config;
import com.quorum.tessera.config.constraints.NoUnmatchedElements;
import jakarta.xml.bind.annotation.XmlTransient;
import java.lang.reflect.Field;
import java.util.List;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
@NoUnmatchedElements
public abstract class ConfigItem {
@XmlTransient private List<Object> unmatched;
public List<Object> getUnmatched() {
return unmatched;
}
@Override
public boolean equals(Object obj) {
return EqualsBuilder.reflectionEquals(this, obj);
}
@Override
public int hashCode() {
return HashCodeBuilder.reflectionHashCode(this);
}
@Override
public String toString() {
return new ReflectionToStringBuilder(this, ToStringStyle.MULTI_LINE_STYLE) {
@Override
protected boolean accept(Field f) {
return super.accept(f) && !f.getName().toLowerCase().contains("password");
}
}.toString();
}
}
<|start_filename|>server/jersey-server/src/main/java/com/quorum/tessera/server/jaxrs/LoggingFilter.java<|end_filename|>
package com.quorum.tessera.server.jaxrs;
import jakarta.ws.rs.container.*;
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.UriInfo;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LoggingFilter implements ContainerRequestFilter, ContainerResponseFilter {
private static final Logger LOGGER = LoggerFactory.getLogger(LoggingFilter.class);
@Context private ResourceInfo resourceInfo;
private Logger getLogger() {
return Optional.ofNullable(resourceInfo)
.filter(r -> r.getResourceClass() != null)
.map(r -> LoggerFactory.getLogger(r.getResourceClass()))
.orElse(LOGGER);
}
@Override
public void filter(final ContainerRequestContext request) {
log("Enter", request);
}
@Override
public void filter(
final ContainerRequestContext request, final ContainerResponseContext response) {
log("Exit", request);
String path = Optional.ofNullable(request.getUriInfo()).map(UriInfo::getPath).orElse(null);
Optional.ofNullable(response.getStatusInfo())
.ifPresent(
statusType ->
getLogger()
.info(
"Response for {} : {} {}",
path,
statusType.getStatusCode(),
statusType.getReasonPhrase()));
}
private void log(String prefix, ContainerRequestContext request) {
String path = Optional.ofNullable(request.getUriInfo()).map(UriInfo::getPath).orElse(null);
getLogger().info("{} Request : {} : {}", prefix, request.getMethod(), "/" + path);
}
/**
* Set the request resource info. Only needed for unit tests.
*
* @param resourceInfo the resource info
*/
@Context
public void setResourceInfo(final ResourceInfo resourceInfo) {
this.resourceInfo = resourceInfo;
}
}
<|start_filename|>cli/config-cli/src/main/java/com/quorum/tessera/config/cli/KeyGenFileUpdateOptions.java<|end_filename|>
package com.quorum.tessera.config.cli;
import com.quorum.tessera.config.Config;
import java.nio.file.Path;
import picocli.CommandLine;
public class KeyGenFileUpdateOptions {
@CommandLine.Option(
names = {"--configfile", "-configfile", "--config-file"},
description = "Path to node configuration file",
required = true)
private Config config;
@CommandLine.Option(
names = {"--configout", "-output"},
description =
"Path to save updated configfile to. Requires --configfile option to also be provided")
private Path configOut;
@CommandLine.Option(
names = {"--pwdout"},
description =
"Path to save updated password list to. Requires --configfile and --configout options to also be provided")
private Path pwdOut;
public Config getConfig() {
return config;
}
public Path getConfigOut() {
return configOut;
}
public Path getPwdOut() {
return pwdOut;
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/main/java/com/quorum/tessera/api/constraint/PrivacyValid.java<|end_filename|>
package com.quorum.tessera.api.constraint;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import jakarta.validation.Constraint;
import jakarta.validation.Payload;
import java.lang.annotation.Retention;
@Retention(RUNTIME)
@Constraint(validatedBy = RequestPrivacyValidator.class)
public @interface PrivacyValid {
String message() default "Send request not valid";
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
}
<|start_filename|>enclave/enclave-api/src/test/java/com/quorum/tessera/enclave/EncodedPayloadCodecTest.java<|end_filename|>
package com.quorum.tessera.enclave;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.version.*;
import java.util.Set;
import org.junit.Test;
public class EncodedPayloadCodecTest {
@Test
public void current() {
EncodedPayloadCodec encodedPayloadCodec = EncodedPayloadCodec.current();
assertThat(encodedPayloadCodec).isSameAs(EncodedPayloadCodec.CBOR);
}
@Test
public void getPreferredCodecLegacy() {
EncodedPayloadCodec codec =
EncodedPayloadCodec.getPreferredCodec(
Set.of(BaseVersion.API_VERSION_1, MandatoryRecipientsVersion.API_VERSION_4));
assertThat(codec).isEqualTo(EncodedPayloadCodec.LEGACY);
}
@Test
public void getPreferredCodecVersion5() {
EncodedPayloadCodec codec =
EncodedPayloadCodec.getPreferredCodec(
Set.of(
BaseVersion.API_VERSION_1,
MandatoryRecipientsVersion.API_VERSION_4,
CBORSupportVersion.API_VERSION_5));
assertThat(codec).isEqualTo(EncodedPayloadCodec.CBOR);
assertThat(codec.getMinimumSupportedVersion()).isEqualTo(CBORSupportVersion.API_VERSION_5);
}
@Test
public void getPreferredCodecUnknownVersion() {
EncodedPayloadCodec codec = EncodedPayloadCodec.getPreferredCodec(Set.of());
assertThat(codec).isEqualTo(EncodedPayloadCodec.LEGACY);
assertThat(codec.getMinimumSupportedVersion()).isEqualTo(BaseVersion.API_VERSION_1);
}
}
<|start_filename|>tessera-jaxrs/transaction-jaxrs/src/main/java/com/quorum/tessera/q2t/TransactionResource4.java<|end_filename|>
package com.quorum.tessera.q2t;
import static com.quorum.tessera.version.MandatoryRecipientsVersion.MIME_TYPE_JSON_4;
import static jakarta.ws.rs.core.MediaType.TEXT_PLAIN;
import com.quorum.tessera.api.SendRequest;
import com.quorum.tessera.api.SendResponse;
import com.quorum.tessera.api.SendSignedRequest;
import com.quorum.tessera.api.constraint.PrivacyValid;
import com.quorum.tessera.data.MessageHash;
import com.quorum.tessera.enclave.PrivacyGroup;
import com.quorum.tessera.enclave.PrivacyMode;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.privacygroup.PrivacyGroupManager;
import com.quorum.tessera.transaction.TransactionManager;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.ExampleObject;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import jakarta.ws.rs.*;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.UriBuilder;
import java.net.URI;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Provides endpoints for dealing with transactions, including:
 *
 * <p>- creating new transactions and distributing them<br>
 * - deleting transactions<br>
 * - fetching transactions<br>
 * - resending old transactions
 *
 * <p>This resource deals with send, sendsignedtx, and receive for the mime type
 * application/vnd.tessera-4.0+json. An illustrative client-side call is sketched in a comment
 * after the send method below.
 */
@Tag(name = "quorum-to-tessera")
@Path("/")
public class TransactionResource4 {
private static final Logger LOGGER = LoggerFactory.getLogger(TransactionResource4.class);
private final TransactionManager transactionManager;
private final PrivacyGroupManager privacyGroupManager;
private final Base64.Decoder base64Decoder = Base64.getDecoder();
private final Base64.Encoder base64Encoder = Base64.getEncoder();
public TransactionResource4(
final TransactionManager transactionManager, final PrivacyGroupManager privacyGroupManager) {
this.transactionManager = Objects.requireNonNull(transactionManager);
this.privacyGroupManager = Objects.requireNonNull(privacyGroupManager);
}
@POST
@Path("send")
@Consumes({MIME_TYPE_JSON_4})
@Produces({MIME_TYPE_JSON_4})
public Response send(@NotNull @Valid @PrivacyValid final SendRequest sendRequest) {
final PublicKey sender =
Optional.ofNullable(sendRequest.getFrom())
.map(base64Decoder::decode)
.map(PublicKey::from)
.orElseGet(transactionManager::defaultPublicKey);
final Optional<PrivacyGroup.Id> privacyGroupId =
Optional.ofNullable(sendRequest.getPrivacyGroupId()).map(PrivacyGroup.Id::fromBase64String);
final List<PublicKey> recipientList =
privacyGroupId
.map(privacyGroupManager::retrievePrivacyGroup)
.map(PrivacyGroup::getMembers)
.orElse(
Stream.of(sendRequest)
.filter(sr -> Objects.nonNull(sr.getTo()))
.flatMap(s -> Stream.of(s.getTo()))
.map(base64Decoder::decode)
.map(PublicKey::from)
.collect(Collectors.toList()));
final Set<MessageHash> affectedTransactions =
Stream.ofNullable(sendRequest.getAffectedContractTransactions())
.flatMap(Arrays::stream)
.map(base64Decoder::decode)
.map(MessageHash::new)
.collect(Collectors.toSet());
final byte[] execHash =
Optional.ofNullable(sendRequest.getExecHash()).map(String::getBytes).orElse(new byte[0]);
final PrivacyMode privacyMode = PrivacyMode.fromFlag(sendRequest.getPrivacyFlag());
final Set<PublicKey> mandatoryRecipients =
Stream.ofNullable(sendRequest.getMandatoryRecipients())
.flatMap(Arrays::stream)
.map(base64Decoder::decode)
.map(PublicKey::from)
.collect(Collectors.toUnmodifiableSet());
final com.quorum.tessera.transaction.SendRequest.Builder requestBuilder =
com.quorum.tessera.transaction.SendRequest.Builder.create()
.withRecipients(recipientList)
.withSender(sender)
.withPayload(sendRequest.getPayload())
.withExecHash(execHash)
.withPrivacyMode(privacyMode)
.withAffectedContractTransactions(affectedTransactions)
.withMandatoryRecipients(mandatoryRecipients);
privacyGroupId.ifPresent(requestBuilder::withPrivacyGroupId);
final com.quorum.tessera.transaction.SendResponse response =
transactionManager.send(requestBuilder.build());
final String encodedKey =
Optional.of(response)
.map(com.quorum.tessera.transaction.SendResponse::getTransactionHash)
.map(MessageHash::getHashBytes)
.map(base64Encoder::encodeToString)
.get();
final String[] managedParties =
Optional.of(response).map(com.quorum.tessera.transaction.SendResponse::getManagedParties)
.orElse(Collections.emptySet()).stream()
.map(PublicKey::encodeToBase64)
.toArray(String[]::new);
final SendResponse sendResponse =
Optional.of(response)
.map(com.quorum.tessera.transaction.SendResponse::getTransactionHash)
.map(MessageHash::getHashBytes)
.map(base64Encoder::encodeToString)
.map(
messageHash ->
new SendResponse(messageHash, managedParties, sender.encodeToBase64()))
.get();
final URI location =
UriBuilder.fromPath("transaction")
.path(URLEncoder.encode(encodedKey, StandardCharsets.UTF_8))
.build();
return Response.created(location).entity(sendResponse).build();
}
@POST
@Path("sendsignedtx")
@Consumes({MIME_TYPE_JSON_4})
@Produces({MIME_TYPE_JSON_4})
public Response sendSignedTransaction(
@NotNull @Valid @PrivacyValid final SendSignedRequest sendSignedRequest) {
final Optional<PrivacyGroup.Id> privacyGroupId =
Optional.ofNullable(sendSignedRequest.getPrivacyGroupId())
.map(PrivacyGroup.Id::fromBase64String);
final List<PublicKey> recipients =
privacyGroupId
.map(privacyGroupManager::retrievePrivacyGroup)
.map(PrivacyGroup::getMembers)
.orElse(
Optional.ofNullable(sendSignedRequest.getTo()).stream()
.flatMap(Arrays::stream)
.map(base64Decoder::decode)
.map(PublicKey::from)
.collect(Collectors.toList()));
final PrivacyMode privacyMode = PrivacyMode.fromFlag(sendSignedRequest.getPrivacyFlag());
final Set<MessageHash> affectedTransactions =
Stream.ofNullable(sendSignedRequest.getAffectedContractTransactions())
.flatMap(Arrays::stream)
.map(base64Decoder::decode)
.map(MessageHash::new)
.collect(Collectors.toSet());
final byte[] execHash =
Optional.ofNullable(sendSignedRequest.getExecHash())
.map(String::getBytes)
.orElse(new byte[0]);
final Set<PublicKey> mandatoryRecipients =
Stream.ofNullable(sendSignedRequest.getMandatoryRecipients())
.flatMap(Arrays::stream)
.map(base64Decoder::decode)
.map(PublicKey::from)
.collect(Collectors.toUnmodifiableSet());
final com.quorum.tessera.transaction.SendSignedRequest.Builder requestBuilder =
com.quorum.tessera.transaction.SendSignedRequest.Builder.create()
.withSignedData(sendSignedRequest.getHash())
.withRecipients(recipients)
.withPrivacyMode(privacyMode)
.withAffectedContractTransactions(affectedTransactions)
.withExecHash(execHash)
.withMandatoryRecipients(mandatoryRecipients);
privacyGroupId.ifPresent(requestBuilder::withPrivacyGroupId);
final com.quorum.tessera.transaction.SendResponse response =
transactionManager.sendSignedTransaction(requestBuilder.build());
final String encodedTransactionHash =
Optional.of(response)
.map(com.quorum.tessera.transaction.SendResponse::getTransactionHash)
.map(MessageHash::getHashBytes)
.map(base64Encoder::encodeToString)
.get();
LOGGER.debug("Encoded key: {}", encodedTransactionHash);
final URI location =
UriBuilder.fromPath("transaction")
.path(URLEncoder.encode(encodedTransactionHash, StandardCharsets.UTF_8))
.build();
final String[] managedParties =
Optional.of(response).map(com.quorum.tessera.transaction.SendResponse::getManagedParties)
.orElse(Collections.emptySet()).stream()
.map(PublicKey::encodeToBase64)
.toArray(String[]::new);
final SendResponse responseEntity = new SendResponse();
responseEntity.setKey(encodedTransactionHash);
responseEntity.setManagedParties(managedParties);
responseEntity.setSenderKey(response.getSender().encodeToBase64());
LOGGER.debug("Encoded key: {}", encodedTransactionHash);
return Response.created(location).entity(responseEntity).build();
}
@Operation(
summary = "/transaction/{hash}/mandatory",
operationId = "getMandatoryRecipients",
description = "get list of mandatory recipient public keys for a transaction")
@ApiResponse(
responseCode = "200",
description = "comma-separated list of mandatory recipients",
content =
@Content(
schema =
@Schema(
type = "string",
description = "comma-separated list of mandatory recipients"),
examples =
@ExampleObject(
"RO<KEY>=,BULeR8JyUWhiuuCMU/HLA0Q5pzkYT+cHII3ZKBey3Bo=")))
@GET
@Path("/transaction/{hash}/mandatory")
@Produces(TEXT_PLAIN)
public Response getMandatoryRecipients(
@Parameter(
description = "hash indicating encrypted payload to get mandatory recipients for",
schema = @Schema(format = "base64"))
@PathParam("hash")
final String ptmHash) {
LOGGER.debug("Received mandatory recipients list API request for key {}", ptmHash);
MessageHash transactionHash =
Optional.of(ptmHash).map(Base64.getDecoder()::decode).map(MessageHash::new).get();
final String mandatoryRecipients =
transactionManager.getMandatoryRecipients(transactionHash).stream()
.map(PublicKey::encodeToBase64)
.collect(Collectors.joining(","));
return Response.ok(mandatoryRecipients).build();
}
}
<|start_filename|>tests/acceptance-test/src/test/java/com/quorum/tessera/test/rest/CustomPayloadEncryptionIT.java<|end_filename|>
package com.quorum.tessera.test.rest;
import static com.quorum.tessera.version.MultiTenancyVersion.MIME_TYPE_JSON_2_1;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.api.PayloadEncryptResponse;
import com.quorum.tessera.api.ReceiveResponse;
import com.quorum.tessera.api.SendRequest;
import com.quorum.tessera.test.Party;
import com.quorum.tessera.test.PartyHelper;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import suite.NodeAlias;
@RunWith(Parameterized.class)
public class CustomPayloadEncryptionIT {
private final PartyHelper partyHelper = PartyHelper.create();
private String mediaType;
public CustomPayloadEncryptionIT(final String mediaType) {
this.mediaType = mediaType;
}
@Parameterized.Parameters
public static List<String> params() {
return List.of(MediaType.APPLICATION_JSON, MIME_TYPE_JSON_2_1);
}
@Test
public void createPayload() {
final Party sender = partyHelper.findByAlias(NodeAlias.A);
final SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(Base64.getEncoder().encode("Test Payload".getBytes()));
sendRequest.setTo(partyHelper.getParties().map(Party::getPublicKey).toArray(String[]::new));
Response result =
sender
.getRestClientWebTarget()
.path("/encodedpayload/create")
.request()
.post(Entity.entity(sendRequest, mediaType));
assertThat(result.getStatus()).isEqualTo(200);
final PayloadEncryptResponse payloadEncryptResponse =
result.readEntity(PayloadEncryptResponse.class);
assertThat(Base64.getEncoder().encodeToString(payloadEncryptResponse.getSenderKey()))
.isEqualTo(sender.getPublicKey());
assertThat(payloadEncryptResponse.getCipherText()).isNotEmpty();
assertThat(payloadEncryptResponse.getCipherTextNonce()).isNotEmpty();
assertThat(payloadEncryptResponse.getRecipientBoxes()).hasSize(4);
assertThat(payloadEncryptResponse.getRecipientNonce()).isNotEmpty();
assertThat(payloadEncryptResponse.getRecipientKeys())
.hasSize(4)
.containsExactlyInAnyOrder(
partyHelper
.getParties()
.map(Party::getPublicKey)
.map(Base64.getDecoder()::decode)
.toArray(byte[][]::new));
assertThat(payloadEncryptResponse.getPrivacyMode()).isEqualTo(0);
assertThat(payloadEncryptResponse.getAffectedContractTransactions()).isEmpty();
assertThat(payloadEncryptResponse.getExecHash()).isEmpty();
}
@Test
public void createAndDecryptPayload() {
final Party sender = partyHelper.findByAlias(NodeAlias.A);
final Party recipient = partyHelper.findByAlias(NodeAlias.B);
final SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(Base64.getEncoder().encode("Test Payload".getBytes()));
sendRequest.setTo(recipient.getPublicKey());
final Response encryptResult =
sender
.getRestClientWebTarget()
.path("/encodedpayload/create")
.request()
.post(Entity.entity(sendRequest, mediaType));
assertThat(encryptResult.getStatus()).isEqualTo(200);
final PayloadEncryptResponse payloadEncryptResponse =
encryptResult.readEntity(PayloadEncryptResponse.class);
// decrypt it again with the sender
final Response decryptResultForSender =
sender
.getRestClientWebTarget()
.path("/encodedpayload/decrypt")
.request()
.post(Entity.entity(payloadEncryptResponse, mediaType));
final ReceiveResponse decryptedPayload =
decryptResultForSender.readEntity(ReceiveResponse.class);
assertThat(Base64.getDecoder().decode(decryptedPayload.getPayload()))
.isEqualTo("Test Payload".getBytes());
// decrypt it using the recipient
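    // the response holds one box per recipient key (sender and recipient); drop the sender's
    // box so the recipient node decrypts the remaining box with its own key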
final String firstRecipientInList =
Base64.getEncoder().encodeToString(payloadEncryptResponse.getRecipientKeys().get(0));
if (Objects.equals(firstRecipientInList, sender.getPublicKey())) {
payloadEncryptResponse.getRecipientBoxes().remove(0);
} else {
payloadEncryptResponse.getRecipientBoxes().remove(1);
}
payloadEncryptResponse.setRecipientKeys(Collections.emptyList());
final Response decryptResultForRecipient =
recipient
.getRestClientWebTarget()
.path("/encodedpayload/decrypt")
.request()
.post(Entity.entity(payloadEncryptResponse, mediaType));
final ReceiveResponse decryptedPayloadForRecipient =
decryptResultForRecipient.readEntity(ReceiveResponse.class);
assertThat(Base64.getDecoder().decode(decryptedPayloadForRecipient.getPayload()))
.isEqualTo("Test Payload".getBytes());
}
@Test
public void payloadDecryptionFailsOnBadMessage() {
final Party sender = partyHelper.findByAlias(NodeAlias.A);
final SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(Base64.getEncoder().encode("Test Payload".getBytes()));
Response result =
sender
.getRestClientWebTarget()
.path("/encodedpayload/create")
.request()
.post(Entity.entity(sendRequest, mediaType));
assertThat(result.getStatus()).isEqualTo(200);
final PayloadEncryptResponse payloadEncryptResponse =
result.readEntity(PayloadEncryptResponse.class);
// edit the cipher text to something rubbish, so it can't be decrypted
payloadEncryptResponse.setCipherText("Unexpected data".getBytes());
// attempt to decrypt it
final Response decryptResultForSender =
sender
.getRestClientWebTarget()
.path("/encodedpayload/decrypt")
.request()
.post(Entity.entity(payloadEncryptResponse, mediaType));
assertThat(decryptResultForSender.getStatus()).isEqualTo(500);
}
@Test
public void payloadDecryptionFailsWithoutProperKeyAvailable() {
final Party sender = partyHelper.findByAlias(NodeAlias.A);
final Party recipient = partyHelper.findByAlias(NodeAlias.B);
final SendRequest sendRequest = new SendRequest();
sendRequest.setPayload(Base64.getEncoder().encode("Test Payload".getBytes()));
sendRequest.setTo(recipient.getPublicKey());
final Response encryptResult =
sender
.getRestClientWebTarget()
.path("/encodedpayload/create")
.request()
.post(Entity.entity(sendRequest, mediaType));
assertThat(encryptResult.getStatus()).isEqualTo(200);
final PayloadEncryptResponse payloadEncryptResponse =
encryptResult.readEntity(PayloadEncryptResponse.class);
// purposefully remove the wrong box so it can't be decrypted
// since the key that corresponds to the remaining box isn't on that node
final String firstRecipientInList =
Base64.getEncoder().encodeToString(payloadEncryptResponse.getRecipientKeys().get(0));
if (Objects.equals(firstRecipientInList, sender.getPublicKey())) {
payloadEncryptResponse.getRecipientBoxes().remove(1);
} else {
payloadEncryptResponse.getRecipientBoxes().remove(0);
}
payloadEncryptResponse.setRecipientKeys(Collections.emptyList());
final Response decryptResultForRecipient =
recipient
.getRestClientWebTarget()
.path("/encodedpayload/decrypt")
.request()
.post(Entity.entity(payloadEncryptResponse, mediaType));
assertThat(decryptResultForRecipient.getStatus()).isEqualTo(500);
}
}
<|start_filename|>migration/multitenancy/src/test/java/com/quorum/tessera/multitenancy/migration/MigrationTest.java<|end_filename|>
package com.quorum.tessera.multitenancy.migration;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.cli.CliType;
import com.quorum.tessera.cli.parsers.ConfigConverter;
import com.quorum.tessera.config.Config;
import com.quorum.tessera.config.JdbcConfig;
import com.quorum.tessera.config.util.JaxbUtil;
import com.quorum.tessera.data.EncryptedRawTransaction;
import com.quorum.tessera.data.EncryptedTransaction;
import com.quorum.tessera.data.MessageHash;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PrivacyMode;
import com.quorum.tessera.encryption.PublicKey;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.Persistence;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.stream.IntStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import picocli.CommandLine;
@RunWith(Parameterized.class)
public class MigrationTest {
@Rule public TemporaryFolder workDir = new TemporaryFolder();
private Path primaryConfigPath;
private Path secondaryConfigPath;
private List<String> args;
private EntityManagerFactory primaryEntityManagerFactory;
private EntityManagerFactory secondaryEntityManagerFactory;
private int encryptedTransactionCount;
private int encryptedRawTransactionCount;
public MigrationTest(TestInfo testInfo) {
this.encryptedTransactionCount = testInfo.getEncryptedTransactionCount();
this.encryptedRawTransactionCount = testInfo.getEncryptedRawTransactionCount();
}
@Before
public void beforeTest() throws IOException {
Config primaryConfig = new Config();
primaryConfig.setJdbcConfig(new JdbcConfig());
primaryConfig.getJdbcConfig().setUsername("junit");
primaryConfig.getJdbcConfig().setPassword("<PASSWORD>");
String primaryJdbcUrl =
"jdbc:h2:" + workDir.getRoot().toPath().resolve("primary.db").toString();
primaryConfig.getJdbcConfig().setUrl(primaryJdbcUrl);
Config secondaryConfig = new Config();
secondaryConfig.setJdbcConfig(new JdbcConfig());
secondaryConfig.getJdbcConfig().setUsername("junit");
secondaryConfig.getJdbcConfig().setPassword("<PASSWORD>");
String secondaryJdbcUrl =
"jdbc:h2:" + workDir.getRoot().toPath().resolve("secondary.db").toString();
secondaryConfig.getJdbcConfig().setUrl(secondaryJdbcUrl);
primaryConfigPath = workDir.getRoot().toPath().toAbsolutePath().resolve("primary-config.json");
try (OutputStream outputStream = Files.newOutputStream(primaryConfigPath)) {
JaxbUtil.marshalWithNoValidation(primaryConfig, outputStream);
}
secondaryConfigPath =
workDir.getRoot().toPath().toAbsolutePath().resolve("secondary-config.json");
try (OutputStream outputStream = Files.newOutputStream(secondaryConfigPath)) {
JaxbUtil.marshalWithNoValidation(secondaryConfig, outputStream);
}
args =
List.of(
"--primary",
primaryConfigPath.toString(),
"--secondary",
secondaryConfigPath.toString());
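    // the primary database starts empty (drop-and-create); the secondary database is created
    // and then seeded with the test entities below, ready to be migrated into the primary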
primaryEntityManagerFactory =
Optional.of(primaryConfig)
.map(Config::getJdbcConfig)
.map(JdbcConfigUtil::toMap)
.map(m -> new HashMap(m))
.map(
p -> {
p.put("jakarta.persistence.schema-generation.database.action", "drop-and-create");
EntityManagerFactory emf = Persistence.createEntityManagerFactory("tessera", p);
emf.createEntityManager();
return emf;
})
.get();
secondaryEntityManagerFactory =
Optional.of(secondaryConfig)
.map(Config::getJdbcConfig)
.map(JdbcConfigUtil::toMap)
.map(m -> new HashMap(m))
.map(
p -> {
p.put("jakarta.persistence.schema-generation.database.action", "create");
EntityManagerFactory emf = Persistence.createEntityManagerFactory("tessera", p);
return emf;
})
.get();
EntityManager secondaryEntityManager = secondaryEntityManagerFactory.createEntityManager();
secondaryEntityManager.getTransaction().begin();
IntStream.range(0, encryptedTransactionCount)
.forEach(
i -> {
EncryptedTransaction encryptedTransaction = generateEncryptedTransaction();
secondaryEntityManager.persist(encryptedTransaction);
});
secondaryEntityManager.getTransaction().commit();
secondaryEntityManager.getTransaction().begin();
IntStream.range(0, encryptedRawTransactionCount)
.forEach(
i -> {
EncryptedRawTransaction encryptedRawTransaction = generateEncryptedRawTransaction();
secondaryEntityManager.persist(encryptedRawTransaction);
});
secondaryEntityManager.getTransaction().commit();
}
@After
public void afterTest() {
primaryEntityManagerFactory.close();
secondaryEntityManagerFactory.close();
}
@Test
public void doMigration() {
MigrationCliAdapter migrationCommand = new MigrationCliAdapter();
assertThat(migrationCommand.getType()).isEqualTo(CliType.MULTITENANCY_MIGRATION);
final CommandLine commandLine = new CommandLine(migrationCommand);
commandLine
.registerConverter(Config.class, new ConfigConverter())
.setSeparator(" ")
.setCaseInsensitiveEnumValuesAllowed(true);
int exitCode = commandLine.execute(args.toArray(String[]::new));
assertThat(exitCode).isZero();
EntityManager secondaryEntityManager = secondaryEntityManagerFactory.createEntityManager();
EntityManager primaryEntityManager = primaryEntityManagerFactory.createEntityManager();
secondaryEntityManager.getTransaction().begin();
primaryEntityManager.getTransaction().begin();
secondaryEntityManager
.createQuery("select count(e) from EncryptedTransaction e", Long.class)
.getResultStream()
.findFirst()
.ifPresent(count -> assertThat(count).isEqualTo(encryptedTransactionCount));
primaryEntityManager
.createQuery("select count(e) from EncryptedTransaction e", Long.class)
.getResultStream()
.findFirst()
.ifPresent(count -> assertThat(count).isEqualTo(encryptedTransactionCount));
secondaryEntityManager
.createQuery("select count(e) from EncryptedRawTransaction e", Long.class)
.getResultStream()
.findFirst()
.ifPresent(count -> assertThat(count).isEqualTo(encryptedRawTransactionCount));
primaryEntityManager
.createQuery("select count(e) from EncryptedRawTransaction e", Long.class)
.getResultStream()
.findFirst()
.ifPresent(count -> assertThat(count).isEqualTo(encryptedRawTransactionCount));
secondaryEntityManager
.createQuery("select e from EncryptedTransaction e", EncryptedTransaction.class)
.getResultStream()
.forEach(
e -> {
EncryptedTransaction copiedEncryptedTransaction =
primaryEntityManager.find(EncryptedTransaction.class, e.getHash());
assertThat(copiedEncryptedTransaction).isNotNull();
assertThat(copiedEncryptedTransaction.getEncodedPayload())
.isEqualTo(e.getEncodedPayload());
});
secondaryEntityManager
.createQuery("select e from EncryptedRawTransaction e", EncryptedRawTransaction.class)
.getResultStream()
.forEach(
e -> {
EncryptedRawTransaction copiedEncryptedRawTransaction =
primaryEntityManager.find(EncryptedRawTransaction.class, e.getHash());
assertThat(copiedEncryptedRawTransaction).isNotNull();
assertThat(copiedEncryptedRawTransaction.getEncryptedKey())
.isEqualTo(e.getEncryptedKey());
assertThat(copiedEncryptedRawTransaction.getEncryptedPayload())
.isEqualTo(e.getEncryptedPayload());
assertThat(copiedEncryptedRawTransaction.getSender()).isEqualTo(e.getSender());
assertThat(copiedEncryptedRawTransaction.getNonce()).isEqualTo(e.getNonce());
});
secondaryEntityManager.getTransaction().rollback();
primaryEntityManager.getTransaction().rollback();
assertThat(commandLine.execute(args.toArray(String[]::new)))
.describedAs("Rerunning should throw no errors as there are exist checks before insert")
.isZero();
primaryEntityManager
.createQuery("select count(e) from EncryptedTransaction e", Long.class)
.getResultStream()
.findFirst()
.ifPresent(count -> assertThat(count).isEqualTo(encryptedTransactionCount));
secondaryEntityManager
.createQuery("select count(e) from EncryptedRawTransaction e", Long.class)
.getResultStream()
.findFirst()
.ifPresent(count -> assertThat(count).isEqualTo(encryptedRawTransactionCount));
}
static EncryptedTransaction generateEncryptedTransaction() {
EncryptedTransaction encryptedTransaction = new EncryptedTransaction();
encryptedTransaction.setHash(new MessageHash(UUID.randomUUID().toString().getBytes()));
encryptedTransaction.setPayload(generateEncodedPayload());
encryptedTransaction.setEncodedPayloadCodec(EncodedPayloadCodec.LEGACY);
return encryptedTransaction;
}
static EncodedPayload generateEncodedPayload() {
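    // pick a privacy mode at random so the generated payloads exercise all modes across a run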
PrivacyMode privacyMode =
Arrays.stream(PrivacyMode.values())
.skip((int) (PrivacyMode.values().length * Math.random()))
.findAny()
.get();
PublicKey senderKey = PublicKey.from("SenderKey".getBytes());
EncodedPayload.Builder encodedPayloadBuilder =
EncodedPayload.Builder.create()
.withSenderKey(senderKey)
.withCipherText("cipherText".getBytes())
.withCipherTextNonce("CipherTextNonce".getBytes())
.withPrivacyMode(privacyMode)
.withRecipientNonce("RecipientNonce".getBytes())
.withRecipientKeys(List.of(senderKey, PublicKey.from("Recipient".getBytes())));
if (privacyMode != PrivacyMode.PRIVATE_STATE_VALIDATION) {
if (privacyMode == PrivacyMode.MANDATORY_RECIPIENTS) {
encodedPayloadBuilder.withMandatoryRecipients(
Set.of(PublicKey.from("Recipient".getBytes())));
}
encodedPayloadBuilder.withExecHash(new byte[0]);
} else {
encodedPayloadBuilder.withExecHash("execHash".getBytes());
}
return encodedPayloadBuilder.build();
}
static EncryptedRawTransaction generateEncryptedRawTransaction() {
final EncryptedRawTransaction secondaryRawTx =
new EncryptedRawTransaction(
new MessageHash(UUID.randomUUID().toString().getBytes()),
"some encrypted message".getBytes(),
"encryptedKey".getBytes(),
"nonce".getBytes(),
"sender".getBytes());
return secondaryRawTx;
}
@Parameterized.Parameters(name = "{0}")
public static List<TestInfo> configs() {
return List.of(new TestInfo(21, 89), new TestInfo(91, 12));
}
static class TestInfo {
private int encryptedTransactionCount;
private int encryptedRawTransactionCount;
TestInfo(int encryptedTransactionCount, int encryptedRawTransactionCount) {
this.encryptedTransactionCount = encryptedTransactionCount;
this.encryptedRawTransactionCount = encryptedRawTransactionCount;
}
public int getEncryptedTransactionCount() {
return encryptedTransactionCount;
}
public int getEncryptedRawTransactionCount() {
return encryptedRawTransactionCount;
}
@Override
public String toString() {
return "TestInfo{"
+ "encryptedTransactionCount="
+ encryptedTransactionCount
+ ", encryptedRawTransactionCount="
+ encryptedRawTransactionCount
+ '}';
}
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/main/java/com/quorum/tessera/api/exception/TransactionNotFoundExceptionMapper.java<|end_filename|>
package com.quorum.tessera.api.exception;
import com.quorum.tessera.transaction.exception.TransactionNotFoundException;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
import jakarta.ws.rs.ext.Provider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Provider
public class TransactionNotFoundExceptionMapper
implements ExceptionMapper<TransactionNotFoundException> {
private static final Logger LOGGER =
LoggerFactory.getLogger(TransactionNotFoundExceptionMapper.class);
@Override
public Response toResponse(final TransactionNotFoundException e) {
LOGGER.info(e.getMessage());
return Response.status(Response.Status.NOT_FOUND)
.entity(e.getMessage())
.type(MediaType.TEXT_PLAIN)
.build();
}
}
<|start_filename|>tessera-data/src/test/java/com/quorum/tessera/data/OpenPojoEntityTest.java<|end_filename|>
package com.quorum.tessera.data;
import com.openpojo.reflection.PojoClass;
import com.openpojo.reflection.impl.PojoClassFactory;
import com.openpojo.validation.Validator;
import com.openpojo.validation.ValidatorBuilder;
import com.openpojo.validation.rule.impl.*;
import com.openpojo.validation.test.impl.GetterTester;
import com.openpojo.validation.test.impl.SetterTester;
import com.quorum.tessera.data.staging.StagingTransaction;
import java.io.Serializable;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class OpenPojoEntityTest {
private PojoClass pojoClass;
private Validator pojoValidator;
public OpenPojoEntityTest(Map.Entry<Class<? extends Serializable>, Validator> typeValidatorPair) {
this.pojoClass = PojoClassFactory.getPojoClass(typeValidatorPair.getKey());
this.pojoValidator = typeValidatorPair.getValue();
}
@Test
public void executeOpenPojoValidationsWithSetter() {
pojoValidator.validate(pojoClass);
}
@Parameterized.Parameters(name = "{0}")
public static Set<Map.Entry<Class<? extends Serializable>, Validator>> entities() {
ValidatorBuilder validatorBuilder =
ValidatorBuilder.create()
.with(new GetterMustExistRule())
.with(new SetterTester())
.with(new GetterTester())
.with(new EqualsAndHashCodeMatchRule())
.with(new NoPublicFieldsExceptStaticFinalRule());
Validator defaultValidator = validatorBuilder.build();
return Map.of(
MessageHash.class, validatorBuilder.with(new NoPrimitivesRule()).build(),
EncryptedRawTransaction.class, defaultValidator,
EncryptedTransaction.class, defaultValidator,
StagingTransaction.class, defaultValidator)
.entrySet();
}
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/main/java/com/quorum/tessera/p2p/PrivacyGroupResource.java<|end_filename|>
package com.quorum.tessera.p2p;
import static jakarta.ws.rs.core.MediaType.APPLICATION_OCTET_STREAM;
import com.quorum.tessera.privacygroup.PrivacyGroupManager;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.validation.constraints.NotNull;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.core.Response;
@Tag(name = "peer-to-peer")
@Path("/")
public class PrivacyGroupResource {
private PrivacyGroupManager privacyGroupManager;
public PrivacyGroupResource(PrivacyGroupManager privacyGroupManager) {
this.privacyGroupManager = privacyGroupManager;
}
@Operation(
summary = "/pushPrivacyGroup",
operationId = "pushPrivacyGroup",
description = "store privacy group's encoded data")
@ApiResponse(responseCode = "200", description = "privacy group payload stored successfully")
@POST
@Path("pushPrivacyGroup")
@Consumes(APPLICATION_OCTET_STREAM)
public Response storePrivacyGroup(@NotNull final byte[] privacyGroupData) {
privacyGroupManager.storePrivacyGroup(privacyGroupData);
return Response.status(Response.Status.OK).build();
}
}
<|start_filename|>migration/multitenancy/src/test/java/com/quorum/tessera/multitenancy/migration/EncryptedTransactionMigratorTest.java<|end_filename|>
package com.quorum.tessera.multitenancy.migration;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.enclave.*;
import com.quorum.tessera.encryption.PublicKey;
import jakarta.persistence.EntityManager;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class EncryptedTransactionMigratorTest {
private EntityManager primaryDao;
private EntityManager secondaryDao;
private EncryptedTransactionMigrator migrator;
@Before
public void init() {
this.primaryDao = mock(EntityManager.class);
this.secondaryDao = mock(EntityManager.class);
this.migrator = new EncryptedTransactionMigrator(primaryDao, secondaryDao);
}
@After
public void after() {
verifyNoMoreInteractions(primaryDao, secondaryDao);
}
@Test
public void psvTxWithPrimaryAsSender() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withExecHash("execHash".getBytes())
.withSenderKey(sender)
.withNewRecipientKeys(List.of(sender, recipient1, recipient2))
.withRecipientBoxes(List.of("boxSender".getBytes(), recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withExecHash("execHash".getBytes())
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1, sender, recipient2))
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(primaryPayload);
}
@Test
public void psvTxWithSecondaryAsSender() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withExecHash("execHash".getBytes())
.withSenderKey(sender)
.withNewRecipientKeys(List.of(sender, recipient1, recipient2))
.withRecipientBoxes(List.of("boxSender".getBytes(), recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withExecHash("execHash".getBytes())
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1, sender, recipient2))
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(secondaryPayload);
}
@Test
public void psvTxWithBothAsRecipients() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final byte[] recipient2Box = "box2".getBytes();
final Map<TxHash, byte[]> recipient1Acoths =
Map.of(TxHash.from("txhash1".getBytes()), "securityhash1".getBytes());
final Map<TxHash, byte[]> recipient2Acoths =
Map.of(TxHash.from("txhash2".getBytes()), "securityhash2".getBytes());
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withExecHash("execHash".getBytes())
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1, sender, recipient2))
.withRecipientBoxes(List.of(recipient1Box))
.withAffectedContractTransactions(recipient1Acoths)
.build();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withExecHash("execHash".getBytes())
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient2, sender, recipient1))
.withRecipientBox(recipient2Box)
.withAffectedContractTransactions(recipient2Acoths)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
final EncodedPayload expected =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PRIVATE_STATE_VALIDATION)
.withExecHash("execHash".getBytes())
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient2, recipient1, sender))
.withRecipientBoxes(List.of(recipient2Box, recipient1Box))
.withAffectedContractTransactions(
Map.of(
TxHash.from("txhash1".getBytes()), "securityhash1".getBytes(),
TxHash.from("txhash2".getBytes()), "securityhash2".getBytes()))
.build();
assertThat(result).isEqualToComparingFieldByFieldRecursively(expected);
}
@Test
public void ppTxWithPrimaryAsSender() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PARTY_PROTECTION)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(sender, recipient1, recipient2))
.withRecipientBoxes(List.of("boxSender".getBytes(), recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PARTY_PROTECTION)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1))
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(primaryPayload);
}
@Test
public void ppTxWithSecondaryAsSender() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PARTY_PROTECTION)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(sender, recipient1, recipient2))
.withRecipientBoxes(List.of("boxSender".getBytes(), recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PARTY_PROTECTION)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1))
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(secondaryPayload);
}
@Test
public void ppTxWithBothAsRecipients() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final byte[] recipient2Box = "box2".getBytes();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PARTY_PROTECTION)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1))
.withRecipientBoxes(List.of(recipient1Box))
.build();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PARTY_PROTECTION)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient2))
.withRecipientBox(recipient2Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
final EncodedPayload expected =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.PARTY_PROTECTION)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1, recipient2))
.withRecipientBoxes(List.of(recipient1Box, recipient2Box))
.build();
assertThat(result).isEqualToComparingFieldByFieldRecursively(expected);
}
@Test
public void spPETxWithPrimaryAsSender() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(sender, recipient1, recipient2))
.withRecipientBoxes(List.of("boxSender".getBytes(), recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1))
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(primaryPayload);
}
@Test
public void spPrePETxWithPrimaryAsSender() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(sender, recipient1, recipient2))
.withRecipientBoxes(List.of("boxSender".getBytes(), recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(primaryPayload);
}
@Test
public void spPETxWithSecondaryAsSender() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(sender, recipient1, recipient2))
.withRecipientBoxes(List.of("boxSender".getBytes(), recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1))
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(secondaryPayload);
}
@Test
public void spPrePETxWithSecondaryAsSender() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(sender, recipient1, recipient2))
.withRecipientBoxes(List.of("boxSender".getBytes(), recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(secondaryPayload);
}
@Test
public void spPETxWithBothRecipients() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final byte[] recipient2Box = "box2".getBytes();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientKey(recipient1)
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientKey(recipient2)
.withRecipientBox(recipient2Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
final EncodedPayload expected =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientKeys(List.of(recipient1, recipient2))
.withRecipientBoxes(List.of(recipient1Box, recipient2Box))
.build();
assertThat(result).isEqualToComparingFieldByFieldRecursively(expected);
}
@Test
public void spPrePETxWithBothRecipients() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final byte[] recipient2Box = "box2".getBytes();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientBoxes(List.of(recipient1Box))
.build();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientBox(recipient2Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
final EncodedPayload expected =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientBoxes(List.of(recipient1Box, recipient2Box))
.build();
assertThat(result).isEqualToComparingFieldByFieldRecursively(expected);
}
// From a pre-0.8 tx
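  // (the sender's own key is absent from the recipient list, as in payloads created before 0.8)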
@Test
public void spPrimarySenderDoesntHaveOwnKeyInList() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1, recipient2))
.withRecipientBoxes(List.of(recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(primaryPayload);
}
// From a pre-0.8 tx
@Test
public void spSecondarySenderDoesntHaveOwnKeyInList() {
final PublicKey sender = PublicKey.from("sender".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final byte[] recipient1Box = "box1".getBytes();
final EncodedPayload secondaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withNewRecipientKeys(List.of(recipient1, recipient2))
.withRecipientBoxes(List.of(recipient1Box, "box2".getBytes()))
.build();
final EncodedPayload primaryPayload =
EncodedPayload.Builder.create()
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withSenderKey(sender)
.withRecipientBox(recipient1Box)
.build();
final EncodedPayload result =
migrator.handleSingleTransaction(primaryPayload, secondaryPayload);
assertThat(result).isEqualToComparingFieldByFieldRecursively(secondaryPayload);
}
}
<|start_filename|>key-vault/aws-key-vault/src/main/java/module-info.java<|end_filename|>
module tessera.keyvault.aws {
requires software.amazon.awssdk.core;
requires software.amazon.awssdk.services.secretsmanager;
requires tessera.config;
requires tessera.keyvault.api;
requires com.fasterxml.jackson.core;
requires com.fasterxml.jackson.databind;
requires org.slf4j;
requires org.apache.commons.logging;
provides com.quorum.tessera.key.vault.KeyVaultServiceFactory with
com.quorum.tessera.key.vault.aws.AWSKeyVaultServiceFactory;
}
<|start_filename|>tessera-data/src/test/java/com/quorum/tessera/data/staging/StagingTransactionListenerTest.java<|end_filename|>
package com.quorum.tessera.data.staging;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.MockedStatic;
public class StagingTransactionListenerTest {
private final MockedStatic<PayloadEncoder> payloadEncoderFactoryFunction =
mockStatic(PayloadEncoder.class);
private StagingTransactionListener stagingTransactionListener;
private PayloadEncoder payloadEncoder;
@Before
public void beforeTest() {
stagingTransactionListener = new StagingTransactionListener();
payloadEncoder = mock(PayloadEncoder.class);
payloadEncoderFactoryFunction
.when(() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)))
.thenReturn(payloadEncoder);
}
@After
public void afterTest() {
try {
verifyNoMoreInteractions(payloadEncoder);
payloadEncoderFactoryFunction.verifyNoMoreInteractions();
} finally {
payloadEncoderFactoryFunction.close();
}
}
@Test
public void onLoad() {
byte[] payloadData = "PayloadData".getBytes();
StagingTransaction stagingTransaction = new StagingTransaction();
stagingTransaction.setEncodedPayloadCodec(EncodedPayloadCodec.LEGACY);
stagingTransaction.setPayload(payloadData);
stagingTransactionListener.onLoad(stagingTransaction);
verify(payloadEncoder).decode(payloadData);
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
}
}
<|start_filename|>server/jaxrs-client-unixsocket/src/main/java/com/quorum/tessera/jaxrs/unixsocket/JerseyUnixSocketConnector.java<|end_filename|>
package com.quorum.tessera.jaxrs.unixsocket;
import jakarta.ws.rs.ProcessingException;
import jakarta.ws.rs.core.MultivaluedMap;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.UriBuilder;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.api.ContentProvider;
import org.eclipse.jetty.client.api.ContentResponse;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.client.util.BytesContentProvider;
import org.eclipse.jetty.http.HttpMethod;
import org.eclipse.jetty.unixsocket.client.HttpClientTransportOverUnixSockets;
import org.glassfish.jersey.client.ClientRequest;
import org.glassfish.jersey.client.ClientResponse;
import org.glassfish.jersey.client.spi.AsyncConnectorCallback;
import org.glassfish.jersey.client.spi.Connector;
import org.glassfish.jersey.message.internal.Statuses;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class JerseyUnixSocketConnector implements Connector {
private static final Logger LOGGER = LoggerFactory.getLogger(JerseyUnixSocketConnector.class);
private HttpClient httpClient;
private URI unixfile;
public JerseyUnixSocketConnector(URI unixfile) {
this.unixfile = unixfile;
String unixFilePath = Paths.get(unixfile).toFile().getAbsolutePath();
httpClient = new HttpClient(new HttpClientTransportOverUnixSockets(unixFilePath));
try {
httpClient.start();
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
@Override
public ClientResponse apply(ClientRequest request) {
try {
return doApply(request);
} catch (Exception ex) {
throw new ProcessingException(ex);
}
}
private ClientResponse doApply(ClientRequest request) throws Exception {
HttpMethod httpMethod = HttpMethod.valueOf(request.getMethod());
final URI originalUri = request.getUri();
final URI uri;
Path basePath = Paths.get(unixfile);
if (originalUri.getScheme().startsWith("unix")) {
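      // unix-scheme URIs are rewritten to a placeholder http://localhost address; the host and
      // port are never used for routing since the request travels over the unix socket transport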
String path = originalUri.getRawPath().replaceFirst(basePath.toString(), "");
LOGGER.trace("Extracted path {} from {}", path, originalUri.getRawPath());
uri =
UriBuilder.fromUri(originalUri)
.replacePath(path)
.scheme("http")
.port(99)
.host("localhost")
.build();
LOGGER.trace("Created psuedo uri {} for originalUri {}", uri, originalUri);
} else {
uri = originalUri;
}
Request clientRequest = httpClient.newRequest(uri).method(httpMethod);
MultivaluedMap<String, Object> headers = request.getHeaders();
headers.keySet().stream()
.forEach(
name -> {
headers
.get(name)
.forEach(
value -> {
clientRequest.header(name, Objects.toString(value));
});
});
if (request.hasEntity()) {
try (ByteArrayOutputStream bout = new ByteArrayOutputStream()) {
request.setStreamProvider((int contentLength) -> bout);
request.writeEntity();
ContentProvider content = new BytesContentProvider(bout.toByteArray());
clientRequest.content(content);
}
}
final ContentResponse contentResponse = clientRequest.send();
int statusCode = contentResponse.getStatus();
String reason = contentResponse.getReason();
LOGGER.trace(
"uri {}, method: {},statusCode:{},reason: {} ", uri, httpMethod, statusCode, reason);
final Response.StatusType status = Statuses.from(statusCode, reason);
ClientResponse response = new ClientResponse(status, request);
contentResponse.getHeaders().stream()
.forEach(
header -> {
response.headers(header.getName(), (Object[]) header.getValues());
});
response.setEntityStream(new ByteArrayInputStream(contentResponse.getContent()));
return response;
}
@Override
public Future<?> apply(final ClientRequest request, final AsyncConnectorCallback callback) {
try {
callback.response(doApply(request));
} catch (IOException ex) {
callback.failure(new ProcessingException(ex));
} catch (Throwable t) {
callback.failure(t);
}
return CompletableFuture.completedFuture(null);
}
@Override
public String getName() {
return getClass().getSimpleName();
}
@Override
public void close() {
try {
httpClient.stop();
} catch (Exception ex) {
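      // ignore shutdown failures; the connector is being discarded and there is nothing useful
      // the caller can do with the error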
}
}
}
<|start_filename|>key-vault/hashicorp-key-vault/src/main/java/module-info.java<|end_filename|>
module tessera.keyvault.hashicorp {
requires spring.core;
requires spring.vault.core;
requires spring.web;
requires tessera.config;
requires tessera.keyvault.api;
requires org.slf4j;
requires org.apache.commons.logging;
requires com.fasterxml.jackson.core;
provides com.quorum.tessera.key.vault.KeyVaultServiceFactory with
com.quorum.tessera.key.vault.hashicorp.HashicorpKeyVaultServiceFactory;
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/test/java/com/quorum/tessera/p2p/RecoveryResourceTest.java<|end_filename|>
package com.quorum.tessera.p2p;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.enclave.PrivacyMode;
import com.quorum.tessera.p2p.recovery.PushBatchRequest;
import com.quorum.tessera.recovery.workflow.BatchResendManager;
import com.quorum.tessera.transaction.TransactionManager;
import jakarta.ws.rs.core.Response;
import java.util.Collections;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.MockedStatic;
public class RecoveryResourceTest {
private RecoveryResource recoveryResource;
private BatchResendManager resendManager;
private TransactionManager transactionManager;
private PayloadEncoder payloadEncoder;
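  // PayloadEncoder.create(...) is mocked statically so the resource under test always receives
  // the mock encoder created in onSetup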
private final MockedStatic<PayloadEncoder> payloadEncoderFactoryFunction =
mockStatic(PayloadEncoder.class);
@Before
public void onSetup() {
resendManager = mock(BatchResendManager.class);
transactionManager = mock(TransactionManager.class);
payloadEncoder = mock(PayloadEncoder.class);
payloadEncoderFactoryFunction
.when(() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)))
.thenReturn(payloadEncoder);
recoveryResource = new RecoveryResource(transactionManager, resendManager);
}
@After
public void onTearDown() {
try {
verifyNoMoreInteractions(transactionManager, resendManager, payloadEncoder);
payloadEncoderFactoryFunction.verifyNoMoreInteractions();
} finally {
payloadEncoderFactoryFunction.close();
}
}
@Test
public void pushBatch() {
PushBatchRequest pushBatchRequest =
new PushBatchRequest(Collections.singletonList("SomeData".getBytes()));
Response result = recoveryResource.pushBatch(pushBatchRequest);
assertThat(result.getStatus()).isEqualTo(200);
ArgumentCaptor<com.quorum.tessera.recovery.resend.PushBatchRequest> argCaptor =
ArgumentCaptor.forClass(com.quorum.tessera.recovery.resend.PushBatchRequest.class);
verify(resendManager).storeResendBatch(argCaptor.capture());
com.quorum.tessera.recovery.resend.PushBatchRequest capturedRequest = argCaptor.getValue();
assertThat(capturedRequest).isNotNull();
assertThat(capturedRequest.getEncodedPayloads()).containsExactly("SomeData".getBytes());
}
@Test
public void pushAllowedForStandardPrivate() {
final byte[] someData = "SomeData".getBytes();
final EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.STANDARD_PRIVATE);
when(payloadEncoder.decode(someData)).thenReturn(payload);
final Response result = recoveryResource.push(someData, null);
assertThat(result.getStatus()).isEqualTo(201);
assertThat(result.hasEntity()).isTrue();
verify(transactionManager).storePayload(payload);
verify(payloadEncoder).decode(someData);
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
}
@Test
public void pushNotAllowedForEnhancedPrivacy() {
final byte[] someData = "SomeData".getBytes();
final EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(payloadEncoder.decode(someData)).thenReturn(payload);
final Response result = recoveryResource.push(someData, null);
assertThat(result.getStatus()).isEqualTo(403);
verify(payloadEncoder).decode(someData);
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
}
}
<|start_filename|>config/src/test/java/com/quorum/tessera/config/util/SomeObject.java<|end_filename|>
package com.quorum.tessera.config.util;
import jakarta.xml.bind.annotation.XmlRootElement;
@XmlRootElement
public class SomeObject {
private String someValue;
public String getSomeValue() {
return someValue;
}
public void setSomeValue(final String someValue) {
this.someValue = someValue;
}
}
<|start_filename|>tessera-context/src/main/java/com/quorum/tessera/context/internal/DefaultRuntimeContext.java<|end_filename|>
package com.quorum.tessera.context.internal;
import com.quorum.tessera.config.keys.KeyEncryptor;
import com.quorum.tessera.context.RuntimeContext;
import com.quorum.tessera.encryption.PublicKey;
import jakarta.ws.rs.client.Client;
import java.net.URI;
import java.util.List;
import java.util.Set;
class DefaultRuntimeContext implements RuntimeContext {
private final Set<PublicKey> keys;
private final KeyEncryptor keyEncryptor;
private final List<PublicKey> alwaysSendTo;
private final List<URI> peers;
private final Client p2pClient;
private final boolean remoteKeyValidation;
private final boolean enhancedPrivacy;
private final URI p2pServerUri;
private final boolean disablePeerDiscovery;
private final boolean useWhiteList;
private final boolean recoveryMode;
private final boolean orionMode;
private final boolean multiplePrivateStates;
protected DefaultRuntimeContext(
Set<PublicKey> keys,
KeyEncryptor keyEncryptor,
List<PublicKey> alwaysSendTo,
List<URI> peers,
Client p2pClient,
boolean remoteKeyValidation,
boolean enhancedPrivacy,
URI p2pServerUri,
boolean disablePeerDiscovery,
boolean useWhiteList,
boolean recoveryMode,
boolean orionMode,
boolean multiplePrivateStates) {
this.keys = Set.copyOf(keys);
this.keyEncryptor = keyEncryptor;
this.alwaysSendTo = List.copyOf(alwaysSendTo);
this.peers = List.copyOf(peers);
this.p2pClient = p2pClient;
this.remoteKeyValidation = remoteKeyValidation;
this.enhancedPrivacy = enhancedPrivacy;
this.p2pServerUri = p2pServerUri;
this.disablePeerDiscovery = disablePeerDiscovery;
this.useWhiteList = useWhiteList;
this.recoveryMode = recoveryMode;
this.orionMode = orionMode;
this.multiplePrivateStates = multiplePrivateStates;
}
public Set<PublicKey> getKeys() {
return Set.copyOf(keys);
}
public KeyEncryptor getKeyEncryptor() {
return keyEncryptor;
}
public List<PublicKey> getAlwaysSendTo() {
return alwaysSendTo;
}
public List<URI> getPeers() {
return peers;
}
public Client getP2pClient() {
return p2pClient;
}
public boolean isRemoteKeyValidation() {
return remoteKeyValidation;
}
@Override
public boolean isEnhancedPrivacy() {
return enhancedPrivacy;
}
public URI getP2pServerUri() {
return p2pServerUri;
}
@Override
public boolean isDisablePeerDiscovery() {
return disablePeerDiscovery;
}
@Override
public boolean isUseWhiteList() {
return useWhiteList;
}
@Override
public boolean isRecoveryMode() {
return recoveryMode;
}
@Override
public Set<PublicKey> getPublicKeys() {
return Set.copyOf(this.keys);
}
@Override
public boolean isOrionMode() {
return orionMode;
}
@Override
public boolean isMultiplePrivateStates() {
return multiplePrivateStates;
}
@Override
public String toString() {
return "DefaultRuntimeContext{"
+ "keys="
+ keys
+ ", keyEncryptor="
+ keyEncryptor
+ ", alwaysSendTo="
+ alwaysSendTo
+ ", peers="
+ peers
+ ", p2pClient="
+ p2pClient
+ ", remoteKeyValidation="
+ remoteKeyValidation
+ ", enhancedPrivacy="
+ enhancedPrivacy
+ ", p2pServerUri="
+ p2pServerUri
+ ", disablePeerDiscovery="
+ disablePeerDiscovery
+ ", useWhiteList="
+ useWhiteList
+ ", recoveryMode="
+ recoveryMode
+ ", orionMode="
+ orionMode
+ ", multiplePrivateStates="
+ multiplePrivateStates
+ '}';
}
}
<|start_filename|>tessera-core/src/main/java/com/quorum/tessera/transaction/internal/PrivacyHelperProvider.java<|end_filename|>
package com.quorum.tessera.transaction.internal;
import com.quorum.tessera.context.RuntimeContext;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.transaction.PrivacyHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PrivacyHelperProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(PrivacyHelperProvider.class);
public static PrivacyHelper provider() {
RuntimeContext runtimeContext = RuntimeContext.getInstance();
LOGGER.debug("Creating PrivacyHelper");
boolean privacyEnabled = runtimeContext.isEnhancedPrivacy();
EncryptedTransactionDAO encryptedTransactionDAO = EncryptedTransactionDAO.create();
PrivacyHelper privacyHelper = new PrivacyHelperImpl(encryptedTransactionDAO, privacyEnabled);
LOGGER.debug("Created PrivacyHelper {}", privacyHelper);
return privacyHelper;
}
}
<|start_filename|>tessera-core/src/main/java/com/quorum/tessera/transaction/internal/TransactionManagerProvider.java<|end_filename|>
package com.quorum.tessera.transaction.internal;
import com.quorum.tessera.data.EncryptedRawTransactionDAO;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.enclave.Enclave;
import com.quorum.tessera.enclave.PayloadDigest;
import com.quorum.tessera.transaction.PrivacyHelper;
import com.quorum.tessera.transaction.TransactionManager;
import com.quorum.tessera.transaction.publish.BatchPayloadPublisher;
import com.quorum.tessera.transaction.publish.PayloadPublisher;
import com.quorum.tessera.transaction.resend.ResendManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TransactionManagerProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(TransactionManagerProvider.class);
public static TransactionManager provider() {
final TransactionManagerHolder transactionManagerHolder = TransactionManagerHolder.INSTANCE;
if (transactionManagerHolder.getTransactionManager().isPresent()) {
return transactionManagerHolder.getTransactionManager().get();
}
final EncryptedTransactionDAO encryptedTransactionDAO = EncryptedTransactionDAO.create();
final Enclave enclave = Enclave.create();
final EncryptedRawTransactionDAO encryptedRawTransactionDAO =
EncryptedRawTransactionDAO.create();
LOGGER.debug("Creating ResendManager");
final ResendManager resendManager = ResendManager.create();
LOGGER.debug("Created ResendManager {}", resendManager);
LOGGER.debug("Creating payload publisher");
final PayloadPublisher payloadPublisher = PayloadPublisher.create();
LOGGER.debug("Created payload publisher {}", payloadPublisher);
LOGGER.debug("Creating batchPayloadPublisher");
final BatchPayloadPublisher batchPayloadPublisher = BatchPayloadPublisher.create();
LOGGER.debug("Created batchPayloadPublisher {}", batchPayloadPublisher);
LOGGER.debug("Creating PrivacyHelper");
final PrivacyHelper privacyHelper = PrivacyHelper.create();
LOGGER.debug("Created PrivacyHelper {}", privacyHelper);
int resendBatchSize = 100;
LOGGER.debug("Creating PayloadDigest");
final PayloadDigest messageHashFactory = PayloadDigest.create();
LOGGER.debug("Created PayloadDigest {}", messageHashFactory);
return transactionManagerHolder.store(
new TransactionManagerImpl(
enclave,
encryptedTransactionDAO,
encryptedRawTransactionDAO,
resendManager,
batchPayloadPublisher,
privacyHelper,
messageHashFactory));
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/constraints/UrlValidator.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import jakarta.validation.ConstraintValidator;
import jakarta.validation.ConstraintValidatorContext;
import java.net.MalformedURLException;
import java.net.URL;
public class UrlValidator implements ConstraintValidator<ValidUrl, String> {
@Override
public boolean isValid(String value, ConstraintValidatorContext context) {
try {
new URL(value);
return true;
} catch (MalformedURLException e) {
context.disableDefaultConstraintViolation();
context
.buildConstraintViolationWithTemplate(String.format("Invalid URL: %s", e.getMessage()))
.addConstraintViolation();
return false;
}
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/test/java/com/quorum/tessera/api/common/UpCheckResourceTest.java<|end_filename|>
package com.quorum.tessera.api.common;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import jakarta.ws.rs.core.Response;
import org.junit.Test;
public class UpCheckResourceTest {
private UpCheckResource resource = new UpCheckResource();
@Test
public void upCheck() {
final Response response = resource.upCheck();
assertThat(response.getStatus()).isEqualTo(200);
assertThat(response.getEntity()).isEqualTo("I'm up!");
}
}
<|start_filename|>tessera-recover/src/main/java/module-info.java<|end_filename|>
module tessera.recovery {
requires tessera.config;
requires tessera.data;
requires tessera.partyinfo;
requires tessera.enclave.api;
requires tessera.shared;
requires tessera.encryption.api;
requires tessera.context;
requires org.slf4j;
requires tessera.transaction;
requires jakarta.persistence;
exports com.quorum.tessera.recovery;
exports com.quorum.tessera.recovery.resend;
exports com.quorum.tessera.recovery.workflow;
uses com.quorum.tessera.recovery.Recovery;
uses com.quorum.tessera.recovery.workflow.BatchResendManager;
uses com.quorum.tessera.recovery.workflow.BatchWorkflowFactory;
uses com.quorum.tessera.recovery.resend.BatchTransactionRequester;
uses com.quorum.tessera.recovery.resend.ResendBatchPublisher;
uses com.quorum.tessera.recovery.workflow.LegacyResendManager;
provides com.quorum.tessera.recovery.workflow.BatchResendManager with
com.quorum.tessera.recovery.workflow.internal.BatchResendManagerProvider;
provides com.quorum.tessera.recovery.Recovery with
com.quorum.tessera.recovery.internal.RecoveryProvider;
provides com.quorum.tessera.recovery.workflow.BatchWorkflowFactory with
com.quorum.tessera.recovery.workflow.internal.BatchWorkflowFactoryProvider;
provides com.quorum.tessera.recovery.workflow.LegacyResendManager with
com.quorum.tessera.recovery.workflow.internal.LegacyResendManagerProvider;
}
<|start_filename|>tessera-data/src/test/java/com/quorum/tessera/data/EncryptedTransactionListenerTest.java<|end_filename|>
package com.quorum.tessera.data;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.MockedStatic;
public class EncryptedTransactionListenerTest {
private final MockedStatic<PayloadEncoder> payloadEncoderFactoryFunction =
mockStatic(PayloadEncoder.class);
private EncryptedTransactionListener encryptedTransactionListener;
private PayloadEncoder payloadEncoder;
@Before
public void beforeTest() {
encryptedTransactionListener = new EncryptedTransactionListener();
payloadEncoder = mock(PayloadEncoder.class);
payloadEncoderFactoryFunction
.when(() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)))
.thenReturn(payloadEncoder);
}
@After
public void afterTest() {
try {
verifyNoMoreInteractions(payloadEncoder);
payloadEncoderFactoryFunction.verifyNoMoreInteractions();
} finally {
payloadEncoderFactoryFunction.close();
}
}
@Test
public void onLoad() {
byte[] payloadData = "PayloadData".getBytes();
EncodedPayload payload = mock(EncodedPayload.class);
when(payloadEncoder.decode(payloadData)).thenReturn(payload);
EncryptedTransaction encryptedTransaction = new EncryptedTransaction();
encryptedTransaction.setEncodedPayloadCodec(EncodedPayloadCodec.CBOR);
encryptedTransaction.setEncodedPayload(payloadData);
encryptedTransactionListener.onLoad(encryptedTransaction);
verify(payloadEncoder).decode(payloadData);
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
assertThat(encryptedTransaction.getPayload()).isEqualTo(payload);
}
@Test
public void onLoadLegacyEncodedData() {
byte[] payloadData = "PayloadData".getBytes();
EncodedPayload payload = mock(EncodedPayload.class);
when(payloadEncoder.decode(payloadData)).thenReturn(payload);
EncryptedTransaction encryptedTransaction = new EncryptedTransaction();
encryptedTransaction.setEncodedPayload(payloadData);
encryptedTransactionListener.onLoad(encryptedTransaction);
verify(payloadEncoder).decode(payloadData);
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(eq(EncodedPayloadCodec.LEGACY)));
assertThat(encryptedTransaction.getPayload()).isEqualTo(payload);
assertThat(encryptedTransaction.getEncodedPayloadCodec()).isEqualTo(EncodedPayloadCodec.LEGACY);
}
@Test
public void onSave() {
EncodedPayload encodedPayload = mock(EncodedPayload.class);
EncryptedTransaction encryptedTransaction = new EncryptedTransaction();
encryptedTransaction.setPayload(encodedPayload);
byte[] payloadData = "PayloadData".getBytes();
when(payloadEncoder.encode(encodedPayload)).thenReturn(payloadData);
encryptedTransactionListener.onSave(encryptedTransaction);
verify(payloadEncoder).encode(encodedPayload);
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(EncodedPayloadCodec.current()));
assertThat(encryptedTransaction.getEncodedPayload()).isEqualTo(payloadData);
}
}
<|start_filename|>tests/acceptance-test/src/test/java/transaction/raw/RawSteps.java<|end_filename|>
package transaction.raw;
import static com.quorum.tessera.test.rest.RawHeaderName.RECIPIENTS;
import static com.quorum.tessera.test.rest.RawHeaderName.SENDER;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.api.ReceiveResponse;
import com.quorum.tessera.test.Party;
import com.quorum.tessera.test.PartyHelper;
import com.quorum.tessera.test.rest.RestUtils;
import io.cucumber.java8.En;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.client.Invocation;
import jakarta.ws.rs.client.WebTarget;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.net.URI;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
import suite.ExecutionContext;
public class RawSteps implements En {
private RestUtils restUtils = new RestUtils();
private PartyHelper partyHelper = PartyHelper.create();
private Party getSender(Collection<String> senderHolder) {
return partyHelper.findByAlias(senderHolder.stream().findAny().get());
}
private Set<Party> getRecipientParties(Set<String> recipientAliases) {
return recipientAliases.stream().map(partyHelper::findByAlias).collect(Collectors.toSet());
}
public RawSteps() {
final Collection<String> senderHolder = new ArrayList<>();
final Set<String> recipients = new HashSet<>();
final byte[] transactionData = restUtils.createTransactionData();
final Set<String> storedHashes = new TreeSet<>();
Given(
"^Sender party (.+)$",
(String pty) -> {
senderHolder.add(pty);
});
And(
"^Recipient part(?:y|ies) (.+)$",
(String alias) -> {
parseAliases(alias).stream().forEach(recipients::add);
assertThat(recipients).isNotEmpty();
});
And(
"^all parties are running$",
() -> {
final Client client = partyHelper.getParties().findAny().get().getRestClient();
assertThat(
partyHelper
.getParties()
.map(Party::getP2PUri)
.map(client::target)
.map(t -> t.path("upcheck"))
.map(WebTarget::request)
.map(Invocation.Builder::get)
.allMatch(r -> r.getStatus() == 200))
.describedAs("All serers are up ")
.isTrue();
});
When(
"^sender party receives transaction from Quorum peer$",
() -> {
Party sender = getSender(senderHolder);
Response response =
restUtils.sendRaw(
sender, transactionData, getRecipientParties(recipients).toArray(new Party[0]));
assertThat(response.getStatus()).isEqualTo(200);
String persistedKey = response.readEntity(String.class);
assertThat(persistedKey).isNotNull();
storedHashes.add(persistedKey);
});
When(
"sender party receives transaction with no sender key defined from Quorum peer",
() -> {
Party sender = getSender(senderHolder);
final Response response =
sender
.getRestClientWebTarget()
.path("sendraw")
.request()
.header(
RECIPIENTS,
recipients.stream()
.map(partyHelper::findByAlias)
.map(Party::getPublicKey)
.collect(Collectors.joining(",")))
.post(Entity.entity(transactionData, MediaType.APPLICATION_OCTET_STREAM));
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(200);
String persistedKey = response.readEntity(String.class);
assertThat(persistedKey).isNotNull();
storedHashes.add(persistedKey);
URI location = response.getLocation();
recipients.stream()
.map(partyHelper::findByAlias)
.map(Party::getRestClient)
.forEach(
client -> {
final Response checkPersistedTxnResponse =
client.target(location).request().get();
assertThat(checkPersistedTxnResponse.getStatus()).isEqualTo(200);
ReceiveResponse receiveResponse =
checkPersistedTxnResponse.readEntity(ReceiveResponse.class);
assertThat(receiveResponse.getPayload()).isEqualTo(transactionData);
final Set<Party> recipientParties = getRecipientParties(recipients);
List<Response> responses =
restUtils
.findTransaction(persistedKey, recipientParties)
.collect(Collectors.toList());
responses.forEach(
r -> {
assertThat(r.getStatus())
.describedAs("find transaction for " + recipients + ". " + r)
.isEqualTo(200);
});
});
restUtils
.findTransaction(
persistedKey, partyHelper.findByAlias("C"), partyHelper.findByAlias("B"))
.forEach(
r -> {
assertThat(r.getStatus()).isEqualTo(404);
});
});
When(
"sender party receives transaction with no payload from Quorum peer",
() -> {
Party sender = getSender(senderHolder);
Response response = restUtils.sendRaw(sender, null, getRecipientParties(recipients));
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(400);
});
When(
"sender party receives transaction with an unknown party from Quorum peer",
() -> {
Party sender = getSender(senderHolder);
ExecutionContext executionContext = ExecutionContext.currentContext();
String unknown =
transaction.utils.Utils.generateValidButUnknownPublicKey(
executionContext.getEncryptorType())
.encodeToBase64();
final Response response =
sender
.getRestClientWebTarget()
.path("sendraw")
.request()
.header(SENDER, sender.getPublicKey())
.header(RECIPIENTS, unknown)
.post(Entity.entity(transactionData, MediaType.APPLICATION_OCTET_STREAM));
assertThat(response).isNotNull();
assertThat(response.getStatus()).isEqualTo(404);
});
Then(
"an invalid request error is raised",
() -> {
// FIXME: validated in sending function
});
Then(
"^sender party stores the transaction$",
() -> {
Party sender = getSender(senderHolder);
try (PreparedStatement statement =
sender
.getDatabaseConnection()
.prepareStatement("SELECT COUNT(*) FROM ENCRYPTED_TRANSACTION WHERE HASH = ?")) {
statement.setBytes(1, Base64.getDecoder().decode(storedHashes.iterator().next()));
try (ResultSet results = statement.executeQuery()) {
assertThat(results.next()).isTrue();
assertThat(results.getLong(1)).isEqualTo(1);
}
}
});
Then(
"^forwards the transaction to recipient part(?:y|ies)$",
() -> {
recipients.stream()
.map(partyHelper::findByAlias)
.forEach(
rec -> {
String storedHash = storedHashes.stream().findAny().get();
Response response = restUtils.receiveRaw(storedHash, rec);
assertThat(response.getStatus()).isEqualTo(200);
final byte[] result = response.readEntity(byte[].class);
assertThat(result).isEqualTo(transactionData);
});
});
Then(
"^.*does not forward transaction to any recipients?$",
() -> {
partyHelper
.getParties()
.filter(p -> !senderHolder.contains(p.getAlias()))
.forEach(
p -> {
String storedHash = storedHashes.stream().findAny().get();
Response response = restUtils.receiveRaw(storedHash, p);
assertThat(response.getStatus()).isEqualTo(404);
});
});
}
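  // Splits an alias expression such as "A,B" or "A and B" into the individual party aliases.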
static List<String> parseAliases(String alias) {
return Arrays.asList(alias.split(",| and "));
}
}
<|start_filename|>tessera-recover/src/test/java/com/quorum/tessera/recovery/workflow/PreparePayloadForRecipientTest.java<|end_filename|>
package com.quorum.tessera.recovery.workflow;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.PrivacyMode;
import com.quorum.tessera.enclave.RecipientBox;
import com.quorum.tessera.encryption.PublicKey;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
public class PreparePayloadForRecipientTest {
private PreparePayloadForRecipient preparePayloadForRecipient;
@Before
public void onSetup() {
preparePayloadForRecipient = new PreparePayloadForRecipient();
}
@Test
public void targetKeyIsRecipientOfTransaction() {
final PublicKey targetResendKey = PublicKey.from("target".getBytes());
final EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getRecipientKeys()).thenReturn(List.of(targetResendKey));
when(payload.getRecipientBoxes()).thenReturn(List.of(mock(RecipientBox.class)));
final BatchWorkflowContext workflowEvent = new BatchWorkflowContext();
workflowEvent.setEncodedPayload(payload);
workflowEvent.setRecipientKey(targetResendKey);
final EncodedPayload formattedPayload = mock(EncodedPayload.class);
try (var mockStatic = mockStatic(EncodedPayload.Builder.class)) {
EncodedPayload.Builder builder = mock(EncodedPayload.Builder.class);
mockStatic
.when(() -> EncodedPayload.Builder.forRecipient(payload, targetResendKey))
.thenReturn(builder);
when(builder.build()).thenReturn(formattedPayload);
preparePayloadForRecipient.execute(workflowEvent);
mockStatic.verify(() -> EncodedPayload.Builder.forRecipient(payload, targetResendKey));
}
final Set<EncodedPayload> payloadsToPublish = workflowEvent.getPayloadsToPublish();
assertThat(payloadsToPublish).containsExactly(formattedPayload);
}
@Test
public void targetKeyIsSenderOfTransactionWithRecipientsPresent() {
final PublicKey targetResendKey = PublicKey.from("target".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getSenderKey()).thenReturn(targetResendKey);
when(payload.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
when(payload.getRecipientBoxes())
.thenReturn(List.of(mock(RecipientBox.class), mock(RecipientBox.class)));
final BatchWorkflowContext workflowEvent = new BatchWorkflowContext();
workflowEvent.setEncodedPayload(payload);
workflowEvent.setRecipientKey(targetResendKey);
try (var mockStatic = mockStatic(EncodedPayload.Builder.class)) {
EncodedPayload.Builder builder1 = mock(EncodedPayload.Builder.class);
when(builder1.build()).thenReturn(mock(EncodedPayload.class));
EncodedPayload.Builder builder2 = mock(EncodedPayload.Builder.class);
when(builder2.build()).thenReturn(mock(EncodedPayload.class));
mockStatic
.when(() -> EncodedPayload.Builder.forRecipient(payload, recipient1))
.thenReturn(builder1);
mockStatic
.when(() -> EncodedPayload.Builder.forRecipient(payload, recipient2))
.thenReturn(builder2);
preparePayloadForRecipient.execute(workflowEvent);
mockStatic.verify(() -> EncodedPayload.Builder.forRecipient(payload, recipient1));
mockStatic.verify(() -> EncodedPayload.Builder.forRecipient(payload, recipient2));
verify(builder1).build();
verify(builder2).build();
}
final Set<EncodedPayload> payloadsToPublish = workflowEvent.getPayloadsToPublish();
assertThat(payloadsToPublish).hasSize(2);
}
@Test
public void targetKeyIsSenderOfTransactionWithNoRecipientsPresent() {
final PublicKey targetResendKey = PublicKey.from("target".getBytes());
final EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getSenderKey()).thenReturn(targetResendKey);
when(payload.getRecipientBoxes())
.thenReturn(
List.of(RecipientBox.from("box1".getBytes()), RecipientBox.from("box2".getBytes())));
final BatchWorkflowContext workflowEvent = new BatchWorkflowContext();
workflowEvent.setEncodedPayload(payload);
workflowEvent.setRecipientKey(targetResendKey);
preparePayloadForRecipient.execute(workflowEvent);
final Set<EncodedPayload> payloadsToPublish = workflowEvent.getPayloadsToPublish();
assertThat(payloadsToPublish.size()).isEqualTo(2);
assertThat(
payloadsToPublish.stream()
.map(EncodedPayload::getSenderKey)
.filter(targetResendKey::equals)
.count())
.isEqualTo(2);
assertThat(
payloadsToPublish.stream()
.map(EncodedPayload::getRecipientBoxes)
.flatMap(Collection::stream)
.collect(Collectors.toList()))
.containsExactlyInAnyOrder(
RecipientBox.from("box1".getBytes()), RecipientBox.from("box2".getBytes()));
}
@Test
public void psvTransactionOnlyUsesKeysWithBoxes() {
final PublicKey targetResendKey = PublicKey.from("target".getBytes());
final PublicKey recipient1 = PublicKey.from("recipient1".getBytes());
final PublicKey recipient2 = PublicKey.from("recipient2".getBytes());
final EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getSenderKey()).thenReturn(targetResendKey);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.PRIVATE_STATE_VALIDATION);
when(payload.getRecipientKeys()).thenReturn(List.of(recipient1, recipient2));
when(payload.getRecipientBoxes()).thenReturn(List.of(RecipientBox.from("box1".getBytes())));
final BatchWorkflowContext workflowEvent = new BatchWorkflowContext();
workflowEvent.setEncodedPayload(payload);
workflowEvent.setRecipientKey(targetResendKey);
try (var mockStatic = mockStatic(EncodedPayload.Builder.class)) {
EncodedPayload.Builder builder1 = mock(EncodedPayload.Builder.class);
when(builder1.build()).thenReturn(mock(EncodedPayload.class));
EncodedPayload.Builder builder2 = mock(EncodedPayload.Builder.class);
when(builder2.build()).thenReturn(mock(EncodedPayload.class));
mockStatic
.when(() -> EncodedPayload.Builder.forRecipient(payload, recipient1))
.thenReturn(builder1);
mockStatic
.when(() -> EncodedPayload.Builder.forRecipient(payload, recipient2))
.thenReturn(builder2);
preparePayloadForRecipient.execute(workflowEvent);
mockStatic.verify(() -> EncodedPayload.Builder.forRecipient(payload, recipient1));
verify(builder1).build();
verifyNoMoreInteractions(builder1, builder2);
}
final Set<EncodedPayload> payloadsToPublish = workflowEvent.getPayloadsToPublish();
assertThat(payloadsToPublish).hasSize(1);
}
}
<|start_filename|>tessera-context/src/main/java/com/quorum/tessera/context/RuntimeContext.java<|end_filename|>
package com.quorum.tessera.context;
import com.quorum.tessera.config.keys.KeyEncryptor;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.serviceloader.ServiceLoaderUtil;
import jakarta.ws.rs.client.Client;
import java.net.URI;
import java.util.List;
import java.util.ServiceLoader;
import java.util.Set;
public interface RuntimeContext {
Set<PublicKey> getKeys();
KeyEncryptor getKeyEncryptor();
List<PublicKey> getAlwaysSendTo();
List<URI> getPeers();
Client getP2pClient();
boolean isRemoteKeyValidation();
boolean isEnhancedPrivacy();
URI getP2pServerUri();
boolean isDisablePeerDiscovery();
boolean isUseWhiteList();
boolean isRecoveryMode();
Set<PublicKey> getPublicKeys();
boolean isOrionMode();
boolean isMultiplePrivateStates();
static RuntimeContext getInstance() {
return ServiceLoaderUtil.loadSingle(ServiceLoader.load(RuntimeContext.class));
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/KeyConfiguration.java<|end_filename|>
package com.quorum.tessera.config;
import com.quorum.tessera.config.adapters.PathAdapter;
import com.quorum.tessera.config.constraints.ValidPath;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
@XmlAccessorType(XmlAccessType.FIELD)
public class KeyConfiguration extends ConfigItem {
@ValidPath(checkExists = true, message = "Password file does not exist")
@XmlElement(type = String.class)
@XmlJavaTypeAdapter(PathAdapter.class)
private Path passwordFile;
@Size(
max = 0,
message =
"For security reasons, passwords should not be provided directly in the config. Provide them in a separate file with \"passwordFile\" or at the CLI prompt during node startup.")
private List<String> passwords;
@Valid
@NotNull
@Size(min = 1, message = "At least 1 public/private key pair must be provided")
private List<KeyData> keyData;
@XmlElement private List<@Valid DefaultKeyVaultConfig> keyVaultConfigs;
@Valid @XmlElement private AzureKeyVaultConfig azureKeyVaultConfig;
@Valid @XmlElement private HashicorpKeyVaultConfig hashicorpKeyVaultConfig;
public KeyConfiguration(
final Path passwordFile,
final List<String> passwords,
final List<KeyData> keyData,
final AzureKeyVaultConfig azureKeyVaultConfig,
final HashicorpKeyVaultConfig hashicorpKeyVaultConfig) {
this.passwordFile = passwordFile;
this.passwords = passwords;
this.keyData = keyData;
this.azureKeyVaultConfig = azureKeyVaultConfig;
this.hashicorpKeyVaultConfig = hashicorpKeyVaultConfig;
if (null != azureKeyVaultConfig) {
addKeyVaultConfig(azureKeyVaultConfig);
}
if (null != hashicorpKeyVaultConfig) {
addKeyVaultConfig(hashicorpKeyVaultConfig);
}
}
public KeyConfiguration() {}
public Path getPasswordFile() {
return this.passwordFile;
}
public List<String> getPasswords() {
return this.passwords;
}
public List<KeyData> getKeyData() {
return this.keyData;
}
public AzureKeyVaultConfig getAzureKeyVaultConfig() {
return this.azureKeyVaultConfig;
}
public HashicorpKeyVaultConfig getHashicorpKeyVaultConfig() {
return hashicorpKeyVaultConfig;
}
public List<KeyVaultConfig> getKeyVaultConfigs() {
if (keyVaultConfigs == null) {
return null;
}
return keyVaultConfigs.stream().map(KeyVaultConfig.class::cast).collect(Collectors.toList());
}
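  // Resolution order: the legacy azureKeyVaultConfig/hashicorpKeyVaultConfig fields take precedence
  // for their respective vault types; otherwise the generic keyVaultConfigs list is searched.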
public Optional<DefaultKeyVaultConfig> getKeyVaultConfig(KeyVaultType type) {
if (type == null) {
return Optional.empty();
}
if (KeyVaultType.AZURE.equals(type) && azureKeyVaultConfig != null) {
return Optional.of(KeyVaultConfigConverter.convert(azureKeyVaultConfig));
}
if (KeyVaultType.HASHICORP.equals(type) && hashicorpKeyVaultConfig != null) {
return Optional.of(KeyVaultConfigConverter.convert(hashicorpKeyVaultConfig));
}
if (keyVaultConfigs == null) {
return Optional.empty();
}
return keyVaultConfigs.stream().filter(c -> type.equals(c.getKeyVaultType())).findFirst();
}
public void setPasswordFile(Path passwordFile) {
this.passwordFile = passwordFile;
}
public void setPasswords(List<String> passwords) {
this.passwords = passwords;
}
public void setKeyData(List<KeyData> keyData) {
this.keyData = keyData;
}
public void addKeyVaultConfig(KeyVaultConfig keyVaultConfig) {
if (keyVaultConfigs == null) {
keyVaultConfigs = new ArrayList<>();
}
final DefaultKeyVaultConfig typedKeyVaultConfig;
if (AzureKeyVaultConfig.class.isInstance(keyVaultConfig)) {
typedKeyVaultConfig =
KeyVaultConfigConverter.convert(AzureKeyVaultConfig.class.cast(keyVaultConfig));
} else if (HashicorpKeyVaultConfig.class.isInstance(keyVaultConfig)) {
typedKeyVaultConfig =
KeyVaultConfigConverter.convert(HashicorpKeyVaultConfig.class.cast(keyVaultConfig));
} else {
typedKeyVaultConfig = DefaultKeyVaultConfig.class.cast(keyVaultConfig);
}
keyVaultConfigs.add(typedKeyVaultConfig);
}
}
<|start_filename|>tessera-jaxrs/jaxrs-client/src/main/java/module-info.java<|end_filename|>
module tessera.jaxrs.client {
requires jakarta.ws.rs;
requires tessera.config;
requires tessera.security;
requires tessera.shared;
requires tessera.context;
exports com.quorum.tessera.jaxrs.client;
provides com.quorum.tessera.context.RestClientFactory with
com.quorum.tessera.jaxrs.client.ClientFactory;
uses com.quorum.tessera.ssl.context.SSLContextFactory;
uses com.quorum.tessera.ssl.context.ClientSSLContextFactory;
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/ResidentGroup.java<|end_filename|>
package com.quorum.tessera.config;
import jakarta.validation.constraints.NotNull;
import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import java.util.List;
@XmlAccessorType(XmlAccessType.FIELD)
public class ResidentGroup extends ConfigItem {
@XmlElement @NotNull private String name;
@XmlElement private String description;
@XmlElement private List<String> members;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<String> getMembers() {
return members;
}
public void setMembers(List<String> members) {
this.members = members;
}
}
<|start_filename|>tessera-jaxrs/openapi/generate/src/main/java/module-info.java<|end_filename|>
module tessera.openapi.generate {
requires static tessera.common.jaxrs;
requires static tessera.partyinfo.jaxrs;
requires static tessera.transaction.jaxrs;
requires static tessera.thirdparty.jaxrs;
requires static tessera.openapi.common;
requires static tessera.enclave.api;
requires static tessera.partyinfo;
requires static tessera.transaction;
requires static tessera.shared;
requires static tessera.partyinfo.model;
requires static tessera.encryption.api;
requires static tessera.config;
requires static tessera.recovery;
requires static jakarta.json;
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/main/java/com/quorum/tessera/api/constraint/RequestPrivacyValidator.java<|end_filename|>
package com.quorum.tessera.api.constraint;
import com.quorum.tessera.api.SendRequest;
import com.quorum.tessera.api.SendSignedRequest;
import com.quorum.tessera.enclave.PrivacyMode;
import jakarta.validation.ConstraintValidator;
import jakarta.validation.ConstraintValidatorContext;
import java.util.Objects;
public class RequestPrivacyValidator implements ConstraintValidator<PrivacyValid, Object> {
@Override
public boolean isValid(Object request, ConstraintValidatorContext context) {
PrivacyMode privacyMode;
String execHash;
if (request instanceof SendRequest) {
privacyMode = PrivacyMode.fromFlag(((SendRequest) request).getPrivacyFlag());
execHash = ((SendRequest) request).getExecHash();
} else if (request instanceof SendSignedRequest) {
privacyMode = PrivacyMode.fromFlag(((SendSignedRequest) request).getPrivacyFlag());
execHash = ((SendSignedRequest) request).getExecHash();
} else {
context
.buildConstraintViolationWithTemplate(
"Invalid usage. This validator can only be apply to SendRequest or SendSignedRequest")
.addConstraintViolation();
return false;
}
if (PrivacyMode.PRIVATE_STATE_VALIDATION == privacyMode) {
if (Objects.isNull(execHash) || execHash.length() == 0) {
context.buildConstraintViolationWithTemplate("Exec hash missing").addConstraintViolation();
return false;
}
}
return true;
}
}
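// A minimal illustration (hypothetical wiring, using a mocked ConstraintValidatorContext; not part of
// the production code): a request declaring PRIVATE_STATE_VALIDATION without an execution hash is
// rejected with the "Exec hash missing" violation, e.g.
//
//   SendRequest request = new SendRequest();
//   request.setPrivacyFlag(3);   // assumed flag value for PrivacyMode.PRIVATE_STATE_VALIDATION
//   request.setExecHash(null);
//   boolean valid = new RequestPrivacyValidator().isValid(request, context); // -> false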
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/main/java/com/quorum/tessera/p2p/RecoveryResource.java<|end_filename|>
package com.quorum.tessera.p2p;
import static jakarta.ws.rs.core.MediaType.APPLICATION_JSON;
import static jakarta.ws.rs.core.MediaType.APPLICATION_OCTET_STREAM;
import static java.util.Collections.emptyList;
import com.quorum.tessera.data.MessageHash;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.enclave.PrivacyMode;
import com.quorum.tessera.p2p.recovery.PushBatchRequest;
import com.quorum.tessera.recovery.workflow.BatchResendManager;
import com.quorum.tessera.shared.Constants;
import com.quorum.tessera.transaction.TransactionManager;
import io.swagger.v3.oas.annotations.Hidden;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.HeaderParam;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.core.Response;
import java.util.*;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Tag(name = "peer-to-peer")
@Path("/")
public class RecoveryResource {
private static final Logger LOGGER = LoggerFactory.getLogger(RecoveryResource.class);
private final TransactionManager transactionManager;
private final BatchResendManager batchResendManager;
public RecoveryResource(
TransactionManager transactionManager, BatchResendManager batchResendManager) {
this.transactionManager = Objects.requireNonNull(transactionManager);
this.batchResendManager = Objects.requireNonNull(batchResendManager);
}
@Operation(
summary = "/pushBatch",
operationId = "pushPayloadBatch",
description =
"store batch of encoded payloads to the server's database (available only when the server is in recovery mode)")
@ApiResponse(responseCode = "200", description = "batch successfully stored")
@POST
@Path("pushBatch")
@Consumes(APPLICATION_JSON)
public Response pushBatch(@Valid @NotNull final PushBatchRequest pushBatchRequest) {
LOGGER.debug("Received push request");
com.quorum.tessera.recovery.resend.PushBatchRequest request =
com.quorum.tessera.recovery.resend.PushBatchRequest.from(
pushBatchRequest.getEncodedPayloads(), EncodedPayloadCodec.LEGACY);
batchResendManager.storeResendBatch(request);
LOGGER.debug("Push batch processed successfully");
return Response.status(Response.Status.OK).build();
}
// path /push with application/octet-stream is overloaded (RecoveryResource &
// TransactionResource); swagger annotations cannot handle situations like this so hide this
// operation and use TransactionResource::push to document both
@Hidden
@POST
@Path("push")
@Consumes(APPLICATION_OCTET_STREAM)
public Response push(
final byte[] payload, @HeaderParam(Constants.API_VERSION_HEADER) final List<String> headers) {
LOGGER.debug("Received push request during recovery mode");
final Set<String> versions =
Optional.ofNullable(headers).orElse(emptyList()).stream()
.filter(Objects::nonNull)
.flatMap(v -> Arrays.stream(v.split(",")))
.collect(Collectors.toSet());
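    // e.g. header values ["2.1,3.0", "4.0"] flatten to the version set {"2.1", "3.0", "4.0"},
    // from which getPreferredCodec selects the codec used to decode the payload below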
final EncodedPayloadCodec codec = EncodedPayloadCodec.getPreferredCodec(versions);
final PayloadEncoder payloadEncoder = PayloadEncoder.create(codec);
final EncodedPayload encodedPayload = payloadEncoder.decode(payload);
if (encodedPayload.getPrivacyMode() != PrivacyMode.STANDARD_PRIVATE) {
return Response.status(Response.Status.FORBIDDEN)
.entity("Transactions with enhanced privacy are not accepted during recovery mode")
.build();
}
final MessageHash messageHash = transactionManager.storePayload(encodedPayload);
LOGGER.debug("Push request generated hash {}", messageHash);
return Response.status(Response.Status.CREATED).entity(Objects.toString(messageHash)).build();
}
}
<|start_filename|>migration/multitenancy/src/main/java/com/quorum/tessera/multitenancy/migration/JdbcConfigUtil.java<|end_filename|>
package com.quorum.tessera.multitenancy.migration;
import com.quorum.tessera.config.JdbcConfig;
import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.Persistence;
import java.util.Map;
public interface JdbcConfigUtil {
static EntityManagerFactory entityManagerFactory(JdbcConfig jdbcConfig) {
return Persistence.createEntityManagerFactory("tessera", toMap(jdbcConfig));
}
static Map<String, String> toMap(JdbcConfig jdbcConfig) {
return Map.of(
"jakarta.persistence.jdbc.url", jdbcConfig.getUrl(),
"jakarta.persistence.jdbc.user", jdbcConfig.getUsername(),
"jakarta.persistence.jdbc.password", jdbcConfig.getPassword());
}
}
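// Usage sketch (values are hypothetical): for a JdbcConfig whose getUrl()/getUsername()/getPassword()
// return "jdbc:h2:mem:tessera", "sa" and "", toMap() yields
//   {jakarta.persistence.jdbc.url=jdbc:h2:mem:tessera,
//    jakarta.persistence.jdbc.user=sa,
//    jakarta.persistence.jdbc.password=}
// which entityManagerFactory() hands to Persistence.createEntityManagerFactory("tessera", ...).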
<|start_filename|>tests/acceptance-test/src/test/java/com/quorum/tessera/test/rest/OpenApiIT.java<|end_filename|>
package com.quorum.tessera.test.rest;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.test.Party;
import com.quorum.tessera.test.PartyHelper;
import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.net.URI;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
public class OpenApiIT {
private static final Logger LOGGER = LoggerFactory.getLogger(OpenApiIT.class);
private Client client;
private Party node;
@Rule public TestName testName = new TestName();
@Before
public void setUp() {
PartyHelper partyHelper = PartyHelper.create();
node = partyHelper.getParties().findFirst().get();
client = node.getRestClient();
LOGGER.debug("Begin test: {}", testName.getMethodName());
}
@After
public void after() {
LOGGER.debug("After test: {}", testName.getMethodName());
client.close();
}
@Test
public void openapiJson() throws IOException {
final List<URI> allUris = List.of(node.getQ2TUri(), node.getP2PUri());
for (URI u : allUris) {
LOGGER.debug("json: {}/api", u);
final Response resp =
client.target(u).path("/api").request(MediaType.APPLICATION_JSON_TYPE).get();
final String body = resp.readEntity(String.class);
LOGGER.debug("openapi document {}", body);
assertThat(resp.getStatus()).isEqualTo(200);
assertThat(resp.getMediaType()).isEqualTo(MediaType.APPLICATION_JSON_TYPE);
assertThat(body).isNotEmpty();
try (Reader reader = new StringReader(body)) {
JsonObject result = Json.createReader(reader).readObject();
assertThat(result).isNotEmpty();
}
}
}
@Test
public void openapiYaml() {
final List<URI> allUris = List.of(node.getQ2TUri(), node.getP2PUri());
final MediaType applicationYamlType = new MediaType("application", "yaml");
for (URI u : allUris) {
LOGGER.debug("yaml: {}/api", u);
final Response resp = client.target(u).path("/api").request(applicationYamlType).get();
final String body = resp.readEntity(String.class);
LOGGER.debug("openapi document {}", body);
assertThat(resp.getStatus()).isEqualTo(200);
assertThat(resp.getMediaType()).isEqualTo(applicationYamlType);
assertThat(body).isNotEmpty();
Yaml yaml = new Yaml();
Object result = yaml.load(body);
assertThat(result).isNotNull();
}
}
@Test
public void openapiUnsupportedAccepts() {
final List<URI> allUris = List.of(node.getQ2TUri(), node.getP2PUri());
for (URI u : allUris) {
LOGGER.info("json: {}/api", u);
final Response resp =
client.target(u).path("/api").request(MediaType.APPLICATION_OCTET_STREAM_TYPE).get();
assertThat(resp.getStatus()).isEqualTo(400);
}
}
}
<|start_filename|>tessera-jaxrs/transaction-jaxrs/src/test/java/com/quorum/tessera/q2t/internal/RestPayloadPublisherTest.java<|end_filename|>
package com.quorum.tessera.q2t.internal;
import static org.assertj.core.api.Assertions.*;
import static org.assertj.core.api.Fail.failBecauseExceptionWasNotThrown;
import static org.mockito.Mockito.*;
import com.quorum.tessera.discovery.Discovery;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.enclave.PrivacyMode;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.partyinfo.node.NodeInfo;
import com.quorum.tessera.partyinfo.node.Recipient;
import com.quorum.tessera.transaction.exception.EnhancedPrivacyNotSupportedException;
import com.quorum.tessera.transaction.exception.MandatoryRecipientsNotSupportedException;
import com.quorum.tessera.transaction.publish.NodeOfflineException;
import com.quorum.tessera.transaction.publish.PublishPayloadException;
import com.quorum.tessera.version.EnhancedPrivacyVersion;
import jakarta.ws.rs.ProcessingException;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.client.Invocation;
import jakarta.ws.rs.client.WebTarget;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.assertj.core.api.Assertions;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.MockedStatic;
public class RestPayloadPublisherTest {
private Client client;
private PayloadEncoder payloadEncoder;
private final MockedStatic<PayloadEncoder> payloadEncoderFactoryFunction =
mockStatic(PayloadEncoder.class);
private Discovery discovery;
private RestPayloadPublisher payloadPublisher;
@Before
public void beforeTest() {
client = mock(Client.class);
payloadEncoder = mock(PayloadEncoder.class);
discovery = mock(Discovery.class);
payloadPublisher = new RestPayloadPublisher(client, discovery);
payloadEncoderFactoryFunction
.when(() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)))
.thenReturn(payloadEncoder);
}
@After
public void afterTest() {
try {
verifyNoMoreInteractions(client, payloadEncoder, discovery);
payloadEncoderFactoryFunction.verifyNoMoreInteractions();
} finally {
payloadEncoderFactoryFunction.close();
}
}
@Test
public void publish() {
final String targetUrl = "nodeUrl";
final EncodedPayload encodedPayload = mock(EncodedPayload.class);
final PublicKey publicKey = mock(PublicKey.class);
for (Response.Status expectedResponseStatus : Response.Status.values()) {
for (PrivacyMode privacyMode : PrivacyMode.values()) {
when(encodedPayload.getPrivacyMode()).thenReturn(privacyMode);
final NodeInfo nodeInfo = mock(NodeInfo.class);
when(nodeInfo.supportedApiVersions())
.thenReturn(Set.of(EnhancedPrivacyVersion.API_VERSION_2, "2.1", "3.0", "4.0"));
when(nodeInfo.getUrl()).thenReturn(targetUrl);
when(discovery.getRemoteNodeInfo(publicKey)).thenReturn(nodeInfo);
final byte[] payloadData = "Payload".getBytes();
when(payloadEncoder.encode(encodedPayload)).thenReturn(payloadData);
WebTarget webTarget = mock(WebTarget.class);
when(client.target(targetUrl)).thenReturn(webTarget);
when(webTarget.path("/push")).thenReturn(webTarget);
Invocation.Builder invocationBuilder = mock(Invocation.Builder.class);
Response response = Response.status(expectedResponseStatus).build();
when(invocationBuilder.post(
Entity.entity(payloadData, MediaType.APPLICATION_OCTET_STREAM_TYPE)))
.thenReturn(response);
when(webTarget.request()).thenReturn(invocationBuilder);
if (expectedResponseStatus == Response.Status.OK
|| expectedResponseStatus == Response.Status.CREATED) {
payloadPublisher.publishPayload(encodedPayload, publicKey);
} else {
PublishPayloadException publishPayloadException =
Assertions.catchThrowableOfType(
() -> payloadPublisher.publishPayload(encodedPayload, publicKey),
PublishPayloadException.class);
assertThat(publishPayloadException)
.hasMessage(String.format("Unable to push payload to recipient url %s", targetUrl));
}
}
}
int iterations = Response.Status.values().length * PrivacyMode.values().length;
verify(client, times(iterations)).target(targetUrl);
verify(discovery, times(iterations)).getRemoteNodeInfo(publicKey);
verify(payloadEncoder, times(iterations)).encode(encodedPayload);
payloadEncoderFactoryFunction.verify(
times(iterations), () -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
}
@Test
public void publishEnhancedTransactionsToNodesThatDoNotSupport() {
Map<PrivacyMode, Set<String>> privacyModeAndVersions = new HashMap<>();
privacyModeAndVersions.put(PrivacyMode.PARTY_PROTECTION, Set.of("v1"));
privacyModeAndVersions.put(PrivacyMode.PRIVATE_STATE_VALIDATION, Set.of("v1"));
for (Map.Entry<PrivacyMode, Set<String>> pair : privacyModeAndVersions.entrySet()) {
String targetUrl = "http://someplace.com";
EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getPrivacyMode()).thenReturn(pair.getKey());
byte[] payloadData = "Some Data".getBytes();
when(payloadEncoder.encode(encodedPayload)).thenReturn(payloadData);
PublicKey recipientKey = mock(PublicKey.class);
NodeInfo nodeInfo = mock(NodeInfo.class);
when(nodeInfo.supportedApiVersions()).thenReturn(pair.getValue());
Recipient recipient = mock(Recipient.class);
when(recipient.getKey()).thenReturn(recipientKey);
when(recipient.getUrl()).thenReturn(targetUrl);
when(nodeInfo.getRecipients()).thenReturn(Set.of(recipient));
when(discovery.getRemoteNodeInfo(recipientKey)).thenReturn(nodeInfo);
EnhancedPrivacyNotSupportedException exception =
catchThrowableOfType(
() -> payloadPublisher.publishPayload(encodedPayload, recipientKey),
EnhancedPrivacyNotSupportedException.class);
assertThat(exception)
.hasMessageContaining("Transactions with enhanced privacy is not currently supported");
verify(discovery).getRemoteNodeInfo(eq(recipientKey));
}
payloadEncoderFactoryFunction.verify(
times(2), () -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
}
@Test
public void handleConnectionError() {
final String targetUri = "http://jimmywhite.com";
final PublicKey recipientKey = mock(PublicKey.class);
Recipient recipient = mock(Recipient.class);
when(recipient.getKey()).thenReturn(recipientKey);
when(recipient.getUrl()).thenReturn(targetUri);
NodeInfo nodeInfo = mock(NodeInfo.class);
when(nodeInfo.getRecipients()).thenReturn(Set.of(recipient));
when(nodeInfo.getUrl()).thenReturn(targetUri);
when(discovery.getRemoteNodeInfo(recipientKey)).thenReturn(nodeInfo);
Client client = mock(Client.class);
when(client.target(targetUri)).thenThrow(ProcessingException.class);
final EncodedPayload payload = mock(EncodedPayload.class);
when(payload.getPrivacyMode()).thenReturn(PrivacyMode.STANDARD_PRIVATE);
when(payloadEncoder.encode(payload)).thenReturn("SomeData".getBytes());
RestPayloadPublisher restPayloadPublisher = new RestPayloadPublisher(client, discovery);
try {
restPayloadPublisher.publishPayload(payload, recipientKey);
failBecauseExceptionWasNotThrown(NodeOfflineException.class);
} catch (NodeOfflineException ex) {
assertThat(ex).hasMessageContaining(targetUri);
verify(client).target(targetUri);
verify(discovery).getRemoteNodeInfo(eq(recipientKey));
verify(payloadEncoder).encode(payload);
verify(discovery).getRemoteNodeInfo(eq(recipientKey));
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
}
}
@Test
public void publishMandatoryRecipientsToNodesThatDoNotSupport() {
String targetUrl = "http://someplace.com";
EncodedPayload encodedPayload = mock(EncodedPayload.class);
when(encodedPayload.getPrivacyMode()).thenReturn(PrivacyMode.MANDATORY_RECIPIENTS);
byte[] payloadData = "Some Data".getBytes();
when(payloadEncoder.encode(encodedPayload)).thenReturn(payloadData);
PublicKey recipientKey = mock(PublicKey.class);
NodeInfo nodeInfo = mock(NodeInfo.class);
when(nodeInfo.supportedApiVersions()).thenReturn(Set.of("v2", "2.1", "3.0"));
Recipient recipient = mock(Recipient.class);
when(recipient.getKey()).thenReturn(recipientKey);
when(recipient.getUrl()).thenReturn(targetUrl);
when(nodeInfo.getRecipients()).thenReturn(Set.of(recipient));
when(discovery.getRemoteNodeInfo(recipientKey)).thenReturn(nodeInfo);
assertThatExceptionOfType(MandatoryRecipientsNotSupportedException.class)
.isThrownBy(() -> payloadPublisher.publishPayload(encodedPayload, recipientKey))
.withMessageContaining(
"Transactions with mandatory recipients are not currently supported on recipient");
verify(discovery).getRemoteNodeInfo(eq(recipientKey));
payloadEncoderFactoryFunction.verify(
() -> PayloadEncoder.create(any(EncodedPayloadCodec.class)));
}
}
<|start_filename|>tessera-recover/src/main/java/com/quorum/tessera/recovery/internal/RecoveryProvider.java<|end_filename|>
package com.quorum.tessera.recovery.internal;
import com.quorum.tessera.data.staging.StagingEntityDAO;
import com.quorum.tessera.discovery.Discovery;
import com.quorum.tessera.recovery.Recovery;
import com.quorum.tessera.recovery.resend.BatchTransactionRequester;
import com.quorum.tessera.transaction.TransactionManager;
public class RecoveryProvider {
public static Recovery provider() {
StagingEntityDAO stagingEntityDAO = StagingEntityDAO.create();
Discovery discovery = Discovery.create();
BatchTransactionRequester batchTransactionRequester = BatchTransactionRequester.create();
TransactionManager transactionManager = TransactionManager.create();
return new RecoveryImpl(
stagingEntityDAO, discovery, batchTransactionRequester, transactionManager);
}
}
<|start_filename|>tessera-data/src/main/java/com/quorum/tessera/data/staging/StagingTransaction.java<|end_filename|>
package com.quorum.tessera.data.staging;
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PrivacyMode;
import jakarta.persistence.*;
import java.io.Serializable;
import java.util.*;
/** The JPA entity that contains the staging transaction information. */
@EntityListeners(StagingTransactionListener.class)
@Entity
@Table(
name = "ST_TRANSACTION",
indexes = {
@Index(name = "ST_TRANSACTION_VALSTG", columnList = "VALIDATION_STAGE"),
@Index(name = "ST_TRANSACTION_HASH", columnList = "HASH")
})
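// Note on the queries below: "stagingQuery" selects transactions that have not yet been assigned a
// validation stage and whose affected (dependency) transactions have all been staged already; a
// dependency with no matching staging record counts as unstaged and blocks selection.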
@NamedQueries({
@NamedQuery(
name = "StagingTransaction.stagingQuery",
query =
"select st FROM StagingTransaction st where st.validationStage is null and not exists "
+ " (select act from StagingAffectedTransaction act where act.sourceTransaction.hash = st.hash and "
+ " (select coalesce(sum(CASE WHEN ast.validationStage is Null THEN 1 else 0 END), 1) from StagingTransaction ast where ast.hash = act.hash) > 0"
+ " )"),
@NamedQuery(
name = "StagingTransaction.countAll",
query = "select count(st) from StagingTransaction st"),
@NamedQuery(
name = "StagingTransaction.countStaged",
query = "select count(st) from StagingTransaction st where st.validationStage is not null"),
@NamedQuery(
name = "StagingTransaction.findAllOrderByStage",
query =
"select st from StagingTransaction st order by coalesce(st.validationStage, select max(st.validationStage)+1 from StagingTransaction st), st.hash")
})
public class StagingTransaction implements Serializable {
@Id
@GeneratedValue(generator = "ATOMIC_LONG", strategy = GenerationType.AUTO)
@Column(name = "ID")
private Long id;
@Basic
@Column(name = "HASH", nullable = false, updatable = false)
private String hash;
@Column(name = "PRIVACY_MODE", updatable = false)
@Enumerated(EnumType.ORDINAL)
private PrivacyMode privacyMode = PrivacyMode.STANDARD_PRIVATE;
@Column(name = "VALIDATION_STAGE")
@Basic
private Long validationStage;
@Column(name = "TIMESTAMP", updatable = false)
private long timestamp;
@OneToMany(
fetch = FetchType.LAZY,
cascade = {CascadeType.ALL},
mappedBy = "sourceTransaction",
orphanRemoval = true)
private Set<StagingAffectedTransaction> affectedContractTransactions = new HashSet<>();
@Lob
@Column(name = "PAYLOAD")
private byte[] payload;
@Enumerated(value = EnumType.STRING)
@Column(name = "PAYLOAD_CODEC", nullable = false)
private EncodedPayloadCodec encodedPayloadCodec;
@Transient private EncodedPayload encodedPayload;
public StagingTransaction() {}
@PrePersist
public void onPersist() {
this.timestamp = System.currentTimeMillis();
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getHash() {
return this.hash;
}
public void setHash(final String hash) {
this.hash = hash;
}
public long getTimestamp() {
return this.timestamp;
}
public Set<StagingAffectedTransaction> getAffectedContractTransactions() {
return affectedContractTransactions;
}
public void setAffectedContractTransactions(
Set<StagingAffectedTransaction> affectedContractTransactions) {
this.affectedContractTransactions = affectedContractTransactions;
}
public Long getValidationStage() {
return validationStage;
}
public void setValidationStage(Long validationStage) {
this.validationStage = validationStage;
}
public PrivacyMode getPrivacyMode() {
return privacyMode;
}
public void setPrivacyMode(PrivacyMode privacyMode) {
this.privacyMode = privacyMode;
}
public byte[] getPayload() {
return payload;
}
public void setPayload(byte[] payload) {
this.payload = payload;
}
public EncodedPayloadCodec getEncodedPayloadCodec() {
return encodedPayloadCodec;
}
public void setEncodedPayloadCodec(EncodedPayloadCodec encodedPayloadCodec) {
this.encodedPayloadCodec = encodedPayloadCodec;
}
public EncodedPayload getEncodedPayload() {
return encodedPayload;
}
public void setEncodedPayload(EncodedPayload encodedPayload) {
this.encodedPayload = encodedPayload;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (id == null) return false;
StagingTransaction that = (StagingTransaction) o;
return Objects.equals(id, that.id);
}
@Override
public int hashCode() {
return Objects.hash(id);
}
}
<|start_filename|>tessera-jaxrs/thirdparty-jaxrs/src/main/java/com/quorum/tessera/thirdparty/KeyResource.java<|end_filename|>
package com.quorum.tessera.thirdparty;
import com.quorum.tessera.context.RuntimeContext;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.thirdparty.model.GetPublicKeysResponse;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.util.Set;
@Tag(name = "third-party")
@Path("/keys")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public class KeyResource {
@GET
@Operation(summary = "/keys", description = "get all public keys managed by the server's enclave")
@ApiResponse(
responseCode = "200",
description = "server's public keys",
content = @Content(schema = @Schema(implementation = GetPublicKeysResponse.class)))
public Response getPublicKeys() {
RuntimeContext runtimeContext = RuntimeContext.getInstance();
Set<PublicKey> publicKeys = runtimeContext.getPublicKeys();
final JsonArrayBuilder keyBuilder = Json.createArrayBuilder();
publicKeys.stream()
.map(key -> Json.createObjectBuilder().add("key", key.encodeToBase64()).build())
.forEach(keyBuilder::add);
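    // the resulting body has the shape {"keys":[{"key":"<base64-encoded public key>"}, ...]}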
final String output =
Json.createObjectBuilder().add("keys", keyBuilder.build()).build().toString();
return Response.status(Response.Status.OK).entity(output).build();
}
}
<|start_filename|>config/src/test/java/com/quorum/tessera/config/constraints/KeyVaultConfigValidatorTest.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.quorum.tessera.config.DefaultKeyVaultConfig;
import com.quorum.tessera.config.KeyVaultType;
import jakarta.validation.ConstraintValidatorContext;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.UUID;
import org.junit.Before;
import org.junit.Test;
public class KeyVaultConfigValidatorTest {
private KeyVaultConfigValidator keyVaultConfigValidator;
private ConstraintValidatorContext context;
@Before
public void setUp() {
context = mock(ConstraintValidatorContext.class);
ConstraintValidatorContext.ConstraintViolationBuilder builder =
mock(ConstraintValidatorContext.ConstraintViolationBuilder.class);
when(context.buildConstraintViolationWithTemplate(any(String.class))).thenReturn(builder);
keyVaultConfigValidator = new KeyVaultConfigValidator();
ValidKeyVaultConfig validKeyVaultConfig = mock(ValidKeyVaultConfig.class);
keyVaultConfigValidator.initialize(validKeyVaultConfig);
}
@Test
public void nullKeyConfigurationIsAllowedAndWillBePickedUpByNotNullAnnotation() {
assertThat(keyVaultConfigValidator.isValid(null, context)).isTrue();
}
@Test
public void nullKeyVaultTypeIsAllowedAndWillBePickedUpByNotNullAnnotation() {
DefaultKeyVaultConfig config = new DefaultKeyVaultConfig();
assertThat(keyVaultConfigValidator.isValid(config, context)).isTrue();
}
@Test
public void validAzureConfig() {
DefaultKeyVaultConfig config = new DefaultKeyVaultConfig();
config.setKeyVaultType(KeyVaultType.AZURE);
config.setProperty("url", "someurl");
assertThat(keyVaultConfigValidator.isValid(config, context)).isTrue();
}
@Test
public void invalidAzureConfig() {
DefaultKeyVaultConfig config = new DefaultKeyVaultConfig();
config.setKeyVaultType(KeyVaultType.AZURE);
assertThat(keyVaultConfigValidator.isValid(config, context)).isFalse();
}
@Test
public void validHashicorpConfig() throws Exception {
Path somePath = Files.createTempFile(UUID.randomUUID().toString(), ".txt");
somePath.toFile().deleteOnExit();
DefaultKeyVaultConfig config = new DefaultKeyVaultConfig();
config.setKeyVaultType(KeyVaultType.HASHICORP);
config.setProperty("url", "someurl");
config.setProperty("tlsKeyStorePath", somePath.toString());
config.setProperty("tlsTrustStorePath", somePath.toString());
assertThat(keyVaultConfigValidator.isValid(config, context)).isTrue();
}
@Test
public void invalidHashicorpConfig() {
Path somePath = mock(Path.class);
DefaultKeyVaultConfig config = new DefaultKeyVaultConfig();
config.setKeyVaultType(KeyVaultType.HASHICORP);
config.setProperty("tlsKeyStorePath", somePath.toString());
config.setProperty("tlsTrustStorePath", somePath.toString());
assertThat(keyVaultConfigValidator.isValid(config, context)).isFalse();
}
@Test
public void validAWSConfig() {
DefaultKeyVaultConfig config = new DefaultKeyVaultConfig();
config.setKeyVaultType(KeyVaultType.AWS);
config.setProperty("endpoint", "http://someurl");
assertThat(keyVaultConfigValidator.isValid(config, context)).isTrue();
}
@Test
public void validAWSConfigNoEndpoint() {
DefaultKeyVaultConfig config = new DefaultKeyVaultConfig();
config.setKeyVaultType(KeyVaultType.AWS);
assertThat(keyVaultConfigValidator.isValid(config, context)).isTrue();
}
@Test
public void invalidAWSConfig() {
DefaultKeyVaultConfig config = new DefaultKeyVaultConfig();
config.setKeyVaultType(KeyVaultType.AWS);
config.setProperty("endpoint", "noscheme");
assertThat(keyVaultConfigValidator.isValid(config, context)).isFalse();
}
}
<|start_filename|>server/jersey-server/src/main/java/com/quorum/tessera/server/monitoring/InfluxDbClient.java<|end_filename|>
package com.quorum.tessera.server.monitoring;
import com.quorum.tessera.config.AppType;
import com.quorum.tessera.config.InfluxConfig;
import com.quorum.tessera.ssl.context.ClientSSLContextFactory;
import com.quorum.tessera.ssl.context.SSLContextFactory;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.ClientBuilder;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.client.WebTarget;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import java.lang.management.ManagementFactory;
import java.net.URI;
import java.util.List;
import javax.management.MBeanServer;
import javax.net.ssl.SSLContext;
public class InfluxDbClient {
private final URI tesseraAppUri;
private final InfluxConfig influxConfig;
private final AppType appType;
private final MBeanServer mbs;
public InfluxDbClient(URI tesseraAppUri, InfluxConfig influxConfig, AppType appType) {
this.tesseraAppUri = tesseraAppUri;
this.influxConfig = influxConfig;
this.appType = appType;
this.mbs = ManagementFactory.getPlatformMBeanServer();
}
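// Gathers MBean metrics for this app, formats them for InfluxDB and POSTs the result
// to <influx server uri>/write?db=<database name>, optionally over TLS.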
public Response postMetrics() {
MetricsEnquirer metricsEnquirer = new MetricsEnquirer(mbs);
List<MBeanMetric> metrics = metricsEnquirer.getMBeanMetrics(appType);
InfluxDbProtocolFormatter formatter = new InfluxDbProtocolFormatter();
String formattedMetrics = formatter.format(metrics, tesseraAppUri, appType);
ClientBuilder clientBuilder = ClientBuilder.newBuilder();
if (influxConfig.isSsl()) {
final SSLContextFactory sslContextFactory = ClientSSLContextFactory.create();
final SSLContext sslContext =
sslContextFactory.from(
influxConfig.getServerUri().toString(), influxConfig.getSslConfig());
clientBuilder.sslContext(sslContext);
}
Client client = clientBuilder.build();
WebTarget influxTarget =
client
.target(influxConfig.getServerUri())
.path("write")
.queryParam("db", influxConfig.getDbName());
return influxTarget
.request(MediaType.TEXT_PLAIN)
.accept(MediaType.TEXT_PLAIN)
.post(Entity.text(formattedMetrics));
}
}
<|start_filename|>tests/acceptance-test/src/main/java/module-info.java<|end_filename|>
module tessera.acceptance.tests {
requires org.slf4j;
requires java.sql;
requires tessera.encryption.jnacl;
requires tessera.security;
requires tessera.config;
requires tessera.encryption.api;
requires jakarta.ws.rs;
requires tessera.partyinfo.jaxrs;
requires tessera.jaxrs.client;
requires tessera.enclave.api;
requires tessera.common.jaxrs;
requires tessera.partyinfo.model;
requires tessera.application;
requires tessera.shared;
requires tessera.data;
requires jdk.httpserver;
requires java.net.http;
requires jakarta.json;
}
<|start_filename|>shared/src/test/java/com/quorum/tessera/version/ApiVersionTest.java<|end_filename|>
package com.quorum.tessera.version;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Test;
public class ApiVersionTest {
@Test
public void create() {
assertThat(ApiVersion.versions())
.containsExactlyInAnyOrder("v1", "v2", "2.1", "3.0", "4.0", "5.0");
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/constraints/KeyConfigurationValidator.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import com.quorum.tessera.config.KeyConfiguration;
import jakarta.validation.ConstraintValidator;
import jakarta.validation.ConstraintValidatorContext;
public class KeyConfigurationValidator
implements ConstraintValidator<ValidKeyConfiguration, KeyConfiguration> {
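// A key configuration is valid unless it specifies both a password file and inline passwords.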
@Override
public boolean isValid(KeyConfiguration keyConfiguration, ConstraintValidatorContext cvc) {
return keyConfiguration == null
|| !(keyConfiguration.getPasswordFile() != null && keyConfiguration.getPasswords() != null);
}
}
<|start_filename|>tessera-context/src/main/java/com/quorum/tessera/context/RestClientFactory.java<|end_filename|>
package com.quorum.tessera.context;
import com.quorum.tessera.config.ServerConfig;
import jakarta.ws.rs.client.Client;
import java.util.ServiceLoader;
public interface RestClientFactory {
Client buildFrom(ServerConfig serverContext);
static RestClientFactory create() {
return ServiceLoader.load(RestClientFactory.class).findFirst().get();
}
}
<|start_filename|>tessera-data/src/main/java/com/quorum/tessera/data/PrivacyGroupEntity.java<|end_filename|>
package com.quorum.tessera.data;
import jakarta.persistence.*;
import java.io.Serializable;
import java.util.Arrays;
/** The JPA entity that contains the privacy group information */
@NamedQueries({
@NamedQuery(
name = "PrivacyGroup.FindById",
query = "SELECT pg FROM PrivacyGroupEntity pg WHERE pg.id = :id"),
@NamedQuery(
name = "PrivacyGroup.FindByLookupId",
query = "select pg from PrivacyGroupEntity pg WHERE pg.lookupId = :lookupId"),
@NamedQuery(name = "PrivacyGroup.FindAll", query = "select pg from PrivacyGroupEntity pg")
})
@Entity
@Table(
name = "PRIVACY_GROUP",
indexes = {@Index(name = "PRIVACY_GROUP_LOOKUPID", columnList = "LOOKUP_ID")})
public class PrivacyGroupEntity implements Serializable {
@Id
@Column(name = "ID")
private byte[] id;
@Lob
@Column(name = "LOOKUP_ID")
private byte[] lookupId;
@Lob
@Column(name = "DATA", nullable = false)
private byte[] data;
@Column(name = "TIMESTAMP", updatable = false)
private long timestamp;
public PrivacyGroupEntity(final byte[] id, final byte[] lookupId, final byte[] data) {
this.id = id;
this.lookupId = lookupId;
this.data = data;
}
public PrivacyGroupEntity() {}
@PrePersist
public void onPersist() {
this.timestamp = System.currentTimeMillis();
}
public byte[] getId() {
return id;
}
public void setId(byte[] id) {
this.id = id;
}
public byte[] getLookupId() {
return lookupId;
}
public void setLookupId(byte[] lookupId) {
this.lookupId = lookupId;
}
public byte[] getData() {
return data;
}
public void setData(byte[] data) {
this.data = data;
}
public long getTimestamp() {
return timestamp;
}
@Override
public boolean equals(Object obj) {
return (obj instanceof PrivacyGroupEntity) && Arrays.equals(id, ((PrivacyGroupEntity) obj).id);
}
@Override
public int hashCode() {
return Arrays.hashCode(id);
}
}
<|start_filename|>migration/multitenancy/src/main/java/module-info.java<|end_filename|>
module tessera.migration.multitenancy {
requires tessera.cli.api;
requires tessera.data;
requires tessera.config;
requires tessera.encryption.api;
requires info.picocli;
requires tessera.enclave.api;
requires java.sql;
requires jakarta.persistence;
opens com.quorum.tessera.multitenancy.migration to
info.picocli;
exports com.quorum.tessera.multitenancy.migration to
info.picocli;
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/constraints/HasKeysOrRemoteEnclaveValidator.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import com.quorum.tessera.config.AppType;
import com.quorum.tessera.config.Config;
import jakarta.validation.ConstraintValidator;
import jakarta.validation.ConstraintValidatorContext;
import java.util.Objects;
import java.util.Optional;
public class HasKeysOrRemoteEnclaveValidator
implements ConstraintValidator<HasKeysOrRemoteEnclave, Config> {
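// The config must either define local keys or expose a remote ENCLAVE server.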
@Override
public boolean isValid(Config config, ConstraintValidatorContext constraintValidatorContext) {
return Optional.ofNullable(config.getKeys())
.map(Objects::nonNull)
.orElse(config.getServerConfigs().stream().anyMatch(s -> s.getApp() == AppType.ENCLAVE));
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/main/java/com/quorum/tessera/api/exception/NotFoundExceptionMapper.java<|end_filename|>
package com.quorum.tessera.api.exception;
import jakarta.ws.rs.NotFoundException;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.Response.Status;
import jakarta.ws.rs.ext.ExceptionMapper;
import jakarta.ws.rs.ext.Provider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Provider
public class NotFoundExceptionMapper implements ExceptionMapper<NotFoundException> {
private static final Logger LOGGER = LoggerFactory.getLogger(NotFoundExceptionMapper.class);
@Override
public Response toResponse(final NotFoundException ex) {
LOGGER.warn("Entity not found: {}", ex.getMessage());
LOGGER.debug(null, ex);
return Response.status(Status.NOT_FOUND)
.entity(ex.getMessage())
.type(MediaType.TEXT_PLAIN)
.build();
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/test/java/com/quorum/tessera/api/exception/MandatoryRecipientsNotSupportedExceptionMapperTest.java<|end_filename|>
package com.quorum.tessera.api.exception;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.transaction.exception.MandatoryRecipientsNotSupportedException;
import jakarta.ws.rs.core.Response;
import org.junit.Test;
public class MandatoryRecipientsNotSupportedExceptionMapperTest {
private MandatoryRecipientsNotSupportedExceptionMapper instance =
new MandatoryRecipientsNotSupportedExceptionMapper();
@Test
public void toResponse() {
final MandatoryRecipientsNotSupportedException ex =
new MandatoryRecipientsNotSupportedException("OUCH");
final Response result = instance.toResponse(ex);
assertThat(result).isNotNull();
final String message = (String) result.getEntity();
assertThat(message).isEqualTo("OUCH");
assertThat(result.getStatus()).isEqualTo(403);
}
}
<|start_filename|>tessera-jaxrs/transaction-jaxrs/src/main/java/com/quorum/tessera/q2t/internal/BatchPayloadPublisherProvider.java<|end_filename|>
package com.quorum.tessera.q2t.internal;
import com.quorum.tessera.threading.CancellableCountDownLatchFactory;
import com.quorum.tessera.threading.ExecutorFactory;
import com.quorum.tessera.transaction.publish.BatchPayloadPublisher;
import com.quorum.tessera.transaction.publish.PayloadPublisher;
public class BatchPayloadPublisherProvider {
public static BatchPayloadPublisher provider() {
ExecutorFactory executorFactory = new ExecutorFactory();
CancellableCountDownLatchFactory countDownLatchFactory = new CancellableCountDownLatchFactory();
PayloadPublisher payloadPublisher = PayloadPublisher.create();
return new AsyncBatchPayloadPublisher(executorFactory, countDownLatchFactory, payloadPublisher);
}
}
<|start_filename|>tessera-recover/src/main/java/com/quorum/tessera/recovery/resend/PushBatchRequest.java<|end_filename|>
package com.quorum.tessera.recovery.resend;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import java.util.List;
public interface PushBatchRequest {
List<byte[]> getEncodedPayloads();
EncodedPayloadCodec getEncodedPayloadCodec();
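// Illustrative usage (payloadBytes is a hypothetical byte[] variable):
// PushBatchRequest request = PushBatchRequest.from(List.of(payloadBytes), EncodedPayloadCodec.CBOR);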
static PushBatchRequest from(
List<byte[]> encodedPayloads, EncodedPayloadCodec encodedPayloadCodec) {
return new PushBatchRequest() {
@Override
public List<byte[]> getEncodedPayloads() {
return List.copyOf(encodedPayloads);
}
@Override
public EncodedPayloadCodec getEncodedPayloadCodec() {
return encodedPayloadCodec;
}
};
}
}
<|start_filename|>config/src/test/java/com/quorum/tessera/config/adapters/MapAdapterTest.java<|end_filename|>
package com.quorum.tessera.config.adapters;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.config.ConfigProperties;
import jakarta.xml.bind.JAXBElement;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.xml.namespace.QName;
import org.junit.Before;
import org.junit.Test;
public class MapAdapterTest {
private MapAdapter adapter;
@Before
public void onSetup() {
adapter = new MapAdapter();
}
@Test
public void marshalEmpty() throws Exception {
Map<String, String> map = new HashMap<>();
ConfigProperties outcome = adapter.marshal(map);
assertThat(outcome.getProperties()).isEmpty();
}
@Test
public void marshalNull() throws Exception {
assertThat(adapter.marshal(null)).isNull();
}
@Test
public void marshal() throws Exception {
Map<String, String> map = new LinkedHashMap<>();
map.put("message", "I love sparrows!!");
map.put("greeting", "Hellow");
ConfigProperties outcome = adapter.marshal(map);
assertThat(outcome.getProperties()).hasSize(2);
List<String> names =
outcome.getProperties().stream()
.map(JAXBElement::getName)
.map(QName::getLocalPart)
.collect(Collectors.toList());
assertThat(names).containsExactly("message", "greeting");
List<String> values =
outcome.getProperties().stream().map(JAXBElement::getValue).collect(Collectors.toList());
assertThat(values).containsExactly("I love sparrows!!", "Hellow");
}
@Test
public void unmarshal() throws Exception {
ConfigProperties properties = new ConfigProperties();
JAXBElement<String> someElement =
new JAXBElement<>(QName.valueOf("message"), String.class, "I love sparrows!!");
JAXBElement<String> someOtherElement =
new JAXBElement<>(QName.valueOf("greeting"), String.class, "Hellow");
properties.setProperties(Arrays.asList(someElement, someOtherElement));
Map<String, String> result = adapter.unmarshal(properties);
Map<String, String> map = new LinkedHashMap<>();
map.put("message", "I love sparrows!!");
map.put("greeting", "Hellow");
assertThat(result).containsAllEntriesOf(map);
}
@Test
public void unmarshalNull() throws Exception {
assertThat(adapter.unmarshal(null)).isNull();
}
@Test
public void unmarshalEmpty() throws Exception {
assertThat(adapter.unmarshal(new ConfigProperties())).isEmpty();
}
}
<|start_filename|>enclave/enclave-api/src/main/java/com/quorum/tessera/enclave/EncodedPayloadCodec.java<|end_filename|>
package com.quorum.tessera.enclave;
import com.quorum.tessera.version.BaseVersion;
import com.quorum.tessera.version.CBORSupportVersion;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Stream;
public enum EncodedPayloadCodec {
CBOR(CBORSupportVersion.API_VERSION_5),
LEGACY(BaseVersion.API_VERSION_1);
String minimumSupportedVersion;
EncodedPayloadCodec(String minimumSupportedVersion) {
this.minimumSupportedVersion = minimumSupportedVersion;
}
public String getMinimumSupportedVersion() {
return minimumSupportedVersion;
}
public static EncodedPayloadCodec current() {
return CBOR;
}
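// Picks the most capable codec the other party supports by comparing the numeric part of each
// codec's minimum supported version, highest first. Illustrative example (assuming CBOR requires
// "5.0" and LEGACY requires "v1"): a peer advertising {"v1", "v2", "5.0"} gets CBOR, while a peer
// advertising only {"v1", "v2"} falls back to LEGACY.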
public static EncodedPayloadCodec getPreferredCodec(Set<String> versions) {
return Stream.of(EncodedPayloadCodec.values())
.sorted(
(c1, c2) -> {
Function<EncodedPayloadCodec, Double> parseValue =
c -> Double.parseDouble(c.getMinimumSupportedVersion().replaceAll("[^\\d.]", ""));
return Double.compare(parseValue.apply(c2), parseValue.apply(c1));
})
.filter(codec -> versions.contains(codec.getMinimumSupportedVersion()))
.findFirst()
.orElse(LEGACY);
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/ArgonOptions.java<|end_filename|>
package com.quorum.tessera.config;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlAttribute;
import java.util.Optional;
@XmlAccessorType(XmlAccessType.FIELD)
public class ArgonOptions extends ConfigItem {
@Pattern(regexp = "^(id|i|d)$")
@XmlAttribute(name = "variant")
private String algorithm;
@NotNull @XmlAttribute private Integer iterations;
@NotNull @XmlAttribute private Integer memory;
@NotNull @XmlAttribute private Integer parallelism;
public ArgonOptions(String algorithm, Integer iterations, Integer memory, Integer parallelism) {
this.algorithm = Optional.ofNullable(algorithm).orElse("id");
this.iterations = iterations;
this.memory = memory;
this.parallelism = parallelism;
}
public ArgonOptions() {}
public String getAlgorithm() {
return algorithm;
}
public Integer getIterations() {
return iterations;
}
public Integer getMemory() {
return memory;
}
public Integer getParallelism() {
return parallelism;
}
public void setAlgorithm(String algorithm) {
this.algorithm = algorithm;
}
public void setIterations(Integer iterations) {
this.iterations = iterations;
}
public void setMemory(Integer memory) {
this.memory = memory;
}
public void setParallelism(Integer parallelism) {
this.parallelism = parallelism;
}
}
<|start_filename|>server/jersey-server/src/test/java/com/quorum/tessera/server/jersey/JerseyServerIT.java<|end_filename|>
package com.quorum.tessera.server.jersey;
import static org.assertj.core.api.Assertions.assertThat;
import com.quorum.tessera.config.CommunicationType;
import com.quorum.tessera.config.CrossDomainConfig;
import com.quorum.tessera.config.ServerConfig;
import com.quorum.tessera.config.util.JaxbUtil;
import jakarta.ws.rs.client.ClientBuilder;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.MultivaluedHashMap;
import jakarta.ws.rs.core.MultivaluedMap;
import jakarta.ws.rs.core.Response;
import java.net.URI;
import java.util.Arrays;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class JerseyServerIT {
private URI serverUri = URI.create("http://localhost:8080");
private JerseyServer server;
@Before
public void onSetUp() throws Exception {
System.setProperty("sun.net.http.allowRestrictedHeaders", "true");
ServerConfig serverConfig = new ServerConfig();
serverConfig.setCommunicationType(CommunicationType.REST);
serverConfig.setServerAddress("http://localhost:8080");
CrossDomainConfig crossDomainConfig = new CrossDomainConfig();
crossDomainConfig.setAllowedOrigins(Arrays.asList("*.acme.com", "*.other.com"));
serverConfig.setCrossDomainConfig(crossDomainConfig);
JaxbUtil.marshalWithNoValidation(serverConfig, System.out);
server = new JerseyServer(serverConfig, SampleApplication.class);
server.start();
}
@After
public void onTearDown() throws Exception {
server.stop();
}
@Test
public void ping() {
MultivaluedMap<String, Object> headers = new MultivaluedHashMap<>();
headers.add("Origin", "*.acme.com");
Response result =
ClientBuilder.newClient().target(serverUri).path("ping").request().headers(headers).get();
assertThat(result.getHeaderString("Access-Control-Allow-Origin")).isEqualTo("*.acme.com");
assertThat(result.getHeaderString("Access-Control-Allow-Credentials")).isEqualTo("true");
assertThat(result.getStatus()).isEqualTo(200);
assertThat(result.readEntity(String.class)).isEqualTo("HEllow");
}
@Test
public void create() {
SamplePayload payload = new SamplePayload();
payload.setValue("Hellow");
Response result =
ClientBuilder.newClient()
.target(serverUri)
.path("create")
.request()
.post(Entity.entity(payload, MediaType.APPLICATION_JSON));
assertThat(result.getStatus()).isEqualTo(201);
assertThat(result.getLocation()).isNotNull();
Response result2 =
ClientBuilder.newClient()
.target(result.getLocation())
.request(MediaType.APPLICATION_JSON)
.get();
SamplePayload p = result2.readEntity(SamplePayload.class);
assertThat(p).isNotNull();
assertThat(p.getValue()).isEqualTo("Hellow");
Response result3 =
ClientBuilder.newClient().target(serverUri).path(p.getId()).request().delete();
assertThat(result3.getStatus()).isEqualTo(200);
SamplePayload deleted = result3.readEntity(SamplePayload.class);
assertThat(deleted.getValue()).isEqualTo("Hellow");
}
}
<|start_filename|>tessera-jaxrs/sync-jaxrs/src/test/java/com/quorum/tessera/p2p/recovery/ResendBatchPublisherProviderTest.java<|end_filename|>
package com.quorum.tessera.p2p.recovery;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.mockStatic;
import com.quorum.tessera.enclave.EncodedPayloadCodec;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.recovery.resend.ResendBatchPublisher;
import org.junit.Test;
public class ResendBatchPublisherProviderTest {
@Test
public void provider() {
try (var recoveryClientMockedStatic = mockStatic(RecoveryClient.class);
var payloadEncoderMockedStatic = mockStatic(PayloadEncoder.class)) {
recoveryClientMockedStatic
.when(RecoveryClient::create)
.thenReturn(mock(RecoveryClient.class));
payloadEncoderMockedStatic
.when(() -> PayloadEncoder.create(EncodedPayloadCodec.LEGACY))
.thenReturn(mock(PayloadEncoder.class));
ResendBatchPublisher resendBatchPublisher = ResendBatchPublisherProvider.provider();
assertThat(resendBatchPublisher)
.isNotNull()
.isExactlyInstanceOf(RestResendBatchPublisher.class);
recoveryClientMockedStatic.verify(RecoveryClient::create);
recoveryClientMockedStatic.verifyNoMoreInteractions();
payloadEncoderMockedStatic.verify(() -> PayloadEncoder.create(EncodedPayloadCodec.LEGACY));
payloadEncoderMockedStatic.verifyNoMoreInteractions();
}
}
@Test
public void defaultConstructorForCoverage() {
assertThat(new ResendBatchPublisherProvider()).isNotNull();
}
}
<|start_filename|>tessera-core/src/test/java/com/quorum/tessera/transaction/internal/PrivacyHelperProviderTest.java<|end_filename|>
package com.quorum.tessera.transaction.internal;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.context.RuntimeContext;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.transaction.PrivacyHelper;
import org.junit.Test;
public class PrivacyHelperProviderTest {
@Test
public void defaultConstructorForCoverage() {
assertThat(new PrivacyHelperProvider()).isNotNull();
}
@Test
public void provider() {
try (var mockedRuntimeContext = mockStatic(RuntimeContext.class);
var mockedEncryptedTransactionDAO = mockStatic(EncryptedTransactionDAO.class)) {
RuntimeContext runtimeContext = mock(RuntimeContext.class);
when(runtimeContext.isEnhancedPrivacy()).thenReturn(true);
mockedRuntimeContext.when(RuntimeContext::getInstance).thenReturn(runtimeContext);
mockedEncryptedTransactionDAO
.when(EncryptedTransactionDAO::create)
.thenReturn(mock(EncryptedTransactionDAO.class));
PrivacyHelper privacyHelper = PrivacyHelperProvider.provider();
assertThat(privacyHelper).isNotNull();
}
}
}
<|start_filename|>tessera-jaxrs/thirdparty-jaxrs/src/main/java/module-info.java<|end_filename|>
module tessera.thirdparty.jaxrs {
requires jakarta.json;
requires jakarta.ws.rs;
requires tessera.config;
requires tessera.shared;
requires tessera.encryption.api;
requires tessera.context;
requires tessera.transaction;
requires tessera.common.jaxrs;
requires tessera.partyinfo;
requires tessera.partyinfo.model;
requires io.swagger.v3.oas.annotations;
exports com.quorum.tessera.thirdparty;
provides com.quorum.tessera.config.apps.TesseraApp with
com.quorum.tessera.thirdparty.ThirdPartyRestApp;
}
<|start_filename|>security/src/main/java/com/quorum/tessera/ssl/util/CertificateUtil.java<|end_filename|>
package com.quorum.tessera.ssl.util;
import jakarta.xml.bind.DatatypeConverter;
import java.security.MessageDigest;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
public interface CertificateUtil {
default String thumbPrint(final X509Certificate certificate) throws CertificateException {
try {
final byte[] encoded = certificate.getEncoded();
return DatatypeConverter.printHexBinary(MessageDigest.getInstance("SHA-1").digest(encoded))
.toLowerCase();
} catch (Exception ex) {
throw new CertificateException(
"Cannot generate thumbprint for this certificate. Cause by ", ex);
}
}
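// Illustrative usage (certificate is a hypothetical X509Certificate instance):
// String thumbprint = CertificateUtil.create().thumbPrint(certificate);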
static CertificateUtil create() {
return new CertificateUtil() {};
}
}
<|start_filename|>config/src/test/java/com/quorum/tessera/config/constraints/Base64ValidatorTest.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verifyZeroInteractions;
import jakarta.validation.ConstraintValidatorContext;
import java.util.Base64;
import java.util.UUID;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class Base64ValidatorTest {
private Base64Validator validator = new Base64Validator();
private ConstraintValidatorContext constraintValidatorContext;
@Before
public void onSetup() {
this.constraintValidatorContext = mock(ConstraintValidatorContext.class);
}
@After
public void onTearDown() {
verifyZeroInteractions(constraintValidatorContext);
}
@Test
public void validBase64() {
final String value = Base64.getEncoder().encodeToString("HELLOW".getBytes());
assertThat(validator.isValid(value, constraintValidatorContext)).isTrue();
}
@Test
public void invalidBase64() {
final String value = UUID.randomUUID().toString();
assertThat(validator.isValid(value, constraintValidatorContext)).isFalse();
}
@Test
public void nullValueIsIgnoredAndReturns() {
assertThat(validator.isValid(null, constraintValidatorContext)).isTrue();
}
@Test
public void naclFailureValueIsIgnoredAndReturns() {
assertThat(validator.isValid("NACL_FAILURE: It's broken son!!", constraintValidatorContext))
.isTrue();
assertThat(validator.isValid("NACL_FAILURE", constraintValidatorContext)).isTrue();
}
}
<|start_filename|>tessera-data/src/main/java/com/quorum/tessera/data/internal/EncryptedRawTransactionDAOImpl.java<|end_filename|>
package com.quorum.tessera.data.internal;
import com.quorum.tessera.data.*;
import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.EntityNotFoundException;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import java.util.List;
import java.util.Optional;
import org.bouncycastle.util.encoders.Hex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A JPA implementation of {@link EncryptedRawTransactionDAO} */
public class EncryptedRawTransactionDAOImpl implements EncryptedRawTransactionDAO {
private static final Logger LOGGER =
LoggerFactory.getLogger(EncryptedRawTransactionDAOImpl.class);
private final EntityManagerTemplate entityManagerTemplate;
public EncryptedRawTransactionDAOImpl(EntityManagerFactory entityManagerFactory) {
this.entityManagerTemplate = new EntityManagerTemplate(entityManagerFactory);
}
@Override
public EncryptedRawTransaction save(final EncryptedRawTransaction entity) {
LOGGER.debug(
"Persisting EncryptedRawTransaction with hash {}, payload {}, key {}, nonce {} and from {}",
entity.getHash(),
toHexString(entity.getEncryptedPayload()),
toHexString(entity.getEncryptedKey()),
toHexString(entity.getNonce()),
toHexString(entity.getSender()));
return entityManagerTemplate.execute(
entityManager -> {
entityManager.persist(entity);
return entity;
});
}
@Override
public Optional<EncryptedRawTransaction> retrieveByHash(final MessageHash hash) {
LOGGER.debug("Retrieving payload with hash {}", hash);
EncryptedRawTransaction encryptedRawTransaction =
entityManagerTemplate.execute(
entityManager -> entityManager.find(EncryptedRawTransaction.class, hash));
return Optional.ofNullable(encryptedRawTransaction);
}
@Override
public void delete(final MessageHash hash) {
LOGGER.info("Deleting transaction with hash {}", hash);
entityManagerTemplate.execute(
entityManager -> {
EncryptedRawTransaction txn = entityManager.find(EncryptedRawTransaction.class, hash);
if (txn == null) {
throw new EntityNotFoundException();
}
entityManager
.createNamedQuery("EncryptedRawTransaction.DeleteByHash")
.setParameter("hash", hash.getHashBytes())
.executeUpdate();
return txn;
});
}
@Override
public boolean upcheck() {
// if the query succeeds then the DB is up and running (otherwise an exception is thrown)
try {
return entityManagerTemplate.execute(
entityManager -> {
Object result =
entityManager.createNamedQuery("EncryptedRawTransaction.Upcheck").getSingleResult();
return true;
});
} catch (Exception e) {
return false;
}
}
@Override
public long transactionCount() {
upcheck();
return entityManagerTemplate.execute(
entityManager -> {
CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
CriteriaQuery<Long> countQuery = criteriaBuilder.createQuery(Long.class);
countQuery.select(criteriaBuilder.count(countQuery.from(EncryptedRawTransaction.class)));
return entityManager.createQuery(countQuery).getSingleResult();
});
}
@Override
public List<EncryptedRawTransaction> retrieveTransactions(int offset, int maxResult) {
LOGGER.debug(
"Fetching batch(offset:{}, maxResult:{}) of EncryptedRawTransaction entries",
offset,
maxResult);
return entityManagerTemplate.execute(
entityManager ->
entityManager
.createNamedQuery("EncryptedRawTransaction.FindAll", EncryptedRawTransaction.class)
.setFirstResult(offset)
.setMaxResults(maxResult)
.getResultList());
}
private String toHexString(byte[] val) {
if (null == val) {
return "null";
}
return Hex.toHexString(val);
}
}
<|start_filename|>config/src/test/java/com/quorum/tessera/config/constraints/UnsupportedKeyPairValidatorTest.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import com.quorum.tessera.config.KeyDataConfig;
import com.quorum.tessera.config.keypairs.UnsupportedKeyPair;
import jakarta.validation.ConstraintValidatorContext;
import java.nio.file.Path;
import org.junit.Before;
import org.junit.Test;
public class UnsupportedKeyPairValidatorTest {
private UnsupportedKeyPairValidator validator = new UnsupportedKeyPairValidator();
private ConstraintValidatorContext context;
private UnsupportedKeyPair keyPair;
@Before
public void setUp() {
this.context = mock(ConstraintValidatorContext.class);
ConstraintValidatorContext.ConstraintViolationBuilder builder =
mock(ConstraintValidatorContext.ConstraintViolationBuilder.class);
when(context.buildConstraintViolationWithTemplate(any(String.class))).thenReturn(builder);
this.keyPair = new UnsupportedKeyPair();
}
@Test
public void directViolationIfPublicKeyButNoPrivateKey() {
keyPair.setPublicKey("public");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.bothDirectKeysRequired.message}");
}
@Test
public void directViolationIfNoPublicKeyButPrivateKey() {
keyPair.setPrivateKey("private");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.bothDirectKeysRequired.message}");
}
@Test
public void
directViolationIsDefaultIfNoDirectPublicEvenIfMultipleIncompleteKeyPairTypesProvided() {
KeyDataConfig keyDataConfig = mock(KeyDataConfig.class);
Path path = mock(Path.class);
keyPair.setPrivateKey("private");
keyPair.setConfig(keyDataConfig);
keyPair.setPrivateKeyPath(path);
keyPair.setAzureVaultPrivateKeyId("privAzure");
keyPair.setHashicorpVaultPrivateKeyId("privHashicorp");
keyPair.setAwsSecretsManagerPrivateKeyId("privAWS");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.bothDirectKeysRequired.message}");
}
@Test
public void
directViolationIsDefaultIfNoDirectPrivateEvenIfMultipleIncompleteKeyPairTypesProvided() {
KeyDataConfig keyDataConfig = mock(KeyDataConfig.class);
Path path = mock(Path.class);
keyPair.setConfig(keyDataConfig);
keyPair.setPublicKey("public");
keyPair.setPublicKeyPath(path);
keyPair.setAzureVaultPublicKeyId("pubAzure");
keyPair.setHashicorpVaultPublicKeyId("pubHashicorp");
keyPair.setAwsSecretsManagerPublicKeyId("pubAWS");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.bothDirectKeysRequired.message}");
}
@Test
public void inlineViolationIfPrivateKeyConfigButNoPublicKey() {
KeyDataConfig keyDataConfig = mock(KeyDataConfig.class);
keyPair.setConfig(keyDataConfig);
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.bothInlineKeysRequired.message}");
}
@Test
public void inlineViolationIfNoPublicEvenIfVaultAndFilesystemAreIncomplete() {
KeyDataConfig keyDataConfig = mock(KeyDataConfig.class);
Path path = mock(Path.class);
keyPair.setConfig(keyDataConfig);
keyPair.setPublicKeyPath(path);
keyPair.setAzureVaultPublicKeyId("pubId");
keyPair.setHashicorpVaultPublicKeyId("pubId");
keyPair.setAwsSecretsManagerPublicKeyId("pubId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.bothInlineKeysRequired.message}");
}
@Test
public void azureViolationIfPublicIdButNoPrivateId() {
keyPair.setAzureVaultPublicKeyId("pubId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate("{UnsupportedKeyPair.bothAzureKeysRequired.message}");
}
@Test
public void azureViolationIfNoPublicIdButPrivateId() {
keyPair.setAzureVaultPrivateKeyId("privId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate("{UnsupportedKeyPair.bothAzureKeysRequired.message}");
}
@Test
public void azureViolationIfNoPublicIdEvenIfFilesystemIncomplete() {
Path path = mock(Path.class);
keyPair.setPublicKeyPath(path);
keyPair.setAzureVaultPrivateKeyId("privId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate("{UnsupportedKeyPair.bothAzureKeysRequired.message}");
}
@Test
public void hashicorpViolationIfPublicIdOnly() {
keyPair.setHashicorpVaultPublicKeyId("pubId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPrivateIdOnly() {
keyPair.setHashicorpVaultPrivateKeyId("privId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfSecretEngineNameOnly() {
keyPair.setHashicorpVaultSecretEngineName("secretEngineName");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfSecretNameOnly() {
keyPair.setHashicorpVaultSecretName("secretName");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPublicIdAndPrivateIdOnly() {
keyPair.setHashicorpVaultPublicKeyId("pubId");
keyPair.setHashicorpVaultPrivateKeyId("privId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPublicIdAndSecretEngineNameOnly() {
keyPair.setHashicorpVaultPublicKeyId("pubId");
keyPair.setHashicorpVaultSecretEngineName("secretEngine");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPublicIdAndSecretNameOnly() {
keyPair.setHashicorpVaultPublicKeyId("pubId");
keyPair.setHashicorpVaultSecretName("secretName");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPrivateIdAndSecretEngineNameOnly() {
keyPair.setHashicorpVaultPrivateKeyId("privId");
keyPair.setHashicorpVaultSecretEngineName("secretEngine");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPrivateIdAndSecretNameOnly() {
keyPair.setHashicorpVaultPrivateKeyId("privId");
keyPair.setHashicorpVaultSecretName("secretName");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfSecretEngineNameAndSecretNameOnly() {
keyPair.setHashicorpVaultSecretEngineName("secretEngine");
keyPair.setHashicorpVaultSecretName("secretName");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPublicIdAndPrivateIdAndSecretEngineNameOnly() {
keyPair.setHashicorpVaultPublicKeyId("pubId");
keyPair.setHashicorpVaultPrivateKeyId("privId");
keyPair.setHashicorpVaultSecretEngineName("secretEngine");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPublicIdAndPrivateIdAndSecretNameOnly() {
keyPair.setHashicorpVaultPublicKeyId("pubId");
keyPair.setHashicorpVaultPrivateKeyId("privId");
keyPair.setHashicorpVaultSecretName("secretName");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPublicIdAndSecretEngineNameAndSecretNameOnly() {
keyPair.setHashicorpVaultPublicKeyId("pubId");
keyPair.setHashicorpVaultSecretEngineName("secretEngine");
keyPair.setHashicorpVaultSecretName("secretName");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void hashicorpViolationIfPrivateIdAndSecretEngineNameAndSecretNameOnly() {
keyPair.setHashicorpVaultPrivateKeyId("privId");
keyPair.setHashicorpVaultSecretEngineName("secretEngine");
keyPair.setHashicorpVaultSecretName("secretName");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.allHashicorpKeyDataRequired.message}");
}
@Test
public void azureViolationIfNoPrivateIdEvenIfFilesystemIncomplete() {
Path path = mock(Path.class);
keyPair.setAzureVaultPublicKeyId("pubId");
keyPair.setPublicKeyPath(path);
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate("{UnsupportedKeyPair.bothAzureKeysRequired.message}");
}
@Test
public void awsViolationIfPublicIdButNoPrivateId() {
keyPair.setAwsSecretsManagerPublicKeyId("pubId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate("{UnsupportedKeyPair.bothAWSKeysRequired.message}");
}
@Test
public void awsViolationIfNoPublicIdButPrivateId() {
keyPair.setAwsSecretsManagerPrivateKeyId("privId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate("{UnsupportedKeyPair.bothAWSKeysRequired.message}");
}
@Test
public void awsViolationIfNoPublicIdEvenIfFilesystemIncomplete() {
Path path = mock(Path.class);
keyPair.setPublicKeyPath(path);
keyPair.setAwsSecretsManagerPrivateKeyId("privId");
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate("{UnsupportedKeyPair.bothAWSKeysRequired.message}");
}
@Test
public void filesystemViolationIfPublicPathButNoPrivatePath() {
Path path = mock(Path.class);
keyPair.setPublicKeyPath(path);
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.bothFilesystemKeysRequired.message}");
}
@Test
public void filesystemViolationIfNoPublicPathButPrivatePath() {
Path path = mock(Path.class);
keyPair.setPrivateKeyPath(path);
validator.isValid(keyPair, context);
verify(context)
.buildConstraintViolationWithTemplate(
"{UnsupportedKeyPair.bothFilesystemKeysRequired.message}");
}
@Test
public void defaultViolationIfNoRecognisedKeyPairDataProvided() {
// nothing set
validator.isValid(keyPair, context);
verifyNoMoreInteractions(context);
}
}
<|start_filename|>shared/src/test/java/com/quorum/tessera/version/CBORSupportVersionTest.java<|end_filename|>
package com.quorum.tessera.version;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Test;
public class CBORSupportVersionTest {
private CBORSupportVersion version = new CBORSupportVersion();
@Test
public void getVersion() {
assertThat(version.getVersion()).isEqualTo("5.0");
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/constraints/ValidServerAddress.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import static java.lang.annotation.ElementType.ANNOTATION_TYPE;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.ElementType.TYPE_PARAMETER;
import static java.lang.annotation.ElementType.TYPE_USE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import jakarta.validation.Constraint;
import jakarta.validation.Payload;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
@Target({FIELD, METHOD, PARAMETER, ANNOTATION_TYPE, TYPE_PARAMETER, TYPE_USE})
@Retention(RUNTIME)
@Constraint(validatedBy = ServerAddressValidator.class)
@Documented
public @interface ValidServerAddress {
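// Illustrative usage (the annotated field is hypothetical):
// @ValidServerAddress(supportedSchemes = {"http", "https"}) private String serverAddress;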
boolean isBindingAddress() default false;
String[] supportedSchemes() default {"unix", "http", "https"};
String message() default "{ValidServerAddress.message}";
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
}
<|start_filename|>enclave/enclave-api/src/main/java/com/quorum/tessera/enclave/CBOREncoder.java<|end_filename|>
package com.quorum.tessera.enclave;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.dataformat.cbor.CBORFactory;
import com.fasterxml.jackson.dataformat.cbor.CBORGenerator;
import com.fasterxml.jackson.dataformat.cbor.CBORParser;
import com.quorum.tessera.encryption.PublicKey;
import java.io.ByteArrayOutputStream;
import java.util.*;
public class CBOREncoder implements PayloadEncoder {
final CBORFactory cborFactory = new CBORFactory();
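// The payload is serialised as a single CBOR map with 11 entries: sender, cipherText, nonce,
// recipientNonce, recipientBoxes, recipients, privacyFlag, affected, execHash, mandatoryFor
// and privacyGroupId; decode() reads the same entries back regardless of their order.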
@Override
public byte[] encode(EncodedPayload payload) {
ByteArrayOutputStream output = new ByteArrayOutputStream();
try (CBORGenerator generator = cborFactory.createGenerator(output)) {
generator.writeStartObject(11);
generator.writeBinaryField("sender", payload.getSenderKey().getKeyBytes());
generator.writeBinaryField("cipherText", payload.getCipherText());
generator.writeBinaryField("nonce", payload.getCipherTextNonce().getNonceBytes());
generator.writeBinaryField("recipientNonce", payload.getRecipientNonce().getNonceBytes());
generator.writeFieldName("recipientBoxes");
generator.writeStartArray(payload.getRecipientBoxes().size());
for (RecipientBox box : payload.getRecipientBoxes()) {
generator.writeBinary(box.getData());
}
generator.writeEndArray();
generator.writeFieldName("recipients");
generator.writeStartArray(payload.getRecipientKeys().size());
for (PublicKey key : payload.getRecipientKeys()) {
generator.writeBinary(key.getKeyBytes());
}
generator.writeEndArray();
generator.writeNumberField("privacyFlag", payload.getPrivacyMode().getPrivacyFlag());
generator.writeFieldName("affected");
generator.writeStartObject(payload.getAffectedContractTransactions().size());
for (Map.Entry<TxHash, SecurityHash> entry :
payload.getAffectedContractTransactions().entrySet()) {
generator.writeFieldName(entry.getKey().encodeToBase64());
generator.writeBinary(entry.getValue().getData());
}
generator.writeEndObject();
generator.writeBinaryField("execHash", payload.getExecHash());
generator.writeFieldName("mandatoryFor");
generator.writeStartArray(payload.getMandatoryRecipients().size());
for (PublicKey recipient : payload.getMandatoryRecipients()) {
generator.writeBinary(recipient.getKeyBytes());
}
generator.writeEndArray();
final byte[] privacyGroupId =
payload.getPrivacyGroupId().map(PrivacyGroup.Id::getBytes).orElse(new byte[0]);
generator.writeBinaryField("privacyGroupId", privacyGroupId);
generator.writeEndObject();
generator.flush();
} catch (Exception ex) {
throw new RuntimeException("Unable to encode payload. ", ex);
}
return output.toByteArray();
}
@Override
public EncodedPayload decode(byte[] input) {
EncodedPayload.Builder payloadBuilder = EncodedPayload.Builder.create();
try (final CBORParser parser = cborFactory.createParser(input)) {
validateToken(JsonToken.START_OBJECT, parser.nextToken());
while (parser.nextFieldName() != null) {
if (parser.getCurrentName().equals("sender")) {
validateToken(JsonToken.VALUE_EMBEDDED_OBJECT, parser.nextToken());
final byte[] senderKey = parser.getBinaryValue();
payloadBuilder.withSenderKey(PublicKey.from(senderKey));
continue;
}
if (parser.getCurrentName().equals("cipherText")) {
validateToken(JsonToken.VALUE_EMBEDDED_OBJECT, parser.nextToken());
final byte[] cipherText = parser.getBinaryValue();
payloadBuilder.withCipherText(cipherText);
continue;
}
if (parser.getCurrentName().equals("nonce")) {
validateToken(JsonToken.VALUE_EMBEDDED_OBJECT, parser.nextToken());
final byte[] nonceBytes = parser.getBinaryValue();
payloadBuilder.withCipherTextNonce(nonceBytes);
continue;
}
if (parser.getCurrentName().equals("recipientNonce")) {
validateToken(JsonToken.VALUE_EMBEDDED_OBJECT, parser.nextToken());
final byte[] recipientNonceBytes = parser.getBinaryValue();
payloadBuilder.withRecipientNonce(recipientNonceBytes);
continue;
}
if (parser.getCurrentName().equals("recipients")) {
validateToken(JsonToken.START_ARRAY, parser.nextToken());
while (parser.nextToken() != JsonToken.END_ARRAY) {
final byte[] recipientBytes = parser.getBinaryValue();
payloadBuilder.withRecipientKey(PublicKey.from(recipientBytes));
}
continue;
}
if (parser.getCurrentName().equals("recipientBoxes")) {
validateToken(JsonToken.START_ARRAY, parser.nextToken());
while (parser.nextToken() != JsonToken.END_ARRAY) {
final byte[] box = parser.getBinaryValue();
payloadBuilder.withRecipientBox(box);
}
continue;
}
if (parser.getCurrentName().equals("privacyFlag")) {
final int flag = parser.nextIntValue(0);
payloadBuilder.withPrivacyFlag(flag);
continue;
}
if (parser.getCurrentName().equals("affected")) {
validateToken(JsonToken.START_OBJECT, parser.nextToken());
final Map<TxHash, byte[]> affectedTxs = new HashMap<>();
while (parser.nextToken() != JsonToken.END_OBJECT) {
final TxHash txHash = new TxHash(parser.currentName());
validateToken(JsonToken.VALUE_EMBEDDED_OBJECT, parser.nextToken());
final byte[] securityHashBytes = parser.getBinaryValue();
affectedTxs.put(txHash, securityHashBytes);
}
payloadBuilder.withAffectedContractTransactions(affectedTxs);
continue;
}
if (parser.getCurrentName().equals("execHash")) {
validateToken(JsonToken.VALUE_EMBEDDED_OBJECT, parser.nextToken());
final byte[] execHash = parser.getBinaryValue();
payloadBuilder.withExecHash(execHash);
continue;
}
if (parser.getCurrentName().equals("mandatoryFor")) {
validateToken(JsonToken.START_ARRAY, parser.nextToken());
final Set<PublicKey> mandatoryRecipients = new HashSet<>();
while (parser.nextToken() != JsonToken.END_ARRAY) {
final byte[] recipient = parser.getBinaryValue();
mandatoryRecipients.add(PublicKey.from(recipient));
}
payloadBuilder.withMandatoryRecipients(mandatoryRecipients);
continue;
}
if (parser.getCurrentName().equals("privacyGroupId")) {
validateToken(JsonToken.VALUE_EMBEDDED_OBJECT, parser.nextToken());
final byte[] groupId = parser.getBinaryValue();
if (groupId.length > 0)
payloadBuilder.withPrivacyGroupId(PrivacyGroup.Id.fromBytes(groupId));
}
}
} catch (Exception ex) {
throw new RuntimeException("Unable to decode payload data. ", ex);
}
return payloadBuilder.build();
}
@Override
public EncodedPayloadCodec encodedPayloadCodec() {
return EncodedPayloadCodec.CBOR;
}
private void validateToken(JsonToken expected, JsonToken current) {
if (current != expected) {
throw new IllegalArgumentException("Invalid payload data");
}
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/AppType.java<|end_filename|>
package com.quorum.tessera.config;
import jakarta.xml.bind.annotation.XmlEnumValue;
public enum AppType {
P2P,
Q2T,
@XmlEnumValue("ThirdParty")
THIRD_PARTY,
ENCLAVE,
ADMIN
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/constraints/SslConfigValidator.java<|end_filename|>
package com.quorum.tessera.config.constraints;
import com.quorum.tessera.config.SslAuthenticationMode;
import com.quorum.tessera.config.SslConfig;
import com.quorum.tessera.config.SslConfigType;
import com.quorum.tessera.config.SslTrustMode;
import com.quorum.tessera.config.util.EnvironmentVariableProvider;
import com.quorum.tessera.config.util.EnvironmentVariableProviderFactory;
import com.quorum.tessera.config.util.EnvironmentVariables;
import jakarta.validation.ConstraintValidator;
import jakarta.validation.ConstraintValidatorContext;
import java.nio.file.Files;
import java.util.Objects;
public class SslConfigValidator implements ConstraintValidator<ValidSsl, SslConfig> {
private final EnvironmentVariableProvider envVarProvider;
public SslConfigValidator() {
this(EnvironmentVariableProviderFactory.load().create());
}
public SslConfigValidator(EnvironmentVariableProvider envVarProvider) {
this.envVarProvider = envVarProvider;
}
@Override
public boolean isValid(SslConfig sslConfig, ConstraintValidatorContext context) {
context.disableDefaultConstraintViolation();
if (Objects.isNull(sslConfig)) {
return true;
}
if (sslConfig.getTls() == SslAuthenticationMode.STRICT) {
if (!sslConfig.isGenerateKeyStoreIfNotExisted()) {
if (!isServerKeyStoreConfigValid(sslConfig, context)
|| !isClientKeyStoreConfigValid(sslConfig, context)) {
return false;
}
}
if (!isTrustModeConfigValid(sslConfig, context)) {
return false;
}
if (!isServerConfigValidForWhiteListMode(sslConfig, context)) {
return false;
}
if (!isServerConfigValidForCAMode(sslConfig, context)) {
return false;
}
if (!isClientConfigValidForWhiteListMode(sslConfig, context)) {
return false;
}
if (!isClientConfigValidForCAMode(sslConfig, context)) {
return false;
}
}
return true;
}
private boolean isServerKeyStoreConfigValid(
SslConfig sslConfig, ConstraintValidatorContext context) {
if (SslConfigType.CLIENT_ONLY == sslConfig.getSslConfigType()) {
return true;
}
if (Objects.isNull(sslConfig.getServerKeyStore())
|| !isPasswordProvided(
sslConfig.getServerKeyStorePassword(),
sslConfig.getEnvironmentVariablePrefix(),
EnvironmentVariables.SERVER_KEYSTORE_PWD)
|| Files.notExists(sslConfig.getServerKeyStore())) {
if (Objects.isNull(sslConfig.getServerTlsKeyPath())
|| Objects.isNull(sslConfig.getServerTlsCertificatePath())
|| Files.notExists(sslConfig.getServerTlsKeyPath())
|| Files.notExists(sslConfig.getServerTlsCertificatePath())) {
setMessage(
"Server keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created",
context);
return false;
}
}
return true;
}
private boolean isClientKeyStoreConfigValid(
SslConfig sslConfig, ConstraintValidatorContext context) {
if (SslConfigType.SERVER_ONLY == sslConfig.getSslConfigType()) {
return true;
}
if (Objects.isNull(sslConfig.getClientKeyStore())
|| !isPasswordProvided(
sslConfig.getClientKeyStorePassword(),
sslConfig.getEnvironmentVariablePrefix(),
EnvironmentVariables.CLIENT_KEYSTORE_PWD)
|| Files.notExists(sslConfig.getClientKeyStore())) {
if (Objects.isNull(sslConfig.getClientTlsKeyPath())
|| Objects.isNull(sslConfig.getClientTlsCertificatePath())
|| Files.notExists(sslConfig.getClientTlsKeyPath())
|| Files.notExists(sslConfig.getClientTlsCertificatePath())) {
setMessage(
"Client keystore configuration not valid. "
+ "Please ensure keystore file exists or keystore password not null, "
+ "otherwise please set keystore generation flag to true to have keystore created",
context);
return false;
}
}
return true;
}
private boolean isTrustModeConfigValid(SslConfig sslConfig, ConstraintValidatorContext context) {
if ((Objects.isNull(sslConfig.getServerTrustMode())
&& sslConfig.getSslConfigType() != SslConfigType.CLIENT_ONLY)
|| (Objects.isNull(sslConfig.getClientTrustMode())
&& sslConfig.getSslConfigType() != SslConfigType.SERVER_ONLY)) {
setMessage(
"Trust mode does not have valid value. Please check server/client trust mode config",
context);
return false;
}
return true;
}
private boolean isServerConfigValidForWhiteListMode(
SslConfig sslConfig, ConstraintValidatorContext context) {
if (sslConfig.getServerTrustMode() == SslTrustMode.WHITELIST) {
if (Objects.isNull(sslConfig.getKnownClientsFile())
|| Files.notExists(sslConfig.getKnownClientsFile())) {
setMessage(
"Known clients file not found. If server trust mode is WHITELIST, known clients file must be provided",
context);
return false;
}
}
return true;
}
private boolean isServerConfigValidForCAMode(
SslConfig sslConfig, ConstraintValidatorContext context) {
if (sslConfig.getServerTrustMode() == SslTrustMode.CA) {
if (Objects.isNull(sslConfig.getServerTrustStore())
|| !isPasswordProvided(
sslConfig.getServerTrustStorePassword(),
sslConfig.getEnvironmentVariablePrefix(),
EnvironmentVariables.SERVER_TRUSTSTORE_PWD)
|| Files.notExists(sslConfig.getServerTrustStore())) {
if (Objects.isNull(sslConfig.getServerTrustCertificates())) {
setMessage(
"Trust store config not valid. If server trust mode is CA, trust store must exist and not be null",
context);
return false;
}
}
}
return true;
}
private boolean isClientConfigValidForWhiteListMode(
SslConfig sslConfig, ConstraintValidatorContext context) {
if (sslConfig.getClientTrustMode() == SslTrustMode.WHITELIST) {
if (Objects.isNull(sslConfig.getKnownServersFile())
|| Files.notExists(sslConfig.getKnownServersFile())) {
setMessage(
"Known servers file not found. If client trust mode is WHITELIST, known servers file must be provided",
context);
return false;
}
}
return true;
}
private boolean isClientConfigValidForCAMode(
SslConfig sslConfig, ConstraintValidatorContext context) {
if (sslConfig.getClientTrustMode() == SslTrustMode.CA) {
if (Objects.isNull(sslConfig.getClientTrustStore())
|| !isPasswordProvided(
sslConfig.getClientTrustStorePassword(),
sslConfig.getEnvironmentVariablePrefix(),
EnvironmentVariables.CLIENT_TRUSTSTORE_PWD)
|| Files.notExists(sslConfig.getClientTrustStore())) {
if (Objects.isNull(sslConfig.getClientTrustCertificates())) {
setMessage(
"Trust store config not valid. If client trust mode is CA, trust store must exist and not be null",
context);
return false;
}
}
}
return true;
}
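// A password counts as provided when it is set in the config, or available via the global
// environment variable, or via the prefixed environment variable (<prefix>_<variable name>).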
private boolean isPasswordProvided(char[] configPassword, String envVarPrefix, String envVar) {
return configPassword != null
|| envVarProvider.hasEnv(envVar)
|| envVarProvider.hasEnv(envVarPrefix + "_" + envVar);
}
private void setMessage(final String message, ConstraintValidatorContext context) {
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
}
}
<|start_filename|>migration/multitenancy/src/main/java/com/quorum/tessera/multitenancy/migration/MigrationCliAdapter.java<|end_filename|>
package com.quorum.tessera.multitenancy.migration;
import com.quorum.tessera.cli.CliAdapter;
import com.quorum.tessera.cli.CliResult;
import com.quorum.tessera.cli.CliType;
import com.quorum.tessera.config.Config;
import jakarta.persistence.EntityManagerFactory;
import java.util.concurrent.Callable;
import picocli.CommandLine;
@CommandLine.Command(
headerHeading = "Usage:%n%n",
synopsisHeading = "%n",
descriptionHeading = "%nDescription:%n%n",
parameterListHeading = "%nParameters:%n",
optionListHeading = "%nOptions:%n",
header = "Migrate one database into another")
public class MigrationCliAdapter implements CliAdapter, Callable<CliResult> {
@CommandLine.Option(
names = "--primary",
description = "path to primary node configuration file",
required = true)
private Config configPrimary;
@CommandLine.Option(
names = "--secondary",
description = "path to secondary node configuration file",
required = true)
private Config configSecondary;
@Override
public CliType getType() {
return CliType.MULTITENANCY_MIGRATION;
}
@Override
public CliResult execute(String... args) {
EntityManagerFactory primaryEntityManagerFactory =
JdbcConfigUtil.entityManagerFactory(configPrimary.getJdbcConfig());
EntityManagerFactory secondaryEntityManagerFactory =
JdbcConfigUtil.entityManagerFactory(configSecondary.getJdbcConfig());
// migrate raw
new MigrationRunner(primaryEntityManagerFactory, secondaryEntityManagerFactory).run();
return new CliResult(0, true, null);
}
@Override
public CliResult call() {
return this.execute();
}
}
<|start_filename|>tessera-jaxrs/common-jaxrs/src/test/java/com/quorum/tessera/api/common/VersionResourceTest.java<|end_filename|>
package com.quorum.tessera.api.common;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import com.quorum.tessera.api.Version;
import com.quorum.tessera.version.ApiVersion;
import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonArrayBuilder;
import java.util.List;
import java.util.stream.Collectors;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class VersionResourceTest {
private VersionResource instance;
private Version expectedVersion;
private static final String VERSION_VALUE = "MOCK";
@Before
public void onSetUp() {
expectedVersion = mock(Version.class);
when(expectedVersion.version()).thenReturn(VERSION_VALUE);
instance = new VersionResource(expectedVersion);
}
@After
public void afterTest() {
verifyNoMoreInteractions(expectedVersion);
}
@Test
public void getVersion() {
assertThat(instance.getVersion()).isEqualTo(VERSION_VALUE);
verify(expectedVersion).version();
}
@Test
public void getDistributionVersion() {
assertThat(instance.getDistributionVersion()).isEqualTo(VERSION_VALUE);
verify(expectedVersion).version();
}
@Test
public void getVersions() {
// Make sure that elements are defined in unnatural order to test sorting
List<Double> versions = List.of(03.00, 01.00, 02.00);
JsonArray result;
try (var apiVersionMockedStatic = mockStatic(ApiVersion.class)) {
apiVersionMockedStatic
.when(ApiVersion::versions)
.thenReturn(
versions.stream()
.map(String::valueOf)
.map(s -> "v" + s)
.collect(Collectors.toList()));
result = instance.getVersions();
apiVersionMockedStatic.verify(ApiVersion::versions);
apiVersionMockedStatic.verifyNoMoreInteractions();
}
JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder();
versions.stream().sorted().map(String::valueOf).forEach(v -> jsonArrayBuilder.add(v));
JsonArray expected = jsonArrayBuilder.build();
assertThat(result).containsExactlyElementsOf(expected);
}
@Test
public void getVersionsNoPrefix() {
// Make sure that elements are defined in unnatural order to test sorting
List<Double> versions = List.of(03.00, 01.00, 02.00);
JsonArray result;
try (var apiVersionMockedStatic = mockStatic(ApiVersion.class)) {
apiVersionMockedStatic
.when(ApiVersion::versions)
.thenReturn(versions.stream().map(String::valueOf).collect(Collectors.toList()));
result = instance.getVersions();
apiVersionMockedStatic.verify(ApiVersion::versions);
apiVersionMockedStatic.verifyNoMoreInteractions();
}
JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder();
versions.stream().sorted().map(String::valueOf).forEach(v -> jsonArrayBuilder.add(v));
JsonArray expected = jsonArrayBuilder.build();
assertThat(result).containsExactlyElementsOf(expected);
}
@Test
public void defaultConstructor() {
VersionResource versionResource = new VersionResource();
assertThat(versionResource).isNotNull();
assertThat(versionResource.getDistributionVersion())
.isEqualTo(System.getProperty("project.version"), "project.version not set");
assertThat(versionResource.getVersion())
.isEqualTo(System.getProperty("project.version"), "project.version not set");
}
}
<|start_filename|>config/src/main/java/com/quorum/tessera/config/ConfigProperties.java<|end_filename|>
package com.quorum.tessera.config;
import jakarta.xml.bind.JAXBElement;
import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlAnyElement;
import java.util.ArrayList;
import java.util.List;
@XmlAccessorType(XmlAccessType.FIELD)
public class ConfigProperties {
@XmlAnyElement private List<JAXBElement<String>> properties = new ArrayList<>();
public ConfigProperties() {}
public List<JAXBElement<String>> getProperties() {
return properties;
}
public void setProperties(List<JAXBElement<String>> properties) {
this.properties = properties;
}
}
<|start_filename|>tessera-recover/src/test/java/com/quorum/tessera/recovery/workflow/internal/BatchResendManagerImplTest.java<|end_filename|>
package com.quorum.tessera.recovery.workflow.internal;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.AdditionalMatchers.gt;
import static org.mockito.AdditionalMatchers.lt;
import static org.mockito.Mockito.*;
import com.quorum.tessera.base64.Base64Codec;
import com.quorum.tessera.data.EncryptedTransaction;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.data.staging.StagingEntityDAO;
import com.quorum.tessera.data.staging.StagingTransaction;
import com.quorum.tessera.enclave.*;
import com.quorum.tessera.encryption.Nonce;
import com.quorum.tessera.encryption.PublicKey;
import com.quorum.tessera.recovery.resend.PushBatchRequest;
import com.quorum.tessera.recovery.resend.ResendBatchRequest;
import com.quorum.tessera.recovery.resend.ResendBatchResponse;
import com.quorum.tessera.recovery.workflow.BatchResendManager;
import com.quorum.tessera.recovery.workflow.BatchWorkflow;
import com.quorum.tessera.recovery.workflow.BatchWorkflowContext;
import com.quorum.tessera.recovery.workflow.BatchWorkflowFactory;
import java.util.List;
import java.util.Optional;
import java.util.ServiceLoader;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class BatchResendManagerImplTest {
private PayloadEncoder payloadEncoder;
private StagingEntityDAO stagingEntityDAO;
private EncryptedTransactionDAO encryptedTransactionDAO;
private BatchResendManager manager;
private static final String KEY_STRING = "<KEY>0Bc=";
private final PublicKey publicKey = PublicKey.from(Base64Codec.create().decode(KEY_STRING));
private BatchWorkflowFactory batchWorkflowFactory;
@Before
public void beforeTest() {
payloadEncoder = mock(PayloadEncoder.class);
stagingEntityDAO = mock(StagingEntityDAO.class);
encryptedTransactionDAO = mock(EncryptedTransactionDAO.class);
batchWorkflowFactory = mock(BatchWorkflowFactory.class);
manager =
new BatchResendManagerImpl(
stagingEntityDAO, encryptedTransactionDAO, 5, batchWorkflowFactory);
}
@After
public void tearDown() {
verifyNoMoreInteractions(payloadEncoder);
verifyNoMoreInteractions(stagingEntityDAO);
verifyNoMoreInteractions(encryptedTransactionDAO);
verifyNoMoreInteractions(batchWorkflowFactory);
}
@Test
public void resendBatch() {
ResendBatchRequest request =
ResendBatchRequest.Builder.create().withBatchSize(3).withPublicKey(KEY_STRING).build();
List<EncryptedTransaction> transactions =
IntStream.range(0, 5)
.mapToObj(i -> mock(EncryptedTransaction.class))
.collect(Collectors.toUnmodifiableList());
when(encryptedTransactionDAO.transactionCount()).thenReturn(101L);
when(encryptedTransactionDAO.retrieveTransactions(lt(100), anyInt())).thenReturn(transactions);
when(encryptedTransactionDAO.retrieveTransactions(gt(99), anyInt()))
.thenReturn(singletonList(mock(EncryptedTransaction.class)));
BatchWorkflow batchWorkflow = mock(BatchWorkflow.class);
when(batchWorkflow.getPublishedMessageCount()).thenReturn(999L);
when(batchWorkflowFactory.create(101L)).thenReturn(batchWorkflow);
final ResendBatchResponse result = manager.resendBatch(request);
assertThat(result.getTotal()).isEqualTo(999L);
verify(batchWorkflow).getPublishedMessageCount();
verify(batchWorkflow, times(101)).execute(any(BatchWorkflowContext.class));
verify(encryptedTransactionDAO, times(21)).retrieveTransactions(anyInt(), anyInt());
verify(encryptedTransactionDAO).transactionCount();
verify(batchWorkflowFactory).create(101L);
}
@Test
public void useMaxResultsWhenBatchSizeNotProvided() {
final ResendBatchRequest request =
ResendBatchRequest.Builder.create().withPublicKey(KEY_STRING).build();
List<EncryptedTransaction> transactions =
IntStream.range(0, 5)
.mapToObj(i -> mock(EncryptedTransaction.class))
.collect(Collectors.toUnmodifiableList());
when(encryptedTransactionDAO.transactionCount()).thenReturn(101L);
BatchWorkflow batchWorkflow = mock(BatchWorkflow.class);
when(batchWorkflow.getPublishedMessageCount())
.thenReturn(999L); // arbitrary total that's returned as result.getTotal()
when(batchWorkflowFactory.create(101L)).thenReturn(batchWorkflow);
when(encryptedTransactionDAO.retrieveTransactions(lt(100), anyInt())).thenReturn(transactions);
when(encryptedTransactionDAO.retrieveTransactions(gt(99), anyInt()))
.thenReturn(List.of(mock(EncryptedTransaction.class)));
final ResendBatchResponse result = manager.resendBatch(request);
assertThat(result.getTotal()).isEqualTo(999L);
verify(batchWorkflow, times(101)).execute(any(BatchWorkflowContext.class));
verify(encryptedTransactionDAO, times(21)).retrieveTransactions(anyInt(), anyInt());
verify(encryptedTransactionDAO).transactionCount();
verify(batchWorkflowFactory).create(101L);
}
@Test
public void useMaxResultsAlsoWhenBatchSizeTooLarge() {
final ResendBatchRequest request =
ResendBatchRequest.Builder.create()
.withBatchSize(10000000)
.withPublicKey(KEY_STRING)
.build();
List<EncryptedTransaction> transactions =
IntStream.range(0, 5)
.mapToObj(i -> mock(EncryptedTransaction.class))
.collect(Collectors.toUnmodifiableList());
when(encryptedTransactionDAO.transactionCount()).thenReturn(101L);
when(encryptedTransactionDAO.retrieveTransactions(lt(100), anyInt())).thenReturn(transactions);
when(encryptedTransactionDAO.retrieveTransactions(gt(99), anyInt()))
.thenReturn(singletonList(mock(EncryptedTransaction.class)));
final BatchWorkflow batchWorkflow = mock(BatchWorkflow.class);
when(batchWorkflow.getPublishedMessageCount()).thenReturn(999L);
when(batchWorkflowFactory.create(101L)).thenReturn(batchWorkflow);
final ResendBatchResponse result = manager.resendBatch(request);
assertThat(result.getTotal()).isEqualTo(999L);
verify(batchWorkflow, times(101)).execute(any(BatchWorkflowContext.class));
verify(encryptedTransactionDAO, times(21)).retrieveTransactions(anyInt(), anyInt());
verify(encryptedTransactionDAO).transactionCount();
verify(batchWorkflowFactory).create(101L);
}
@Test
public void createWithMinimalConstructor() {
assertThat(
new BatchResendManagerImpl(
stagingEntityDAO, encryptedTransactionDAO, 1, mock(BatchWorkflowFactory.class)))
.isNotNull();
}
@Test
public void calculateBatchCount() {
long numberOfRecords = 10;
long maxResults = 3;
int batchCount = BatchResendManagerImpl.calculateBatchCount(maxResults, numberOfRecords);
assertThat(batchCount).isEqualTo(4);
}
@Test
public void calculateBatchCountTotalLowerThanBatchSizeIsSingleBatch() {
long numberOfRecords = 100;
long maxResults = 10;
int batchCount = BatchResendManagerImpl.calculateBatchCount(maxResults, numberOfRecords);
assertThat(batchCount).isEqualTo(10);
}
@Test
public void createBatchResendManager() {
BatchResendManager expected = mock(BatchResendManager.class);
BatchResendManager result;
try (var staticServiceLoader = mockStatic(ServiceLoader.class)) {
ServiceLoader<BatchResendManager> serviceLoader = mock(ServiceLoader.class);
when(serviceLoader.findFirst()).thenReturn(Optional.of(expected));
staticServiceLoader
.when(() -> ServiceLoader.load(BatchResendManager.class))
.thenReturn(serviceLoader);
result = BatchResendManager.create();
staticServiceLoader.verify(() -> ServiceLoader.load(BatchResendManager.class));
staticServiceLoader.verifyNoMoreInteractions();
verify(serviceLoader).findFirst();
verifyNoMoreInteractions(serviceLoader);
}
assertThat(result).isNotNull().isSameAs(expected);
}
@Test
public void testStoreResendBatchMultipleVersions() {
try (var payloadDigestMockedStatic = mockStatic(PayloadDigest.class);
var payloadEncoderMockedStatic = mockStatic(PayloadEncoder.class)) {
payloadDigestMockedStatic
.when(PayloadDigest::create)
.thenReturn((PayloadDigest) cipherText -> cipherText);
payloadEncoderMockedStatic
.when(() -> PayloadEncoder.create(any()))
.thenReturn(payloadEncoder);
final EncodedPayload encodedPayload =
EncodedPayload.Builder.create()
.withSenderKey(publicKey)
.withCipherText("cipherText".getBytes())
.withCipherTextNonce(new Nonce("nonce".getBytes()))
.withRecipientBoxes(singletonList("box".getBytes()))
.withRecipientNonce(new Nonce("recipientNonce".getBytes()))
.withRecipientKeys(singletonList(PublicKey.from("receiverKey".getBytes())))
.withPrivacyMode(PrivacyMode.STANDARD_PRIVATE)
.withAffectedContractTransactions(emptyMap())
.withExecHash(new byte[0])
.build();
when(payloadEncoder.decode(any())).thenReturn(encodedPayload);
final byte[] raw = new PayloadEncoderImpl().encode(encodedPayload);
PushBatchRequest request = PushBatchRequest.from(List.of(raw), EncodedPayloadCodec.LEGACY);
StagingTransaction existing = new StagingTransaction();
when(stagingEntityDAO.retrieveByHash(any())).thenReturn(Optional.of(existing));
when(stagingEntityDAO.update(any(StagingTransaction.class)))
.thenReturn(new StagingTransaction());
manager.storeResendBatch(request);
verify(stagingEntityDAO).save(any(StagingTransaction.class));
verify(payloadEncoder).decode(any());
verify(payloadEncoder).encodedPayloadCodec();
payloadDigestMockedStatic.verify(PayloadDigest::create);
payloadDigestMockedStatic.verifyNoMoreInteractions();
}
}
}
<|start_filename|>security/src/main/java/module-info.java<|end_filename|>
module tessera.security {
requires jakarta.xml.bind;
requires cryptacular;
requires org.slf4j;
requires tessera.config;
requires tessera.shared;
requires org.bouncycastle.pkix;
requires org.bouncycastle.provider;
exports com.quorum.tessera.ssl.context;
uses com.quorum.tessera.ssl.context.ClientSSLContextFactory;
uses com.quorum.tessera.ssl.context.ServerSSLContextFactory;
provides com.quorum.tessera.ssl.context.ClientSSLContextFactory with
com.quorum.tessera.ssl.context.ClientSSLContextFactoryImpl;
provides com.quorum.tessera.ssl.context.ServerSSLContextFactory with
com.quorum.tessera.ssl.context.ServerSSLContextFactoryImpl;
}
| Chirag-786/tessera |
<|start_filename|>epub2twpub/epub-reader.js<|end_filename|>
/*
Reads an EPUB file and makes the content available via properties
*/
const fs = require("fs"),
path = require("path"),
{promisify} = require("util"),
readFileAsync = promisify(fs.readFile),
writeFileAsync = promisify(fs.writeFile),
{DOMParser,XMLSerializer} = require("xmldom"),
JSZip = require("jszip"),
{TextExtractor} = require("./text-extractor"),
{hash,resolvePath} = require("./utils");
const BINARY_MEDIA_TYPES = [
"image/gif",
"image/png",
"image/jpeg",
"audio/mpeg",
"audio/mp4"
];
const URL_PREFIX = "https://example.com/";
class EpubReader {
constructor (app) {
this.app = app;
this.metadata = Object.create(null); // Hashmap of metadata items
this.manifest = Object.create(null); // Hashmap by ID of {properties:,id:,href:,media-type:}
this.spine = []; // Array of IDs of items comprising the publication
this.chunks = []; // Array of chunks {href:, nodes: [], anchorIds: [], stylesheetIds: []}
this.toc = []; // Tree of {id:, text:, href:, children: {}}
this.stylesheets = Object.create(null); // Hashmap by ID of {text:}
this.images = Object.create(null); // Hashmap by path of {type:, text:}
this.errors = []; // Array of errors
}
logError(message) {
this.errors.push(message);
}
/*
Load an EPUB from a file path
*/
async load(epubFilepath) {
// Read the ZIP file
const epubFileData = await readFileAsync(epubFilepath);
this.epubHash = hash(epubFileData);
this.zip = await JSZip.loadAsync(epubFileData);
// Load the container file
this.containerFileContents = await this.zip.file("META-INF/container.xml").async("string");
this.containerFileDoc = new DOMParser().parseFromString(this.containerFileContents,"text/xml");
// Load the package file
this.packageFilePath = findNodeAndGetAttribute(this.containerFileDoc,["container","rootfiles","rootfile"],"full-path");
this.packageFileContents = await this.zip.file(this.packageFilePath).async("string");
this.packageFileDoc = new DOMParser().parseFromString(this.packageFileContents,"text/xml");
// Read Dublin Core metadata and meta tags
const nodeMetadata = findNode(this.packageFileDoc,["package","metadata"]);
Array.from(nodeMetadata.childNodes).forEach(node => {
const n = (node.tagName || "").toLowerCase();
if(n.substr(0,3) === "dc:") {
this.metadata[n] = node.textContent.replace(/\s+/mg," ");
} else if(n === "meta") {
const p = node.getAttribute("property"),
ref = node.getAttribute("refines"),
id = node.getAttribute("id"),
scheme = node.getAttribute("scheme"),
name = node.getAttribute("name"),
content = node.getAttribute("content");
if(p) {
this.metadata[p] = node.textContent.replace(/\s+/mg," ");
} else if(name && content) {
this.metadata[name] = content;
}
}
});
// Read manifest
const nodeManifest = findNode(this.packageFileDoc,["package","manifest"]);
Array.from(nodeManifest.childNodes).forEach(node => {
const n = (node.tagName || "").toLowerCase();
if(n === "item") {
const p = node.getAttribute("properties") || "",
id = node.getAttribute("id"),
mediaType = node.getAttribute("media-type");
var href = resolvePath(node.getAttribute("href"),this.packageFilePath);
// Some books include an extraneous slash in internal URLs
if(href.startsWith("/")) {
href = href.slice(1);
}
this.manifest[id] = {properties: p.split(" "), id: id, href: href, "media-type": mediaType};
}
});
// Get the spine node
this.nodeSpine = findNode(this.packageFileDoc,["package","spine"]);
// Read the spine
Array.from(this.nodeSpine.childNodes).forEach(node => {
if((node.tagName || "").toLowerCase() === "itemref") {
this.spine.push(node.getAttribute("idref"));
}
});
// Load the TOC
await this.loadToc();
// Read the text chunks and stylesheets
await this.loadTextChunks();
// Load the images
await this.loadImages();
}
/*
Check for a metadata item
*/
hasMetadataItem(name) {
return name in this.metadata;
}
/*
Get a metadata item
*/
getMetadataItem(name,defaultValue) {
if(name in this.metadata) {
return this.metadata[name];
} else {
return defaultValue;
}
}
/*
Get a manifest item
*/
getManifestItem(id,defaultValue) {
return this.manifest[id] || defaultValue;
}
/*
Get the media type of a manifest item
*/
getMediaTypeOfItem(href) {
var result;
for(const id of Object.keys(this.manifest)) {
const manifestItem = this.manifest[id];
if(manifestItem.href === href) {
result = manifestItem["media-type"];
}
}
return result;
}
/*
Load the table of contents
Returns a tree of {id:, text:, href:, children: {}}
*/
async loadToc() {
this.tocItem = this.manifest[this.nodeSpine.getAttribute("toc")].href;
// Get the TOC file
this.tocContents = await this.zip.file(this.tocItem).async("string");
this.tocDoc = new DOMParser().parseFromString(this.tocContents,"text/xml");
// Visit each node collecting up the entries
const visitNodes = nodes => {
const results = [];
Array.from(nodes).forEach(node => {
if(node.nodeType === 1 && node.tagName === "navPoint") {
results.push(visitNode(node));
}
});
return results;
};
const visitNode = node => {
const href = findNodeAndGetAttribute(node,["content"],"src");
return {
id: node.getAttribute("id"),
text: findNode(node,["navLabel","text"]).textContent,
href: resolvePath(href,this.packageFilePath),
children: visitNodes(node.childNodes)
};
};
// Start at the root
const navMap = findNode(this.tocDoc,["ncx","navMap"]);
this.toc = visitNodes(navMap.childNodes);
}
/*
Load the text chunks and stylesheets
*/
async loadTextChunks() {
// Setup the text extractor
const textExtractor = new TextExtractor({
getFile: async fileHref => {
const file = this.zip.file(fileHref);
return {
type: this.getMediaTypeOfItem(fileHref),
contents: file ? await file.async("nodebuffer") : ""
}
},
logError: this.logError.bind(this)
});
await textExtractor.initialise();
// Extract each HTML file listed in the spine
for(const spineItem of this.spine) {
const manifestItem = this.manifest[spineItem];
if(manifestItem["media-type"] === "application/xhtml+xml" ) {
const results = await textExtractor.getPageText(manifestItem.href);
// Collect the IDs of the stylesheets used in this file
const stylesheetIds = [];
for(const stylesheetText of results.stylesheets) {
// Generate an ID for this stylesheet from a hash of its text
const id = hash(stylesheetText,6);
// Save the id
stylesheetIds.push(id);
// Save the stylesheet text if we don't already have this ID
if(!(id in this.stylesheets)) {
this.stylesheets[id] = stylesheetText;
}
}
// Copy the chunks, adding the stylesheets
for(const chunk of results.chunks) {
chunk.stylesheetIds = stylesheetIds;
this.chunks.push(chunk);
}
}
}
}
/*
Load all the images
*/
async loadImages() {
// Get the image manifest items
for(const id of Object.keys(this.manifest)) {
const manifestItem = this.manifest[id];
if(manifestItem["media-type"].split("/")[0] === "image" ) {
const file = this.zip.file(manifestItem.href),
encoding = BINARY_MEDIA_TYPES.includes(manifestItem["media-type"]) ? "base64" : "text";
if(file) {
this.images[manifestItem.href] = {
type: manifestItem["media-type"],
text: await file.async(encoding)
};
} else {
this.logError(`Missing image: ${manifestItem.href}`);
}
}
}
}
}
function findNodeAndGetAttribute(rootNode,selectors,attributeName) {
const node = findNode(rootNode,selectors);
if(node) {
return node.getAttribute(attributeName);
}
return null;
}
/*
Find an XML node identified by a list of child tag names
rootNode: reference to root node
selectors: array of child tag names
*/
function findNode(rootNode,selectors) {
let node = rootNode;
for(const selector of selectors) {
node = Array.from(node.childNodes).find(node => !!node.tagName && node.tagName === selector);
if(!node) {
return null;
}
}
return node;
}
exports.EpubReader = EpubReader;
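/*
Usage sketch (illustrative only; mirrors how epub2twpub/index.js drives this class, and the
file path is a placeholder):

  const reader = new EpubReader(app);
  await reader.load("path/to/book.epub");
  // extracted data is then available on reader.metadata, reader.spine, reader.chunks,
  // reader.toc, reader.stylesheets and reader.images

Note that load() is async and must be awaited.
*/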
<|start_filename|>epub2twpub/transform-stylesheets.js<|end_filename|>
/*
Transform CSS
*/
const css = require("css");
/*
Not accepted CSS properties are omitted and an optional scoping class is added to each selector
*/
function cleanStylesheet(text,scopingClass) {
const ACCEPTED_PROPERTIES = [
// "background-color",
"clear",
// "color",
"display",
"float",
// "font-size",
"font-style",
"font-weight",
// "height",
// "line-height",
"list-style-type",
"text-align",
"text-decoration",
// "text-indent",
"text-transform"
// "white-space",
// "width"
];
const obj = css.parse(text,{
silent: true
});
const visitNode = node => {
if(Array.isArray(node)) {
node.forEach(node => visitNode(node));
} else if(typeof node === "object") {
// Adjust selectors to add a scoping class
if(node.selectors && scopingClass) {
node.selectors.forEach((selector,index) => {
node.selectors[index] = "." + scopingClass + " " + selector;
});
}
// Remove any properties not on the accept list
if(node.declarations) {
for(let d=node.declarations.length-1; d>=0; d--) {
const declaration = node.declarations[d];
if(ACCEPTED_PROPERTIES.indexOf(declaration.property) === -1) {
node.declarations.splice(d,1);
}
}
}
Object.keys(node).forEach(key => {
visitNode(node[key]);
});
}
};
visitNode(obj);
return css.stringify(obj,{
compress: true
});
}
exports.cleanStylesheet = cleanStylesheet;
function cleanStyleAttribute(text) {
const PREFIX = "html {",
SUFFIX = "}";
text = cleanStylesheet(PREFIX + text + SUFFIX);
return text.slice(5,-1);
}
exports.cleanStyleAttribute = cleanStyleAttribute;
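/*
Illustrative example (not part of the original module): given the rule
  "p { color: red; float: left; }"
and the scoping class "twpub-abc123", cleanStylesheet() drops "color" (not on the accept list),
keeps "float", and rewrites the selector, producing roughly ".twpub-abc123 p{float:left}".
cleanStyleAttribute() applies the same property filtering to a bare style attribute value.
*/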
<|start_filename|>twpub-wiki/plugins/twpub-tools/core/scroller.js<|end_filename|>
/*\
title: $:/core/modules/utils/dom/scroller.js
type: application/javascript
module-type: utils
Module that creates a $tw.utils.Scroller object prototype that manages scrolling in the browser
\*/
(function(){
/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";
/*
Event handler for when the `tm-scroll` event hits the document body
*/
var PageScroller = function() {
this.idRequestFrame = null;
this.requestAnimationFrame = window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
function(callback) {
return window.setTimeout(callback, 1000/60);
};
this.cancelAnimationFrame = window.cancelAnimationFrame ||
window.webkitCancelAnimationFrame ||
window.webkitCancelRequestAnimationFrame ||
window.mozCancelAnimationFrame ||
window.mozCancelRequestAnimationFrame ||
function(id) {
window.clearTimeout(id);
};
};
PageScroller.prototype.isScrolling = function() {
}
PageScroller.prototype.cancelScroll = function(srcWindow) {
};
/*
Handle an event
*/
PageScroller.prototype.handleEvent = function(event) {
if(event.type === "tm-scroll") {
if(event.paramObject && event.paramObject.selector) {
this.scrollSelectorIntoView(null,event.paramObject.selector);
} else {
this.scrollIntoView(event.target);
}
return false; // Event was handled
}
return true;
};
/*
Handle a scroll event hitting the page document
*/
PageScroller.prototype.scrollIntoView = function(element,callback) {
element.scrollIntoView({behavior: "smooth"});
$tw.utils.addClass(element,"tc-navigating");
setTimeout(function() {
$tw.utils.removeClass(element,"tc-navigating");
},$tw.utils.getAnimationDuration() * 1);
};
PageScroller.prototype.scrollSelectorIntoView = function(baseElement,selector,callback) {
baseElement = baseElement || document.body;
var element = baseElement.querySelector(selector);
if(element) {
this.scrollIntoView(element,callback);
}
};
exports.PageScroller = PageScroller;
})();
<|start_filename|>twpub-wiki/plugins/twpub-tools/selection-tracker.js<|end_filename|>
/*\
title: $:/plugins/tiddlywiki/twpub-tools/selection-tracker.js
type: application/javascript
module-type: startup
Background daemon to track the selection
\*/
(function(){
/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";
// Export name and synchronous status
exports.name = "selection-tracker";
exports.platforms = ["browser"];
exports.after = ["render"];
exports.synchronous = true;
exports.startup = function() {
$tw.selectionTracker = new SelectionTracker($tw.wiki,{
allowBlankSelectionPopup: true
});
};
function SelectionTracker(wiki,options) {
options = options || {};
var self = this;
this.wiki = wiki;
var timerId = null;
document.addEventListener("selectionchange",function(event) {
if(timerId) {
clearTimeout(timerId);
}
timerId = setTimeout(function() {
timerId = null;
self.handleSelectionChange();
},500);
});
}
SelectionTracker.prototype.handleSelectionChange = function() {
var selection = document.getSelection();
if(selection && selection.type === "Range") {
// Helper to get the tiddler title corresponding to a chunk container
var getTitleOfContainer = function(domNode) {
return domNode.id;
}
// Get information about the selection anchor and focus
var getSelectionInfo = function(targetDomNode,targetOffset) {
// Find the chunk container node
var domNode = targetDomNode;
if(domNode.nodeType === Node.TEXT_NODE) {
domNode = domNode.parentNode;
}
var container = domNode.closest(".twpub-chunk-frame");
if(!container) {
return null;
}
// Find the index of the container within the child nodes of its parent
var childNodeIndex = Array.prototype.indexOf.call(container.parentNode.childNodes,container);
// Walk through the chunk collecting the text before and after the specified domNode and offset
var beforeText = null, afterText = [];
var splitTextResult = function() {
beforeText = afterText;
afterText = [];
},
processNode = function(domNode) {
// Check for a text node
if(domNode.nodeType === Node.TEXT_NODE) {
// If this is the target node then perform the split
if(domNode === targetDomNode) {
afterText.push(domNode.textContent.substring(0,targetOffset));
splitTextResult();
afterText.push(domNode.textContent.substring(targetOffset));
} else {
afterText.push(domNode.textContent);
}
} else {
// Process the child nodes
$tw.utils.each(domNode.childNodes,function(childNode,childNodeIndex) {
// Check whether we need to split on this child node
if(domNode === targetDomNode && childNodeIndex === targetOffset) {
splitTextResult();
}
processNode(childNode);
});
}
};
processNode(container);
if(beforeText === null) {
splitTextResult();
}
// Return results
return {
container: container,
childNodeIndex: childNodeIndex,
beforeText: beforeText.join(""),
afterText: afterText.join("")
}
}
var anchor = getSelectionInfo(selection.anchorNode,selection.anchorOffset),
focus = getSelectionInfo(selection.focusNode,selection.focusOffset);
// Check that the containers share a parent
if(anchor && focus && anchor.container.parentNode === focus.container.parentNode) {
// Make sure that the anchor is before the focus
if((anchor.childNodeIndex > focus.childNodeIndex) || (anchor.container === focus.container && anchor.beforeText.length > focus.beforeText.length)) {
var temp = anchor;
anchor = focus;
focus = temp;
}
var chunks = [];
// Check for the selection being all in one chunk
if(anchor.container === focus.container) {
chunks.push({
title: getTitleOfContainer(anchor.container),
prefix: anchor.beforeText,
text: anchor.afterText.substring(0,anchor.afterText.length - focus.afterText.length),
suffix: focus.afterText
});
} else {
// We span two or more chunks
chunks.push({
title: getTitleOfContainer(anchor.container),
prefix: anchor.beforeText,
text: anchor.afterText
});
// Get the titles and text of the intervening tiddlers
var domNode;
if(anchor.container !== focus.container) {
domNode = anchor.container.nextElementSibling;
while(domNode && domNode !== focus.container) {
chunks.push({
title: getTitleOfContainer(domNode),
text: domNode.textContent
});
domNode = domNode.nextElementSibling;
}
}
chunks.push({
title: getTitleOfContainer(focus.container),
text: focus.beforeText,
suffix: focus.afterText
});
}
// Get the title of the tiddler containing the actions to be executed
var actionsTiddler = anchor.container.parentNode.getAttribute("data-selection-actions-title");
// Action the selection
this.performSelectionActions({
chunks: chunks,
actionsTiddler: actionsTiddler
});
}
}
};
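/*
Expected options (derived from how handleSelectionChange() calls this method):
  chunks: array of {title:, text:, prefix:, suffix:} describing the selected text within each chunk tiddler
  actionsTiddler: title of a tiddler containing the action string to invoke once the extract is created
*/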
SelectionTracker.prototype.performSelectionActions = function(options) {
// Create the annotated tiddlers and the annotation tiddlers
var annotatedTiddlerTitles = [];
for(var index=0; index<options.chunks.length; index++) {
var chunk = options.chunks[index];
var existingTiddler = $tw.wiki.getTiddler(chunk.title);
// Override the chunks to add the dynannotate code if they are still shadow tiddlers
if(!$tw.wiki.tiddlerExists(chunk.title)) {
$tw.wiki.addTiddler(new $tw.Tiddler(existingTiddler,$tw.wiki.getModificationFields(),{
text: $tw.wiki.getTiddlerText("$:/plugins/immateriel/twpub-tools/templates/new-chunk").replace("****INSERTION**POINT****",existingTiddler.fields.text),
tags: ["$:/tags/TwpubAnnotated"]
}));
}
annotatedTiddlerTitles.push(chunk.title);
$tw.wiki.addTiddler(new $tw.Tiddler($tw.wiki.getModificationFields(),{
title: $tw.wiki.generateNewTitle("$:/twpub/annotation"),
"annotate-tiddler": chunk.title,
"annotate-text": chunk.text,
"annotate-prefix": chunk.prefix,
"annotate-suffix": chunk.suffix,
tags: ["$:/tags/TwpubAnnotation"]
}));
}
// Create the extract tiddler
var extractTiddlerTitle = $tw.wiki.generateNewTitle("Extract"),
draftTiddlerTitle = $tw.wiki.generateNewTitle("Draft of '" + extractTiddlerTitle + "'");
$tw.wiki.addTiddler(new $tw.Tiddler({
title: draftTiddlerTitle,
"draft.of": extractTiddlerTitle,
"draft.title": extractTiddlerTitle,
text: "Please type your notes here",
tags: ["$:/tags/TwpubExtract"],
list: annotatedTiddlerTitles
}));
// Invoke the actions, passing the extract tiddler title as a variable
if(options.actionsTiddler) {
var actions = $tw.wiki.getTiddlerText(options.actionsTiddler)
if(actions) {
$tw.rootWidget.invokeActionString(actions,undefined,undefined,{
modifiedTiddler: draftTiddlerTitle
});
}
}
};
})();
<|start_filename|>package.json<|end_filename|>
{
"name": "twpub-tools",
"version": "0.1.0",
"description": "Tools for working with TWPUBs",
"main": "index.js",
"author": "",
"license": "BSD-3-Clause",
"dependencies": {
"css": "^3.0.0",
"jszip": "^3.5.0",
"puppeteer": "^8.0.0",
"tiddlywiki": "github:Jermolene/TiddlyWiki5",
"xmldom": "^0.5.0"
},
"scripts": {
"test": "node epub2twpub/test.js",
"start": "npm run build && npx http-server ./output",
"start-with-global-tiddlywiki": "tiddlywiki ./twpub-wiki --output ./output --rendertiddler $:/core/save/all index.html text/plain && npx http-server ./output",
"build": "./bin/slice-epubs-and-build-wiki.sh ./epubs ./tmp/twpubs && ./bin/build-twpub-library.sh ./tmp/twpubs ./output/library && ./bin/build-wiki.sh ./output",
"clean": "./bin/clean.sh"
}
}
<|start_filename|>twpub-wiki/plugins/twpub-tools/twpubtextparser.js<|end_filename|>
/*\
title: $:/core/modules/parsers/twpubtextparser.js
type: application/javascript
module-type: parser
Inherits from the base wikitext parser but is forced into inline mode
\*/
(function(){
/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";
var WikiParser = require("$:/core/modules/parsers/wikiparser/wikiparser.js")["text/vnd.tiddlywiki"],
HtmlParser = $tw.modules.createClassFromModule(require("$:/core/modules/parsers/wikiparser/rules/html.js"),$tw.WikiRuleBase),
EntityParser = $tw.modules.createClassFromModule(require("$:/core/modules/parsers/wikiparser/rules/entity.js"),$tw.WikiRuleBase);
var TwpubTextParser = function(type,text,options) {
var parser = new WikiParser(type,text,$tw.utils.extend({},options,{
parseAsInline: true,
rules: {
pragma: [],
block: [],
inline: [HtmlParser,EntityParser]
}
}));
this.tree = parser.tree;
this.prototype = parser.prototype;
};
exports["text/vnd.twpub"] = TwpubTextParser;
})();
<|start_filename|>epub2twpub/index.js<|end_filename|>
/*
Convert an EPUB file into a TWPUB plugin
*/
const fs = require("fs"),
path = require("path"),
{promisify} = require("util"),
readFileAsync = promisify(fs.readFile),
writeFileAsync = promisify(fs.writeFile),
{ArgParser} = require("./utils"),
{EpubReader} = require("./epub-reader"),
{TwpubPlugin} = require("./twpub-plugin");
class App {
constructor(args) {
// Get our app version number
this.version = require("../package.json").version;
// Parse arguments
this.args = new ArgParser(args,{
defaultOption: "epub",
mandatoryArguments: {
epub: "single",
output: "single"
}
});
}
async main() {
// Setup the epub
this.epubReader = new EpubReader(this);
await this.epubReader.load(this.args.byName.epub[0]);
// Create the twpub plugin
this.twpubPlugin = new TwpubPlugin(this,{epubReader: this.epubReader});
// Convert the epub
this.twpubPlugin.convertEpub();
// Save the twpub plugin
await writeFileAsync(this.args.byName.output[0],this.twpubPlugin.getPluginText(),"utf8");
}
}
const app = new App(process.argv.slice(2));
app.main().then(() => {
process.exit(0);
}).catch(err => {
console.error(err);
process.exit(1);
});
<|start_filename|>epub2twpub/puppeteer/get-page-text.js<|end_filename|>
/*
This script is executed within the context of a web page loaded into Puppeteer to extract the text chunks and stylesheets from a page.
Returns a structure: {chunks: [], stylesheets: [text]}
Each chunk entry is: {nodes: [], anchorIds: [], href:} where nodes is a tree of objects representing DOM nodes and strings representing
text nodes, and anchorIds is an array of anchor IDs associated with each chunk
Each stylesheet entry is the text of the stylesheet
*/
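/*
Illustrative example of a single chunk (values are made up):
  {
    nodes: [{tag: "em", childNodes: ["some emphasised text"]}, " followed by plain text"],
    anchorIds: ["chap01"],
    href: "OEBPS/chapter1.html"
  }
*/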
exports.getPageText = function(win,doc) {
win = win || window;
doc = doc || document;
const URL_PREFIX = "https://example.com/";
class ChunkList {
constructor() {
this.outputChunks = [];
this.isWithinChunk = false;
this.ancestorStack = [];
this.parentStack = [];
}
get stack() {
return this.ancestorStack;
}
findTopmostAncestor(callback) {
for(let t=this.ancestorStack.length-1; t>=0; t--) {
if(callback(this.ancestorStack[t])) {
return this.ancestorStack[t];
}
}
return null;
}
startChunk() {
if(this.isWithinChunk) {
this.endChunk();
}
const chunk = {
nodes: [],
anchorIds: [],
href: doc.location.href.slice(URL_PREFIX.length)
};
this.outputChunks.push(chunk);
this.isWithinChunk = true;
this.parentStack = [chunk.nodes];
this.ancestorStack.filter(nodeInfo => isStartOfNewChunk(nodeInfo.tag) || isInterestingPhrasingContent(nodeInfo.tag)).forEach(nodeInfo => {
const newNode = Object.assign({},nodeInfo);
this.parentStack[this.parentStack.length - 1].push(newNode);
delete newNode.private;
if(Object.keys(newNode.attributes).length === 0) {
delete newNode.attributes;
}
newNode.childNodes = [];
this.parentStack.push(newNode.childNodes);
});
}
endChunk() {
this.isWithinChunk = false;
}
addText(text) {
if(!this.isWithinChunk) {
this.startChunk();
}
const nodes = this.parentStack[this.parentStack.length - 1];
nodes.push(text);
}
openTag(nodeInfo) {
if(!this.isWithinChunk) {
this.startChunk();
}
const nodes = this.parentStack[this.parentStack.length - 1],
newNode = Object.assign({},nodeInfo);
nodes.push(newNode);
delete newNode.private;
if(Object.keys(newNode.attributes).length === 0) {
delete newNode.attributes;
}
newNode.childNodes = [];
this.parentStack.push(newNode.childNodes);
}
closeTag() {
this.parentStack.pop();
if(this.parentStack.length === 0) {
this.parentStack = [this.outputChunks[this.outputChunks.length - 1].nodes];
}
}
addAnchor(id) {
if(!this.isWithinChunk) {
this.startChunk();
}
const chunk = this.outputChunks[this.outputChunks.length - 1];
chunk.anchorIds = chunk.anchorIds || [];
chunk.anchorIds.push(id);
}
}
// Main
// Extract the stylesheet text
const stylesheets = [];
for(const styleNode of doc.styleSheets) {
stylesheets.push(Array.from(styleNode.cssRules).map(rule => rule.cssText).join("\n"));
}
// Visit each node of the document to extract the text
const chunks = new ChunkList();
visitNode(doc.body);
// Filter out blank chunks
const nonBlankChunks = chunks.outputChunks.filter(chunk => {
return !(chunk.anchorIds.length === 0 && (chunk.nodes.length === 1) && (typeof (chunk.nodes[0]) === "string") && (!(/\S/.test(chunk.nodes[0]))));
})
// Get the expected test results if present
const domExpectedResults = doc.getElementsByTagName("script")[0];
var expectedResults;
if(domExpectedResults && domExpectedResults.id === "expectedResults") {
try {
expectedResults = JSON.parse(domExpectedResults.textContent);
} catch(e) {
}
}
// Return the stylesheets and the chunks
return {
stylesheets: stylesheets,
chunks: nonBlankChunks,
expectedResults: expectedResults
};
// Node iterator
function visitNode(e,options) {
options = options || {};
var disableBlockProcessing = !!options.disableBlockProcessing;
switch(e.nodeType) {
case 1: // Node.ELEMENT_NODE
const nodeInfo = {
tag: e.tagName.toLowerCase(),
attributes: {
},
private: {
}
},
isonc = isStartOfNewChunk(nodeInfo.tag),
isipc = isInterestingPhrasingContent(nodeInfo.tag);
if(nodeInfo.tag === "li") {
const parentListElement = chunks.findTopmostAncestor(function(nodeInfo) {
return nodeInfo.tag === "ol" || nodeInfo.tag === "ul";
});
var count;
if(e.hasAttribute("value")) {
count = parseInt(e.getAttribute("value"),10) || 1;
} else {
count = (parentListElement.private.count || 0) + 1;
}
nodeInfo.attributes.value = count + "";
parentListElement.private.count = count;
} else if(nodeInfo.tag === "img") {
if(e.hasAttribute("src")) {
nodeInfo.attributes.src = e.src.slice(URL_PREFIX.length);
}
if(e.hasAttribute("width")) {
nodeInfo.attributes.width = e.getAttribute("width");
}
if(e.hasAttribute("height")) {
nodeInfo.attributes.height = e.getAttribute("height");
}
if(e.hasAttribute("title")) {
nodeInfo.attributes.tooltip = e.getAttribute("title");
}
if(e.hasAttribute("alt")) {
nodeInfo.attributes.alt = e.getAttribute("alt");
}
} else if(nodeInfo.tag === "a") {
if(e.href) {
nodeInfo.attributes.href = e.href;
}
}
if(e.hasAttribute("colspan")) {
nodeInfo.attributes.colspan = e.getAttribute("colspan");
}
if(e.hasAttribute("rowspan")) {
nodeInfo.attributes.rowspan = e.getAttribute("rowspan");
}
if(e.hasAttribute("dir")) {
nodeInfo.attributes.dir = e.getAttribute("dir");
}
if(e.className) {
nodeInfo.attributes["class"] = e.className;
}
if(e.style && e.style.cssText) {
nodeInfo.attributes.style = e.style.cssText;
}
if(isonc && !options.disableBlockProcessing) {
// Start new chunk. We do so by ending any current chunk so as to defer the creation of the chunk until we know it is needed
chunks.endChunk();
} else if(isipc || (isonc && options.disableBlockProcessing)) {
chunks.openTag(nodeInfo);
}
if(nodeInfo.tag === "table") {
disableBlockProcessing = true;
}
chunks.stack.push(nodeInfo);
if(e.hasAttribute("id") || e.hasAttribute("name")) {
chunks.addAnchor(e.getAttribute("id") || e.getAttribute("name"));
}
if(e.childNodes) {
for(let i=0; i<e.childNodes.length; i++) {
visitNode(e.childNodes[i],{
disableBlockProcessing: disableBlockProcessing
});
}
}
chunks.stack.pop();
if(isonc && !options.disableBlockProcessing) {
chunks.endChunk();
} else if(isipc || (isonc && options.disableBlockProcessing)) {
chunks.closeTag();
}
break;
case 3: // Node.TEXT_NODE
chunks.addText(e.nodeValue);
break;
}
}
// Utilities
function isStartOfNewChunk(tagName) {
return [
"div","p","h1","h2","h3","h4","h5","h6","li","center","blockquote","table","address","map","ol","ul"
].indexOf(tagName) !== -1;
}
function isInterestingPhrasingContent(tagName) {
return [
"a",
"tt","i","b","u","s","strike","big","small","font","em","strong","dfn","code","samp","kbd",
"var","cite","abbr","acronym","sub","sup","q","span","bdo","a","img","basefont","br","area",
"tbody","thead","tr","th","td",
"svg","image"
].indexOf(tagName) !== -1;
}
};
<|start_filename|>bin/make-twpub-library-plugin.js<|end_filename|>
#!/usr/bin/env node
/*
Make a plugin containing all of the twpub plugins
node ./bin/make-twpub-library-plugin.js <path-to-directory-of-twpub-json-files> <path-to-output-file>
*/
const fs = require("fs"),
path = require("path");
// Check arguments
const twpubPath = process.argv[2],
outputFilepath = process.argv[3];
if(!twpubPath) {
throw "Missing twpub directory path";
}
if(!outputFilepath) {
throw "Missing output filepath";
}
// Get the JSON of the plugins
const twpubPlugins = fs.readdirSync(twpubPath)
.map(filename => path.resolve(twpubPath,filename))
.filter(filepath => !fs.statSync(filepath).isDirectory() && filepath.endsWith(".json"))
.map(filepath => JSON.parse(fs.readFileSync(filepath,"utf8")));
// Assemble the output tiddler
const outputData = {
title: "$:/TWPUBLibrary",
type: "application/json",
"plugin-type": "library",
"text": JSON.stringify({
tiddlers: twpubPlugins.reduce((accumulator,twpubPlugin) => {
accumulator[twpubPlugin.title] = twpubPlugin;
return accumulator;
},{})
})
};
// Save the output tiddler
fs.writeFileSync(outputFilepath,JSON.stringify(outputData),"utf8");
<|start_filename|>epub2twpub/twpub-plugin.js<|end_filename|>
/*
Class representing a twpub plugin
*/
const {cleanStylesheet,cleanStyleAttribute} = require("./transform-stylesheets"),
{flattenTree} = require("./flatten-tree"),
{hash} = require("./utils");
const URL_PREFIX = "https://example.com/";
class TwpubPlugin {
constructor (app,options) {
this.app = app;
this.epubReader = options.epubReader;
this.fields = {}; // Fields of the plugin tiddler itself
this.tiddlers = {}; // Payload tiddlers
this.errors = []; // Array of conversion errors
}
logError(message) {
this.errors.push(message);
}
convertEpub() {
// Get the hash of the epub
this.hash = this.epubReader.epubHash.slice(0,16);
// Construct the title of the plugin
this.titlePrefix = "$:/plugins/twpub/" + this.hash;
// For text chunks, make a map of href (including anchor) to title
this.createAnchorToTitleMapping();
// Convert the text, TOC, stylesheets and images into tiddlers
this.convertText();
this.convertToc();
this.convertStylesheets();
this.convertImages();
// Setup the fields of the plugin tiddler
this.fields.list = "readme errors cover text";
this.fields.version = "v0.0.1";
this.fields["plugin-type"] = "plugin";
this.fields.type = "application/json";
this.fields["epub-title"] = this.epubReader.getMetadataItem("dc:title","(Untitled)");
this.fields.name = "TWPUB";
this.fields.title = this.titlePrefix;
this.fields.description = this.fields["epub-title"];
this.fields["converter-version"] = this.app.version.toString();
this.fields["epub-creator"] = this.epubReader.getMetadataItem("dc:creator","(Unknown)");
this.fields["conversion-errors"] = this.errors.length.toString();
this.fields["count-chunks"] = this.epubReader.chunks.length.toString();
this.fields["count-images"] = Object.keys(this.epubReader.images).length.toString();
// Cover tab
if(this.epubReader.hasMetadataItem("cover")) {
const href = this.epubReader.getManifestItem(this.epubReader.getMetadataItem("cover")).href;
if(href) {
this.fields["cover-image"] = this.titlePrefix + "/images/" + this.epubReader.getManifestItem(this.epubReader.getMetadataItem("cover")).href;
this.addTiddler({
title: this.titlePrefix + "/cover",
type: "text/vnd.tiddlywiki",
text: "<$transclude tiddler=\"" + this.titlePrefix + "\" subtiddler=\"" + this.fields["cover-image"] + "\"/>"
});
}
}
// Readme tab
this.addTiddler({
title: this.titlePrefix + "/readme",
type: "text/vnd.tiddlywiki",
text: ["epub-title",
"epub-creator",
"converter-version",
"conversion-errors",
"count-chunks",
"count-images"
].filter(field => field in this.fields).map(field => `|${field} |''${this.fields[field]}'' |`).join("\n")
});
// Errors tab
this.addTiddler({
title: this.titlePrefix + "/errors",
type: "text/vnd.tiddlywiki",
text: this.epubReader.errors.concat(this.errors).map(message => "# " + message + "\n").join("\n") || "None"
});
// Full text tab
this.addTiddler({
title: this.titlePrefix + "/text",
type: "text/vnd.tiddlywiki",
text: `\\define link-actions()
<$action-sendmessage $message="tm-scroll" selector={{{ [<navigateTo>escapecss[]addprefix[#]] }}}/>
\\end
<$linkcatcher actions=<<link-actions>>>
<div class="tc-table-of-contents">
<<toc "${this.titlePrefix}/toc">>
</div>
<$list filter="[all[tiddlers+shadows]prefix[${this.titlePrefix}/text/]sort[]]">
<a id=<<currentTiddler>>>
<$transclude mode="inline"/>
</a>
</$list>
</$linkcatcher>`
});
}
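/*
Build the href -> tiddler title map used to rewrite internal links, e.g. (illustrative values):
  "OEBPS/chapter1.html"          -> "$:/plugins/twpub/<hash>/text/000000012"
  "OEBPS/chapter1.html#section2" -> "$:/plugins/twpub/<hash>/text/000000015"
*/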
createAnchorToTitleMapping() {
this.mapAnchorToTitle = Object.create(null);
this.epubReader.chunks.forEach((chunk,index) => {
const title = this.makeTextTiddlerTitle(index);
// If we've not seen the file before, add a mapping for the file itself, without an anchor ID
if(!this.mapAnchorToTitle[chunk.href]) {
this.mapAnchorToTitle[chunk.href] = title;
}
// Add mappings for each anchor ID
chunk.anchorIds.forEach(anchorId => {
if(!this.mapAnchorToTitle[chunk.href + "#" + anchorId]) {
this.mapAnchorToTitle[chunk.href + "#" + anchorId] = title;
}
});
});
}
makeTextTiddlerTitle(index) {
return this.titlePrefix + "/text/" + ("" + index).padStart(9,"0");
}
convertText() {
this.epubReader.chunks.forEach((chunk,index) => {
// Construct the title for this chunk
const title = this.makeTextTiddlerTitle(index);
// Collect the scoping classes to be wrapped around this chunk
const scopingClasses = chunk.stylesheetIds.map(id => this.makeStylesheetScopeClass(id));
// Process some elements and attributes to wikitext
this.processTextChunk(chunk);
// Flatten the nodes to text
const flatText = flattenTree(chunk.nodes);
// Add the tiddler
this.addTiddler({
role: "text",
title: title,
type: "text/vnd.twpub",
text: "<div class=\"" + scopingClasses.join(" ") + "\">" + flatText + "</div>"
});
});
}
processTextChunk(chunk) {
// Visit each node to apply our custom processing
const visitNode = node => {
if(typeof node !== "string") {
// Attribute-specific processing
if(node.attributes && node.attributes.style) {
// Clean style attributes
node.attributes.style = cleanStyleAttribute(node.attributes.style);
}
// Element-specific processing
switch(node.tag) {
// Replace <img> tags with <$image> widgets
case "img":
node.tag = "$image";
node.attributes.source = this.titlePrefix + "/images/" + node.attributes.src;
delete node.attributes.src;
break;
// Replace <a> tags with <$link> widgets
case "a":
if(node.attributes && node.attributes.href) {
if(node.attributes.href.startsWith(URL_PREFIX)) {
// It's an internal link
var target = node.attributes.href.slice(URL_PREFIX.length);
if(target.charAt(0) === "/") {
target = target.slice(1);
}
const anchorId = this.mapAnchorToTitle[target];
if(anchorId) {
node.tag = "$link";
node.attributes.to = anchorId;
delete node.attributes.href;
// Provide dummy content if there are no child nodes to avoid the <$link>
// widget using the target title as the default link text
if(node.childNodes.length === 0) {
node.childNodes = [{
tag: "$text",
attributes: {
text: ""
},
}];
}
return
} else {
this.logError(`Missing TOC link to \`${target}\` from \`${chunk.href}\``);
}
} else {
// It's an external link
node.attributes.rel = "noopener noreferrer";
node.attributes.target = "_blank";
}
}
break;
}
}
visitNodes(node.childNodes);
},
visitNodes = childNodes => {
childNodes = childNodes || [];
for(const childNode of childNodes) {
visitNode(childNode);
}
};
visitNodes(chunk.nodes);
}
convertToc() {
const visitNodes = (nodes,tag) => {
const titles = [];
nodes.forEach(node => {
titles.push(visitNode(node,tag));
});
return titles;
};
const visitNode = (node,tag) => {
const title = this.titlePrefix + "/toc/" + node.id;
const childTitles = visitNodes(node.children,title);
var target = node.href;
if(target.charAt(0) === "/") {
target = target.slice(1);
}
const targetTitle = this.mapAnchorToTitle[target];
if(!targetTitle) {
console.log("Missing link to",target)
}
this.addTiddler({
title: title,
caption: node.text,
tags: tag,
target: targetTitle,
list: stringifyList(childTitles),
role: "toc"
});
return title;
};
visitNodes(this.epubReader.toc,this.titlePrefix + "/toc");
}
makeStylesheetScopeClass(id) {
return "twpub-" + this.hash + "-" + id;
}
convertStylesheets() {
const scopingClasses = Object.keys(this.epubReader.stylesheets).map(id => this.makeStylesheetScopeClass(id)),
makeSelectors = target => scopingClasses.map(className => "." + className + " " + target).join(","),
cleanText = [];
cleanText.push(`
${makeSelectors("blockquote")} {
border-color: initial;
border-style: initial;
margin: initial;
padding: initial;
quotes: initial;
}
`);
for(const id in this.epubReader.stylesheets) {
cleanText.push(cleanStylesheet(this.epubReader.stylesheets[id],this.makeStylesheetScopeClass(id)));
};
this.addTiddler({
role: "stylesheet",
title: this.titlePrefix + "/stylesheets",
type: "text/css",
tags: "$:/tags/Stylesheet",
text: cleanText.join("\n")
});
}
convertImages() {
for(const imagePath in this.epubReader.images) {
const imageInfo = this.epubReader.images[imagePath];
this.addTiddler({
role: "image",
title: this.titlePrefix + "/images/" + imagePath,
type: imageInfo.type,
text: imageInfo.text
});
};
}
addTiddler(fields) {
this.tiddlers[fields.title] = fields;
}
/*
Get the JSON of the entire plugin
*/
getPluginText() {
this.fields.text = JSON.stringify({tiddlers: this.tiddlers},null,4)
return JSON.stringify(this.fields,null,4);
}
}
function stringifyList(value) {
if(Array.isArray(value)) {
const result = new Array(value.length);
// Iterate by index so each entry lands in the corresponding slot of the result array
for(let t=0; t<value.length; t++) {
const entry = value[t] || "";
if(entry.indexOf(" ") !== -1) {
result[t] = "[[" + entry + "]]";
} else {
result[t] = entry;
}
}
return result.join(" ");
} else {
return value || "";
}
};
exports.TwpubPlugin = TwpubPlugin;
<|start_filename|>epub2twpub/text-extractor.js<|end_filename|>
/*
Class representing the Puppeteer-based wrapper for get-page-text.js
*/
const puppeteer = require("puppeteer"),
{getPageText} = require("./puppeteer/get-page-text");
const URL_PREFIX = "https://example.com/";
class TextExtractor {
/*
Options:
getFile: function(href) returns {type:, contents:}
logError: function(msg)
*/
constructor (options) {
this.getFile = options.getFile;
this.logError = options.logError;
}
async initialise() {
this.browser = await puppeteer.launch();
this.page = await this.browser.newPage();
await this.page.setJavaScriptEnabled(false);
await this.page.setRequestInterception(true);
this.page.on("request", async request => {
if(request.method() === "GET" && request.url().startsWith(URL_PREFIX)) {
const fileHref = request.url().slice(URL_PREFIX.length);
const {type,contents} = await this.getFile(fileHref);
if(!type) {
this.logError(`Missing file \`${fileHref}\``);
return request.respond({status: 404, contentType: "text/plain", body: "Not found!"});
} else {
request.respond({status: 200, contentType: type, body: contents});
}
} else {
request.abort();
}
});
}
async getPageText(href) {
// console.log("processing page",href)
const pageURL = URL_PREFIX + href;
await this.page.goto(pageURL,{waitUntil: "load"});
return await this.page.evaluate(getPageText);
}
async close() {
await this.page.close();
await this.browser.close();
}
}
exports.TextExtractor = TextExtractor;
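/*
Usage sketch (illustrative only; mirrors how epub-reader.js and test.js drive this class, and the
href is a placeholder):

  const extractor = new TextExtractor({getFile, logError});
  await extractor.initialise();
  const {chunks, stylesheets} = await extractor.getPageText("OEBPS/chapter1.html");
  await extractor.close();

getFile must resolve an href to {type:, contents:} as described in the options comment above.
*/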
<|start_filename|>epub2twpub/test.js<|end_filename|>
/*
Run tests of getPageText() from HTML files in the ./fixtures/html/ directory
*/
const inputDir = "./fixtures/html/";
const fs = require("fs"),
path = require("path"),
{promisify} = require("util"),
readFileAsync = promisify(fs.readFile),
writeFileAsync = promisify(fs.writeFile),
{TextExtractor} = require("./text-extractor"),
flattenTree = require("./flatten-tree").flattenTree;
async function main() {
// Collect up paths of .HTML files in the input directory
const filepaths = [];
function scanDirectory(pathname) {
const files = fs.readdirSync(pathname);
files.forEach(function(filename) {
const p = path.resolve(pathname,filename),
s = fs.lstatSync(p),
x = path.extname(filename);
if(s.isDirectory()) {
scanDirectory(p);
} else if(x === ".html") {
filepaths.push(p);
}
});
}
scanDirectory(inputDir);
// Accumulate test failures
const failures = [];
// Test each page in turn
for(const filepath of filepaths) {
const results = await testPage(filepath);
// Compare the results
if(!compareResults(results)) {
failures.push(results);
}
}
// Check for failures
return failures;
}
async function testPage(filepath) {
// Setup the text extractor
const textExtractor = new TextExtractor({
getFile: async fileHref => {
if(fileHref === "index.html") {
return {
type: "text/html",
contents: await readFileAsync(filepath,"utf8")
}
} else {
return {
type: null,
contents: null
}
}
},
logError: msg => {
console.log("Text extractor error: " + msg)
}
});
await textExtractor.initialise();
// Get the text of the page
const results = await textExtractor.getPageText("index.html");
// Flatten the nodes of the results
for(const chunk of results.chunks) {
chunk.text = flattenTree(chunk.nodes);
delete chunk.nodes;
}
results.filepath = filepath;
return results;
}
function compareResults(results) {
if(results.chunks.length === results.expectedResults.length) {
for(let index = 0; index < results.chunks.length; index++) {
let r = results.chunks[index],
e = results.expectedResults[index];
if(r.text !== e.text || (r.anchorIds || []).join(",") !== (e.anchorIds || []).join(",")) {
return false;
}
}
return true;
}
return false;
};
main().then(results => {
// Check for failures
if(results.length === 0) {
process.exit(0);
} else {
console.error("Tests failed");
console.error(JSON.stringify(results,null,4));
process.exit(1);
}
}).catch(err => {
console.error(err);
process.exit(1);
});
<|start_filename|>epub2twpub/utils.js<|end_filename|>
/*
Simple command line argument parser
*/
exports.ArgParser = class ArgParser {
constructor(args,options) {
// Collect the arguments into a hashmap
this.byName = Object.create(null);
let target = options.defaultOption || "";
args.forEach(arg => {
if(arg.startsWith("--")) {
if(arg.length > 2) {
target = arg.slice(2);
if(!(target in this.byName)) {
this.byName[target] = [];
} else {
throw "Repeated option " + target + "-" + JSON.stringify(this.byName,null,4);
}
} else {
throw "Missing option name after --";
}
		} else {
			// Arguments before any "--option" accumulate under the default option name
			if(!(target in this.byName)) {
				this.byName[target] = [];
			}
			this.byName[target].push(arg);
		}
});
// Check for mandatory arguments
Object.keys(options.mandatoryArguments || []).forEach(mandatoryArgumentName => {
const mandatoryArgumentType = options.mandatoryArguments[mandatoryArgumentName];
switch (mandatoryArgumentType) {
case "single":
if(!(mandatoryArgumentName in this.byName)) {
throw "Missing mandatory argument --" + mandatoryArgumentName;
}
if(this.byName[mandatoryArgumentName].length > 1) {
throw "Option --" + mandatoryArgumentName + " must have a single argument";
}
break;
default:
throw "Unknown mandatoryArgument type " + mandatoryArgumentType;
}
});
}
}
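/*
Usage sketch (illustrative only; the option names below are hypothetical, not the tool's real flags):

	const {ArgParser} = require("./utils");
	const parser = new ArgParser(["--epub", "book.epub", "--output", "wiki.html"], {
		mandatoryArguments: {
			epub: "single",
			output: "single"
		}
	});
	// parser.byName is now {epub: ["book.epub"], output: ["wiki.html"]};
	// a repeated option or a missing mandatory option throws a string error
*/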
/*
HTML encode a string (including double quotes so that we can encode attribute values)
*/
exports.htmlEncode = function(str) {
	// Convert & to "&amp;", < to "&lt;", > to "&gt;", " to "&quot;"
	return str.toString().replace(/&/mg,"&amp;").replace(/</mg,"&lt;").replace(/>/mg,"&gt;").replace(/\"/mg,"&quot;");
}
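/*
Minimal example of the encoding above (illustrative):

	htmlEncode('Fish & "Chips" <tasty>');
	// => 'Fish &amp; &quot;Chips&quot; &lt;tasty&gt;'
*/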
/*
List of tags that TW5 treats as self closing (and will not accept a closing tag)
*/
exports.isTreatedAsSelfClosingTagByTiddlyWiki = function(tagName) {
return [
"area","base","br","col","command","embed","hr","img","input","keygen","link","meta","param","source","track","wbr"
].indexOf(tagName) !== -1;
}
/*
Resolve a path relative to a root filepath
sourcepath: relative filepath
rootpath: absolute filepath
*/
exports.resolvePath = function(sourcepath,rootpath) {
const src = sourcepath.split("/"),
root = rootpath.split("/");
// Remove the filename part of the root
root.splice(root.length-1,1);
// If the source path starts with ./ or ../ then it is relative to the root
if(src[0] === "." || src[0] === ".." ) {
// Process the source path bit by bit onto the end of the root path
while(src.length > 0) {
const c = src.shift();
if(c === "..") { // Slice off the last root entry for a double dot
if(root.length > 0) {
root.splice(root.length-1,1);
}
} else if(c !== ".") { // Ignore dots
root.push(c); // Copy other elements across
}
}
return root.join("/");
} else {
// If it isn't relative, just return the path
if(rootpath) {
return root.concat(src).join("/");
} else {
return src.join("/");
}
}
}
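/*
Worked examples (illustrative), following the logic above:

	resolvePath("../images/cover.jpg", "OEBPS/text/chapter1.html");
	// => "OEBPS/images/cover.jpg"   (".." removes the "text" segment of the root)
	resolvePath("styles/main.css", "OEBPS/content.opf");
	// => "OEBPS/styles/main.css"    (non-relative paths are joined onto the root directory)
*/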
/*
Hash a string
*/
const crypto = require("crypto");
exports.hash = function(text,length) {
const hash = crypto.createHash("sha256");
hash.update(text);
const hashText = hash.digest("hex");
if(length === undefined) {
length = hashText.length;
}
return hashText.slice(0,length);
}
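/*
Example (illustrative): hash("hello", 8) returns the first 8 hex characters of the
SHA-256 digest of "hello", i.e. "2cf24dba"; omitting the length returns the full digest.
*/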
<|start_filename|>epub2twpub/flatten-tree.js<|end_filename|>
/*
Flatten a tree into an array of text blocks
*/
const {htmlEncode,isTreatedAsSelfClosingTagByTiddlyWiki} = require("./utils");
exports.flattenTree = function(nodes) {
const output = [],
visitNode = function(node) {
if(typeof node === "string") {
output.push(htmlEncode(node));
} else {
output.push("<" + node.tag);
const attributes = node.attributes || {},
attributeNames = Object.keys(attributes);
if(attributeNames.length > 0) {
attributeNames.forEach(attributeName => {
const attributeValue = attributes[attributeName];
if(typeof attributeValue === "string") {
output.push(" " + attributeName + "=\"" + htmlEncode(attributeValue) + "\"");
} else {
const propertyNames = Object.keys(attributeValue);
if(propertyNames.length > 0) {
output.push(" " + attributeName + "=\"");
propertyNames.forEach(propertyName => {
output.push(propertyName + ":" + htmlEncode(attributeValue[propertyName]) + ";");
});
output.push("\"");
}
}
});
}
output.push(">");
if(!isTreatedAsSelfClosingTagByTiddlyWiki(node.tag)) {
visitNodes(node.childNodes);
output.push("</" + node.tag + ">");
}
}
},
visitNodes = function(nodes) {
nodes = nodes || [];
for(const node of nodes) {
visitNode(node);
}
};
visitNodes(nodes);
return output.join("");
};
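/*
Example (illustrative), assuming the node shape used above ({tag, attributes, childNodes}):

	flattenTree([
		{tag: "p", attributes: {class: "intro"}, childNodes: ["Hello ", {tag: "br"}, "world & co"]}
	]);
	// => '<p class="intro">Hello <br>world &amp; co</p>'
	// "br" is in the self-closing list above, so no closing tag is emitted for it
*/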
| TWPUB/TWPUB-Tools |
<|start_filename|>lib/protobuf/Elrond.pbjson.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: Elrond.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package
import 'dart:core' as $core;
import 'dart:convert' as $convert;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use transactionMessageDescriptor instead')
const TransactionMessage$json = const {
'1': 'TransactionMessage',
'2': const [
const {'1': 'nonce', '3': 1, '4': 1, '5': 4, '10': 'nonce'},
const {'1': 'value', '3': 2, '4': 1, '5': 9, '10': 'value'},
const {'1': 'receiver', '3': 3, '4': 1, '5': 9, '10': 'receiver'},
const {'1': 'sender', '3': 4, '4': 1, '5': 9, '10': 'sender'},
const {'1': 'gas_price', '3': 5, '4': 1, '5': 4, '10': 'gasPrice'},
const {'1': 'gas_limit', '3': 6, '4': 1, '5': 4, '10': 'gasLimit'},
const {'1': 'data', '3': 7, '4': 1, '5': 9, '10': 'data'},
const {'1': 'chain_id', '3': 8, '4': 1, '5': 9, '10': 'chainId'},
const {'1': 'version', '3': 9, '4': 1, '5': 13, '10': 'version'},
],
};
/// Descriptor for `TransactionMessage`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List transactionMessageDescriptor = $convert.base64Decode('<KEY>');
@$core.Deprecated('Use signingInputDescriptor instead')
const SigningInput$json = const {
'1': 'SigningInput',
'2': const [
const {'1': 'private_key', '3': 1, '4': 1, '5': 12, '10': 'privateKey'},
const {'1': 'transaction', '3': 2, '4': 1, '5': 11, '6': '.TW.Elrond.Proto.TransactionMessage', '9': 0, '10': 'transaction'},
],
'8': const [
const {'1': 'message_oneof'},
],
};
/// Descriptor for `SigningInput`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List signingInputDescriptor = $convert.base64Decode('<KEY>');
@$core.Deprecated('Use signingOutputDescriptor instead')
const SigningOutput$json = const {
'1': 'SigningOutput',
'2': const [
const {'1': 'encoded', '3': 1, '4': 1, '5': 9, '10': 'encoded'},
const {'1': 'signature', '3': 2, '4': 1, '5': 9, '10': 'signature'},
],
};
/// Descriptor for `SigningOutput`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List signingOutputDescriptor = $convert.base64Decode('<KEY>');
<|start_filename|>lib/protobuf/Cosmos.pbenum.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: Cosmos.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
// ignore_for_file: UNDEFINED_SHOWN_NAME
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
class BroadcastMode extends $pb.ProtobufEnum {
static const BroadcastMode BLOCK = BroadcastMode._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'BLOCK');
static const BroadcastMode SYNC = BroadcastMode._(1, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'SYNC');
static const BroadcastMode ASYNC = BroadcastMode._(2, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'ASYNC');
static const $core.List<BroadcastMode> values = <BroadcastMode> [
BLOCK,
SYNC,
ASYNC,
];
static final $core.Map<$core.int, BroadcastMode> _byValue = $pb.ProtobufEnum.initByValue(values);
static BroadcastMode? valueOf($core.int value) => _byValue[value];
const BroadcastMode._($core.int v, $core.String n) : super(v, n);
}
<|start_filename|>lib/protobuf/Elrond.pb.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: Elrond.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
import 'dart:core' as $core;
import 'package:fixnum/fixnum.dart' as $fixnum;
import 'package:protobuf/protobuf.dart' as $pb;
class TransactionMessage extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TransactionMessage', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Elrond.Proto'), createEmptyInstance: create)
..a<$fixnum.Int64>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'nonce', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value')
..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'receiver')
..aOS(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'sender')
..a<$fixnum.Int64>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'gasPrice', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..a<$fixnum.Int64>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'gasLimit', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..aOS(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'data')
..aOS(8, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'chainId')
..a<$core.int>(9, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'version', $pb.PbFieldType.OU3)
..hasRequiredFields = false
;
TransactionMessage._() : super();
factory TransactionMessage({
$fixnum.Int64? nonce,
$core.String? value,
$core.String? receiver,
$core.String? sender,
$fixnum.Int64? gasPrice,
$fixnum.Int64? gasLimit,
$core.String? data,
$core.String? chainId,
$core.int? version,
}) {
final _result = create();
if (nonce != null) {
_result.nonce = nonce;
}
if (value != null) {
_result.value = value;
}
if (receiver != null) {
_result.receiver = receiver;
}
if (sender != null) {
_result.sender = sender;
}
if (gasPrice != null) {
_result.gasPrice = gasPrice;
}
if (gasLimit != null) {
_result.gasLimit = gasLimit;
}
if (data != null) {
_result.data = data;
}
if (chainId != null) {
_result.chainId = chainId;
}
if (version != null) {
_result.version = version;
}
return _result;
}
factory TransactionMessage.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory TransactionMessage.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
TransactionMessage clone() => TransactionMessage()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
TransactionMessage copyWith(void Function(TransactionMessage) updates) => super.copyWith((message) => updates(message as TransactionMessage)) as TransactionMessage; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static TransactionMessage create() => TransactionMessage._();
TransactionMessage createEmptyInstance() => create();
static $pb.PbList<TransactionMessage> createRepeated() => $pb.PbList<TransactionMessage>();
@$core.pragma('dart2js:noInline')
static TransactionMessage getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<TransactionMessage>(create);
static TransactionMessage? _defaultInstance;
@$pb.TagNumber(1)
$fixnum.Int64 get nonce => $_getI64(0);
@$pb.TagNumber(1)
set nonce($fixnum.Int64 v) { $_setInt64(0, v); }
@$pb.TagNumber(1)
$core.bool hasNonce() => $_has(0);
@$pb.TagNumber(1)
void clearNonce() => clearField(1);
@$pb.TagNumber(2)
$core.String get value => $_getSZ(1);
@$pb.TagNumber(2)
set value($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasValue() => $_has(1);
@$pb.TagNumber(2)
void clearValue() => clearField(2);
@$pb.TagNumber(3)
$core.String get receiver => $_getSZ(2);
@$pb.TagNumber(3)
set receiver($core.String v) { $_setString(2, v); }
@$pb.TagNumber(3)
$core.bool hasReceiver() => $_has(2);
@$pb.TagNumber(3)
void clearReceiver() => clearField(3);
@$pb.TagNumber(4)
$core.String get sender => $_getSZ(3);
@$pb.TagNumber(4)
set sender($core.String v) { $_setString(3, v); }
@$pb.TagNumber(4)
$core.bool hasSender() => $_has(3);
@$pb.TagNumber(4)
void clearSender() => clearField(4);
@$pb.TagNumber(5)
$fixnum.Int64 get gasPrice => $_getI64(4);
@$pb.TagNumber(5)
set gasPrice($fixnum.Int64 v) { $_setInt64(4, v); }
@$pb.TagNumber(5)
$core.bool hasGasPrice() => $_has(4);
@$pb.TagNumber(5)
void clearGasPrice() => clearField(5);
@$pb.TagNumber(6)
$fixnum.Int64 get gasLimit => $_getI64(5);
@$pb.TagNumber(6)
set gasLimit($fixnum.Int64 v) { $_setInt64(5, v); }
@$pb.TagNumber(6)
$core.bool hasGasLimit() => $_has(5);
@$pb.TagNumber(6)
void clearGasLimit() => clearField(6);
@$pb.TagNumber(7)
$core.String get data => $_getSZ(6);
@$pb.TagNumber(7)
set data($core.String v) { $_setString(6, v); }
@$pb.TagNumber(7)
$core.bool hasData() => $_has(6);
@$pb.TagNumber(7)
void clearData() => clearField(7);
@$pb.TagNumber(8)
$core.String get chainId => $_getSZ(7);
@$pb.TagNumber(8)
set chainId($core.String v) { $_setString(7, v); }
@$pb.TagNumber(8)
$core.bool hasChainId() => $_has(7);
@$pb.TagNumber(8)
void clearChainId() => clearField(8);
@$pb.TagNumber(9)
$core.int get version => $_getIZ(8);
@$pb.TagNumber(9)
set version($core.int v) { $_setUnsignedInt32(8, v); }
@$pb.TagNumber(9)
$core.bool hasVersion() => $_has(8);
@$pb.TagNumber(9)
void clearVersion() => clearField(9);
}
enum SigningInput_MessageOneof {
transaction,
notSet
}
class SigningInput extends $pb.GeneratedMessage {
static const $core.Map<$core.int, SigningInput_MessageOneof> _SigningInput_MessageOneofByTag = {
2 : SigningInput_MessageOneof.transaction,
0 : SigningInput_MessageOneof.notSet
};
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'SigningInput', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Elrond.Proto'), createEmptyInstance: create)
..oo(0, [2])
..a<$core.List<$core.int>>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'privateKey', $pb.PbFieldType.OY)
..aOM<TransactionMessage>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transaction', subBuilder: TransactionMessage.create)
..hasRequiredFields = false
;
SigningInput._() : super();
factory SigningInput({
$core.List<$core.int>? privateKey,
TransactionMessage? transaction,
}) {
final _result = create();
if (privateKey != null) {
_result.privateKey = privateKey;
}
if (transaction != null) {
_result.transaction = transaction;
}
return _result;
}
factory SigningInput.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory SigningInput.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
SigningInput clone() => SigningInput()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
SigningInput copyWith(void Function(SigningInput) updates) => super.copyWith((message) => updates(message as SigningInput)) as SigningInput; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static SigningInput create() => SigningInput._();
SigningInput createEmptyInstance() => create();
static $pb.PbList<SigningInput> createRepeated() => $pb.PbList<SigningInput>();
@$core.pragma('dart2js:noInline')
static SigningInput getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<SigningInput>(create);
static SigningInput? _defaultInstance;
SigningInput_MessageOneof whichMessageOneof() => _SigningInput_MessageOneofByTag[$_whichOneof(0)]!;
void clearMessageOneof() => clearField($_whichOneof(0));
@$pb.TagNumber(1)
$core.List<$core.int> get privateKey => $_getN(0);
@$pb.TagNumber(1)
set privateKey($core.List<$core.int> v) { $_setBytes(0, v); }
@$pb.TagNumber(1)
$core.bool hasPrivateKey() => $_has(0);
@$pb.TagNumber(1)
void clearPrivateKey() => clearField(1);
@$pb.TagNumber(2)
TransactionMessage get transaction => $_getN(1);
@$pb.TagNumber(2)
set transaction(TransactionMessage v) { setField(2, v); }
@$pb.TagNumber(2)
$core.bool hasTransaction() => $_has(1);
@$pb.TagNumber(2)
void clearTransaction() => clearField(2);
@$pb.TagNumber(2)
TransactionMessage ensureTransaction() => $_ensure(1);
}
class SigningOutput extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'SigningOutput', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Elrond.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'encoded')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'signature')
..hasRequiredFields = false
;
SigningOutput._() : super();
factory SigningOutput({
$core.String? encoded,
$core.String? signature,
}) {
final _result = create();
if (encoded != null) {
_result.encoded = encoded;
}
if (signature != null) {
_result.signature = signature;
}
return _result;
}
factory SigningOutput.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory SigningOutput.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
SigningOutput clone() => SigningOutput()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
SigningOutput copyWith(void Function(SigningOutput) updates) => super.copyWith((message) => updates(message as SigningOutput)) as SigningOutput; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static SigningOutput create() => SigningOutput._();
SigningOutput createEmptyInstance() => create();
static $pb.PbList<SigningOutput> createRepeated() => $pb.PbList<SigningOutput>();
@$core.pragma('dart2js:noInline')
static SigningOutput getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<SigningOutput>(create);
static SigningOutput? _defaultInstance;
@$pb.TagNumber(1)
$core.String get encoded => $_getSZ(0);
@$pb.TagNumber(1)
set encoded($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasEncoded() => $_has(0);
@$pb.TagNumber(1)
void clearEncoded() => clearField(1);
@$pb.TagNumber(2)
$core.String get signature => $_getSZ(1);
@$pb.TagNumber(2)
set signature($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasSignature() => $_has(1);
@$pb.TagNumber(2)
void clearSignature() => clearField(2);
}
<|start_filename|>lib/protobuf/Cosmos.pb.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: Cosmos.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
import 'dart:core' as $core;
import 'package:fixnum/fixnum.dart' as $fixnum;
import 'package:protobuf/protobuf.dart' as $pb;
import 'Cosmos.pbenum.dart';
export 'Cosmos.pbenum.dart';
class Amount extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Amount', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'denom')
..aInt64(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'amount')
..hasRequiredFields = false
;
Amount._() : super();
factory Amount({
$core.String? denom,
$fixnum.Int64? amount,
}) {
final _result = create();
if (denom != null) {
_result.denom = denom;
}
if (amount != null) {
_result.amount = amount;
}
return _result;
}
factory Amount.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Amount.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Amount clone() => Amount()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Amount copyWith(void Function(Amount) updates) => super.copyWith((message) => updates(message as Amount)) as Amount; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Amount create() => Amount._();
Amount createEmptyInstance() => create();
static $pb.PbList<Amount> createRepeated() => $pb.PbList<Amount>();
@$core.pragma('dart2js:noInline')
static Amount getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Amount>(create);
static Amount? _defaultInstance;
@$pb.TagNumber(1)
$core.String get denom => $_getSZ(0);
@$pb.TagNumber(1)
set denom($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasDenom() => $_has(0);
@$pb.TagNumber(1)
void clearDenom() => clearField(1);
@$pb.TagNumber(2)
$fixnum.Int64 get amount => $_getI64(1);
@$pb.TagNumber(2)
set amount($fixnum.Int64 v) { $_setInt64(1, v); }
@$pb.TagNumber(2)
$core.bool hasAmount() => $_has(1);
@$pb.TagNumber(2)
void clearAmount() => clearField(2);
}
class Fee extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Fee', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..pc<Amount>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'amounts', $pb.PbFieldType.PM, subBuilder: Amount.create)
..a<$fixnum.Int64>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'gas', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..hasRequiredFields = false
;
Fee._() : super();
factory Fee({
$core.Iterable<Amount>? amounts,
$fixnum.Int64? gas,
}) {
final _result = create();
if (amounts != null) {
_result.amounts.addAll(amounts);
}
if (gas != null) {
_result.gas = gas;
}
return _result;
}
factory Fee.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Fee.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Fee clone() => Fee()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Fee copyWith(void Function(Fee) updates) => super.copyWith((message) => updates(message as Fee)) as Fee; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Fee create() => Fee._();
Fee createEmptyInstance() => create();
static $pb.PbList<Fee> createRepeated() => $pb.PbList<Fee>();
@$core.pragma('dart2js:noInline')
static Fee getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Fee>(create);
static Fee? _defaultInstance;
@$pb.TagNumber(1)
$core.List<Amount> get amounts => $_getList(0);
@$pb.TagNumber(2)
$fixnum.Int64 get gas => $_getI64(1);
@$pb.TagNumber(2)
set gas($fixnum.Int64 v) { $_setInt64(1, v); }
@$pb.TagNumber(2)
$core.bool hasGas() => $_has(1);
@$pb.TagNumber(2)
void clearGas() => clearField(2);
}
class Message_Send extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Message.Send', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'fromAddress')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'toAddress')
..pc<Amount>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'amounts', $pb.PbFieldType.PM, subBuilder: Amount.create)
..aOS(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'typePrefix')
..hasRequiredFields = false
;
Message_Send._() : super();
factory Message_Send({
$core.String? fromAddress,
$core.String? toAddress,
$core.Iterable<Amount>? amounts,
$core.String? typePrefix,
}) {
final _result = create();
if (fromAddress != null) {
_result.fromAddress = fromAddress;
}
if (toAddress != null) {
_result.toAddress = toAddress;
}
if (amounts != null) {
_result.amounts.addAll(amounts);
}
if (typePrefix != null) {
_result.typePrefix = typePrefix;
}
return _result;
}
factory Message_Send.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Message_Send.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Message_Send clone() => Message_Send()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Message_Send copyWith(void Function(Message_Send) updates) => super.copyWith((message) => updates(message as Message_Send)) as Message_Send; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Message_Send create() => Message_Send._();
Message_Send createEmptyInstance() => create();
static $pb.PbList<Message_Send> createRepeated() => $pb.PbList<Message_Send>();
@$core.pragma('dart2js:noInline')
static Message_Send getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Message_Send>(create);
static Message_Send? _defaultInstance;
@$pb.TagNumber(1)
$core.String get fromAddress => $_getSZ(0);
@$pb.TagNumber(1)
set fromAddress($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasFromAddress() => $_has(0);
@$pb.TagNumber(1)
void clearFromAddress() => clearField(1);
@$pb.TagNumber(2)
$core.String get toAddress => $_getSZ(1);
@$pb.TagNumber(2)
set toAddress($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasToAddress() => $_has(1);
@$pb.TagNumber(2)
void clearToAddress() => clearField(2);
@$pb.TagNumber(3)
$core.List<Amount> get amounts => $_getList(2);
@$pb.TagNumber(4)
$core.String get typePrefix => $_getSZ(3);
@$pb.TagNumber(4)
set typePrefix($core.String v) { $_setString(3, v); }
@$pb.TagNumber(4)
$core.bool hasTypePrefix() => $_has(3);
@$pb.TagNumber(4)
void clearTypePrefix() => clearField(4);
}
class Message_Delegate extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Message.Delegate', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'delegatorAddress')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'validatorAddress')
..aOM<Amount>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'amount', subBuilder: Amount.create)
..aOS(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'typePrefix')
..hasRequiredFields = false
;
Message_Delegate._() : super();
factory Message_Delegate({
$core.String? delegatorAddress,
$core.String? validatorAddress,
Amount? amount,
$core.String? typePrefix,
}) {
final _result = create();
if (delegatorAddress != null) {
_result.delegatorAddress = delegatorAddress;
}
if (validatorAddress != null) {
_result.validatorAddress = validatorAddress;
}
if (amount != null) {
_result.amount = amount;
}
if (typePrefix != null) {
_result.typePrefix = typePrefix;
}
return _result;
}
factory Message_Delegate.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Message_Delegate.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Message_Delegate clone() => Message_Delegate()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Message_Delegate copyWith(void Function(Message_Delegate) updates) => super.copyWith((message) => updates(message as Message_Delegate)) as Message_Delegate; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Message_Delegate create() => Message_Delegate._();
Message_Delegate createEmptyInstance() => create();
static $pb.PbList<Message_Delegate> createRepeated() => $pb.PbList<Message_Delegate>();
@$core.pragma('dart2js:noInline')
static Message_Delegate getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Message_Delegate>(create);
static Message_Delegate? _defaultInstance;
@$pb.TagNumber(1)
$core.String get delegatorAddress => $_getSZ(0);
@$pb.TagNumber(1)
set delegatorAddress($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasDelegatorAddress() => $_has(0);
@$pb.TagNumber(1)
void clearDelegatorAddress() => clearField(1);
@$pb.TagNumber(2)
$core.String get validatorAddress => $_getSZ(1);
@$pb.TagNumber(2)
set validatorAddress($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasValidatorAddress() => $_has(1);
@$pb.TagNumber(2)
void clearValidatorAddress() => clearField(2);
@$pb.TagNumber(3)
Amount get amount => $_getN(2);
@$pb.TagNumber(3)
set amount(Amount v) { setField(3, v); }
@$pb.TagNumber(3)
$core.bool hasAmount() => $_has(2);
@$pb.TagNumber(3)
void clearAmount() => clearField(3);
@$pb.TagNumber(3)
Amount ensureAmount() => $_ensure(2);
@$pb.TagNumber(4)
$core.String get typePrefix => $_getSZ(3);
@$pb.TagNumber(4)
set typePrefix($core.String v) { $_setString(3, v); }
@$pb.TagNumber(4)
$core.bool hasTypePrefix() => $_has(3);
@$pb.TagNumber(4)
void clearTypePrefix() => clearField(4);
}
class Message_Undelegate extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Message.Undelegate', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'delegatorAddress')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'validatorAddress')
..aOM<Amount>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'amount', subBuilder: Amount.create)
..aOS(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'typePrefix')
..hasRequiredFields = false
;
Message_Undelegate._() : super();
factory Message_Undelegate({
$core.String? delegatorAddress,
$core.String? validatorAddress,
Amount? amount,
$core.String? typePrefix,
}) {
final _result = create();
if (delegatorAddress != null) {
_result.delegatorAddress = delegatorAddress;
}
if (validatorAddress != null) {
_result.validatorAddress = validatorAddress;
}
if (amount != null) {
_result.amount = amount;
}
if (typePrefix != null) {
_result.typePrefix = typePrefix;
}
return _result;
}
factory Message_Undelegate.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Message_Undelegate.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Message_Undelegate clone() => Message_Undelegate()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Message_Undelegate copyWith(void Function(Message_Undelegate) updates) => super.copyWith((message) => updates(message as Message_Undelegate)) as Message_Undelegate; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Message_Undelegate create() => Message_Undelegate._();
Message_Undelegate createEmptyInstance() => create();
static $pb.PbList<Message_Undelegate> createRepeated() => $pb.PbList<Message_Undelegate>();
@$core.pragma('dart2js:noInline')
static Message_Undelegate getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Message_Undelegate>(create);
static Message_Undelegate? _defaultInstance;
@$pb.TagNumber(1)
$core.String get delegatorAddress => $_getSZ(0);
@$pb.TagNumber(1)
set delegatorAddress($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasDelegatorAddress() => $_has(0);
@$pb.TagNumber(1)
void clearDelegatorAddress() => clearField(1);
@$pb.TagNumber(2)
$core.String get validatorAddress => $_getSZ(1);
@$pb.TagNumber(2)
set validatorAddress($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasValidatorAddress() => $_has(1);
@$pb.TagNumber(2)
void clearValidatorAddress() => clearField(2);
@$pb.TagNumber(3)
Amount get amount => $_getN(2);
@$pb.TagNumber(3)
set amount(Amount v) { setField(3, v); }
@$pb.TagNumber(3)
$core.bool hasAmount() => $_has(2);
@$pb.TagNumber(3)
void clearAmount() => clearField(3);
@$pb.TagNumber(3)
Amount ensureAmount() => $_ensure(2);
@$pb.TagNumber(4)
$core.String get typePrefix => $_getSZ(3);
@$pb.TagNumber(4)
set typePrefix($core.String v) { $_setString(3, v); }
@$pb.TagNumber(4)
$core.bool hasTypePrefix() => $_has(3);
@$pb.TagNumber(4)
void clearTypePrefix() => clearField(4);
}
class Message_BeginRedelegate extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Message.BeginRedelegate', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'delegatorAddress')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'validatorSrcAddress')
..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'validatorDstAddress')
..aOM<Amount>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'amount', subBuilder: Amount.create)
..aOS(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'typePrefix')
..hasRequiredFields = false
;
Message_BeginRedelegate._() : super();
factory Message_BeginRedelegate({
$core.String? delegatorAddress,
$core.String? validatorSrcAddress,
$core.String? validatorDstAddress,
Amount? amount,
$core.String? typePrefix,
}) {
final _result = create();
if (delegatorAddress != null) {
_result.delegatorAddress = delegatorAddress;
}
if (validatorSrcAddress != null) {
_result.validatorSrcAddress = validatorSrcAddress;
}
if (validatorDstAddress != null) {
_result.validatorDstAddress = validatorDstAddress;
}
if (amount != null) {
_result.amount = amount;
}
if (typePrefix != null) {
_result.typePrefix = typePrefix;
}
return _result;
}
factory Message_BeginRedelegate.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Message_BeginRedelegate.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Message_BeginRedelegate clone() => Message_BeginRedelegate()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Message_BeginRedelegate copyWith(void Function(Message_BeginRedelegate) updates) => super.copyWith((message) => updates(message as Message_BeginRedelegate)) as Message_BeginRedelegate; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Message_BeginRedelegate create() => Message_BeginRedelegate._();
Message_BeginRedelegate createEmptyInstance() => create();
static $pb.PbList<Message_BeginRedelegate> createRepeated() => $pb.PbList<Message_BeginRedelegate>();
@$core.pragma('dart2js:noInline')
static Message_BeginRedelegate getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Message_BeginRedelegate>(create);
static Message_BeginRedelegate? _defaultInstance;
@$pb.TagNumber(1)
$core.String get delegatorAddress => $_getSZ(0);
@$pb.TagNumber(1)
set delegatorAddress($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasDelegatorAddress() => $_has(0);
@$pb.TagNumber(1)
void clearDelegatorAddress() => clearField(1);
@$pb.TagNumber(2)
$core.String get validatorSrcAddress => $_getSZ(1);
@$pb.TagNumber(2)
set validatorSrcAddress($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasValidatorSrcAddress() => $_has(1);
@$pb.TagNumber(2)
void clearValidatorSrcAddress() => clearField(2);
@$pb.TagNumber(3)
$core.String get validatorDstAddress => $_getSZ(2);
@$pb.TagNumber(3)
set validatorDstAddress($core.String v) { $_setString(2, v); }
@$pb.TagNumber(3)
$core.bool hasValidatorDstAddress() => $_has(2);
@$pb.TagNumber(3)
void clearValidatorDstAddress() => clearField(3);
@$pb.TagNumber(4)
Amount get amount => $_getN(3);
@$pb.TagNumber(4)
set amount(Amount v) { setField(4, v); }
@$pb.TagNumber(4)
$core.bool hasAmount() => $_has(3);
@$pb.TagNumber(4)
void clearAmount() => clearField(4);
@$pb.TagNumber(4)
Amount ensureAmount() => $_ensure(3);
@$pb.TagNumber(5)
$core.String get typePrefix => $_getSZ(4);
@$pb.TagNumber(5)
set typePrefix($core.String v) { $_setString(4, v); }
@$pb.TagNumber(5)
$core.bool hasTypePrefix() => $_has(4);
@$pb.TagNumber(5)
void clearTypePrefix() => clearField(5);
}
class Message_WithdrawDelegationReward extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Message.WithdrawDelegationReward', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'delegatorAddress')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'validatorAddress')
..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'typePrefix')
..hasRequiredFields = false
;
Message_WithdrawDelegationReward._() : super();
factory Message_WithdrawDelegationReward({
$core.String? delegatorAddress,
$core.String? validatorAddress,
$core.String? typePrefix,
}) {
final _result = create();
if (delegatorAddress != null) {
_result.delegatorAddress = delegatorAddress;
}
if (validatorAddress != null) {
_result.validatorAddress = validatorAddress;
}
if (typePrefix != null) {
_result.typePrefix = typePrefix;
}
return _result;
}
factory Message_WithdrawDelegationReward.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Message_WithdrawDelegationReward.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Message_WithdrawDelegationReward clone() => Message_WithdrawDelegationReward()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Message_WithdrawDelegationReward copyWith(void Function(Message_WithdrawDelegationReward) updates) => super.copyWith((message) => updates(message as Message_WithdrawDelegationReward)) as Message_WithdrawDelegationReward; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Message_WithdrawDelegationReward create() => Message_WithdrawDelegationReward._();
Message_WithdrawDelegationReward createEmptyInstance() => create();
static $pb.PbList<Message_WithdrawDelegationReward> createRepeated() => $pb.PbList<Message_WithdrawDelegationReward>();
@$core.pragma('dart2js:noInline')
static Message_WithdrawDelegationReward getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Message_WithdrawDelegationReward>(create);
static Message_WithdrawDelegationReward? _defaultInstance;
@$pb.TagNumber(1)
$core.String get delegatorAddress => $_getSZ(0);
@$pb.TagNumber(1)
set delegatorAddress($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasDelegatorAddress() => $_has(0);
@$pb.TagNumber(1)
void clearDelegatorAddress() => clearField(1);
@$pb.TagNumber(2)
$core.String get validatorAddress => $_getSZ(1);
@$pb.TagNumber(2)
set validatorAddress($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasValidatorAddress() => $_has(1);
@$pb.TagNumber(2)
void clearValidatorAddress() => clearField(2);
@$pb.TagNumber(3)
$core.String get typePrefix => $_getSZ(2);
@$pb.TagNumber(3)
set typePrefix($core.String v) { $_setString(2, v); }
@$pb.TagNumber(3)
$core.bool hasTypePrefix() => $_has(2);
@$pb.TagNumber(3)
void clearTypePrefix() => clearField(3);
}
class Message_RawJSON extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Message.RawJSON', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'type')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value')
..hasRequiredFields = false
;
Message_RawJSON._() : super();
factory Message_RawJSON({
$core.String? type,
$core.String? value,
}) {
final _result = create();
if (type != null) {
_result.type = type;
}
if (value != null) {
_result.value = value;
}
return _result;
}
factory Message_RawJSON.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Message_RawJSON.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Message_RawJSON clone() => Message_RawJSON()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Message_RawJSON copyWith(void Function(Message_RawJSON) updates) => super.copyWith((message) => updates(message as Message_RawJSON)) as Message_RawJSON; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Message_RawJSON create() => Message_RawJSON._();
Message_RawJSON createEmptyInstance() => create();
static $pb.PbList<Message_RawJSON> createRepeated() => $pb.PbList<Message_RawJSON>();
@$core.pragma('dart2js:noInline')
static Message_RawJSON getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Message_RawJSON>(create);
static Message_RawJSON? _defaultInstance;
@$pb.TagNumber(1)
$core.String get type => $_getSZ(0);
@$pb.TagNumber(1)
set type($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasType() => $_has(0);
@$pb.TagNumber(1)
void clearType() => clearField(1);
@$pb.TagNumber(2)
$core.String get value => $_getSZ(1);
@$pb.TagNumber(2)
set value($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasValue() => $_has(1);
@$pb.TagNumber(2)
void clearValue() => clearField(2);
}
enum Message_MessageOneof {
sendCoinsMessage,
stakeMessage,
unstakeMessage,
restakeMessage,
withdrawStakeRewardMessage,
rawJsonMessage,
notSet
}
class Message extends $pb.GeneratedMessage {
static const $core.Map<$core.int, Message_MessageOneof> _Message_MessageOneofByTag = {
1 : Message_MessageOneof.sendCoinsMessage,
2 : Message_MessageOneof.stakeMessage,
3 : Message_MessageOneof.unstakeMessage,
4 : Message_MessageOneof.restakeMessage,
5 : Message_MessageOneof.withdrawStakeRewardMessage,
6 : Message_MessageOneof.rawJsonMessage,
0 : Message_MessageOneof.notSet
};
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Message', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..oo(0, [1, 2, 3, 4, 5, 6])
..aOM<Message_Send>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'sendCoinsMessage', subBuilder: Message_Send.create)
..aOM<Message_Delegate>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'stakeMessage', subBuilder: Message_Delegate.create)
..aOM<Message_Undelegate>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'unstakeMessage', subBuilder: Message_Undelegate.create)
..aOM<Message_BeginRedelegate>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'restakeMessage', subBuilder: Message_BeginRedelegate.create)
..aOM<Message_WithdrawDelegationReward>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'withdrawStakeRewardMessage', subBuilder: Message_WithdrawDelegationReward.create)
..aOM<Message_RawJSON>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'rawJsonMessage', subBuilder: Message_RawJSON.create)
..hasRequiredFields = false
;
Message._() : super();
factory Message({
Message_Send? sendCoinsMessage,
Message_Delegate? stakeMessage,
Message_Undelegate? unstakeMessage,
Message_BeginRedelegate? restakeMessage,
Message_WithdrawDelegationReward? withdrawStakeRewardMessage,
Message_RawJSON? rawJsonMessage,
}) {
final _result = create();
if (sendCoinsMessage != null) {
_result.sendCoinsMessage = sendCoinsMessage;
}
if (stakeMessage != null) {
_result.stakeMessage = stakeMessage;
}
if (unstakeMessage != null) {
_result.unstakeMessage = unstakeMessage;
}
if (restakeMessage != null) {
_result.restakeMessage = restakeMessage;
}
if (withdrawStakeRewardMessage != null) {
_result.withdrawStakeRewardMessage = withdrawStakeRewardMessage;
}
if (rawJsonMessage != null) {
_result.rawJsonMessage = rawJsonMessage;
}
return _result;
}
factory Message.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Message.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Message clone() => Message()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Message copyWith(void Function(Message) updates) => super.copyWith((message) => updates(message as Message)) as Message; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Message create() => Message._();
Message createEmptyInstance() => create();
static $pb.PbList<Message> createRepeated() => $pb.PbList<Message>();
@$core.pragma('dart2js:noInline')
static Message getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Message>(create);
static Message? _defaultInstance;
Message_MessageOneof whichMessageOneof() => _Message_MessageOneofByTag[$_whichOneof(0)]!;
void clearMessageOneof() => clearField($_whichOneof(0));
@$pb.TagNumber(1)
Message_Send get sendCoinsMessage => $_getN(0);
@$pb.TagNumber(1)
set sendCoinsMessage(Message_Send v) { setField(1, v); }
@$pb.TagNumber(1)
$core.bool hasSendCoinsMessage() => $_has(0);
@$pb.TagNumber(1)
void clearSendCoinsMessage() => clearField(1);
@$pb.TagNumber(1)
Message_Send ensureSendCoinsMessage() => $_ensure(0);
@$pb.TagNumber(2)
Message_Delegate get stakeMessage => $_getN(1);
@$pb.TagNumber(2)
set stakeMessage(Message_Delegate v) { setField(2, v); }
@$pb.TagNumber(2)
$core.bool hasStakeMessage() => $_has(1);
@$pb.TagNumber(2)
void clearStakeMessage() => clearField(2);
@$pb.TagNumber(2)
Message_Delegate ensureStakeMessage() => $_ensure(1);
@$pb.TagNumber(3)
Message_Undelegate get unstakeMessage => $_getN(2);
@$pb.TagNumber(3)
set unstakeMessage(Message_Undelegate v) { setField(3, v); }
@$pb.TagNumber(3)
$core.bool hasUnstakeMessage() => $_has(2);
@$pb.TagNumber(3)
void clearUnstakeMessage() => clearField(3);
@$pb.TagNumber(3)
Message_Undelegate ensureUnstakeMessage() => $_ensure(2);
@$pb.TagNumber(4)
Message_BeginRedelegate get restakeMessage => $_getN(3);
@$pb.TagNumber(4)
set restakeMessage(Message_BeginRedelegate v) { setField(4, v); }
@$pb.TagNumber(4)
$core.bool hasRestakeMessage() => $_has(3);
@$pb.TagNumber(4)
void clearRestakeMessage() => clearField(4);
@$pb.TagNumber(4)
Message_BeginRedelegate ensureRestakeMessage() => $_ensure(3);
@$pb.TagNumber(5)
Message_WithdrawDelegationReward get withdrawStakeRewardMessage => $_getN(4);
@$pb.TagNumber(5)
set withdrawStakeRewardMessage(Message_WithdrawDelegationReward v) { setField(5, v); }
@$pb.TagNumber(5)
$core.bool hasWithdrawStakeRewardMessage() => $_has(4);
@$pb.TagNumber(5)
void clearWithdrawStakeRewardMessage() => clearField(5);
@$pb.TagNumber(5)
Message_WithdrawDelegationReward ensureWithdrawStakeRewardMessage() => $_ensure(4);
@$pb.TagNumber(6)
Message_RawJSON get rawJsonMessage => $_getN(5);
@$pb.TagNumber(6)
set rawJsonMessage(Message_RawJSON v) { setField(6, v); }
@$pb.TagNumber(6)
$core.bool hasRawJsonMessage() => $_has(5);
@$pb.TagNumber(6)
void clearRawJsonMessage() => clearField(6);
@$pb.TagNumber(6)
Message_RawJSON ensureRawJsonMessage() => $_ensure(5);
}
class SigningInput extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'SigningInput', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..a<$fixnum.Int64>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'accountNumber', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'chainId')
..aOM<Fee>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'fee', subBuilder: Fee.create)
..aOS(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'memo')
..a<$fixnum.Int64>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'sequence', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..a<$core.List<$core.int>>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'privateKey', $pb.PbFieldType.OY)
..pc<Message>(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'messages', $pb.PbFieldType.PM, subBuilder: Message.create)
..e<BroadcastMode>(8, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'mode', $pb.PbFieldType.OE, defaultOrMaker: BroadcastMode.BLOCK, valueOf: BroadcastMode.valueOf, enumValues: BroadcastMode.values)
..hasRequiredFields = false
;
SigningInput._() : super();
factory SigningInput({
$fixnum.Int64? accountNumber,
$core.String? chainId,
Fee? fee,
$core.String? memo,
$fixnum.Int64? sequence,
$core.List<$core.int>? privateKey,
$core.Iterable<Message>? messages,
BroadcastMode? mode,
}) {
final _result = create();
if (accountNumber != null) {
_result.accountNumber = accountNumber;
}
if (chainId != null) {
_result.chainId = chainId;
}
if (fee != null) {
_result.fee = fee;
}
if (memo != null) {
_result.memo = memo;
}
if (sequence != null) {
_result.sequence = sequence;
}
if (privateKey != null) {
_result.privateKey = privateKey;
}
if (messages != null) {
_result.messages.addAll(messages);
}
if (mode != null) {
_result.mode = mode;
}
return _result;
}
factory SigningInput.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory SigningInput.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
SigningInput clone() => SigningInput()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
SigningInput copyWith(void Function(SigningInput) updates) => super.copyWith((message) => updates(message as SigningInput)) as SigningInput; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static SigningInput create() => SigningInput._();
SigningInput createEmptyInstance() => create();
static $pb.PbList<SigningInput> createRepeated() => $pb.PbList<SigningInput>();
@$core.pragma('dart2js:noInline')
static SigningInput getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<SigningInput>(create);
static SigningInput? _defaultInstance;
@$pb.TagNumber(1)
$fixnum.Int64 get accountNumber => $_getI64(0);
@$pb.TagNumber(1)
set accountNumber($fixnum.Int64 v) { $_setInt64(0, v); }
@$pb.TagNumber(1)
$core.bool hasAccountNumber() => $_has(0);
@$pb.TagNumber(1)
void clearAccountNumber() => clearField(1);
@$pb.TagNumber(2)
$core.String get chainId => $_getSZ(1);
@$pb.TagNumber(2)
set chainId($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasChainId() => $_has(1);
@$pb.TagNumber(2)
void clearChainId() => clearField(2);
@$pb.TagNumber(3)
Fee get fee => $_getN(2);
@$pb.TagNumber(3)
set fee(Fee v) { setField(3, v); }
@$pb.TagNumber(3)
$core.bool hasFee() => $_has(2);
@$pb.TagNumber(3)
void clearFee() => clearField(3);
@$pb.TagNumber(3)
Fee ensureFee() => $_ensure(2);
@$pb.TagNumber(4)
$core.String get memo => $_getSZ(3);
@$pb.TagNumber(4)
set memo($core.String v) { $_setString(3, v); }
@$pb.TagNumber(4)
$core.bool hasMemo() => $_has(3);
@$pb.TagNumber(4)
void clearMemo() => clearField(4);
@$pb.TagNumber(5)
$fixnum.Int64 get sequence => $_getI64(4);
@$pb.TagNumber(5)
set sequence($fixnum.Int64 v) { $_setInt64(4, v); }
@$pb.TagNumber(5)
$core.bool hasSequence() => $_has(4);
@$pb.TagNumber(5)
void clearSequence() => clearField(5);
@$pb.TagNumber(6)
$core.List<$core.int> get privateKey => $_getN(5);
@$pb.TagNumber(6)
set privateKey($core.List<$core.int> v) { $_setBytes(5, v); }
@$pb.TagNumber(6)
$core.bool hasPrivateKey() => $_has(5);
@$pb.TagNumber(6)
void clearPrivateKey() => clearField(6);
@$pb.TagNumber(7)
$core.List<Message> get messages => $_getList(6);
@$pb.TagNumber(8)
BroadcastMode get mode => $_getN(7);
@$pb.TagNumber(8)
set mode(BroadcastMode v) { setField(8, v); }
@$pb.TagNumber(8)
$core.bool hasMode() => $_has(7);
@$pb.TagNumber(8)
void clearMode() => clearField(8);
}
class SigningOutput extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'SigningOutput', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Cosmos.Proto'), createEmptyInstance: create)
..a<$core.List<$core.int>>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'signature', $pb.PbFieldType.OY)
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'json')
..hasRequiredFields = false
;
SigningOutput._() : super();
factory SigningOutput({
$core.List<$core.int>? signature,
$core.String? json,
}) {
final _result = create();
if (signature != null) {
_result.signature = signature;
}
if (json != null) {
_result.json = json;
}
return _result;
}
factory SigningOutput.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory SigningOutput.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
SigningOutput clone() => SigningOutput()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
SigningOutput copyWith(void Function(SigningOutput) updates) => super.copyWith((message) => updates(message as SigningOutput)) as SigningOutput; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static SigningOutput create() => SigningOutput._();
SigningOutput createEmptyInstance() => create();
static $pb.PbList<SigningOutput> createRepeated() => $pb.PbList<SigningOutput>();
@$core.pragma('dart2js:noInline')
static SigningOutput getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<SigningOutput>(create);
static SigningOutput? _defaultInstance;
@$pb.TagNumber(1)
$core.List<$core.int> get signature => $_getN(0);
@$pb.TagNumber(1)
set signature($core.List<$core.int> v) { $_setBytes(0, v); }
@$pb.TagNumber(1)
$core.bool hasSignature() => $_has(0);
@$pb.TagNumber(1)
void clearSignature() => clearField(1);
@$pb.TagNumber(2)
$core.String get json => $_getSZ(1);
@$pb.TagNumber(2)
set json($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasJson() => $_has(1);
@$pb.TagNumber(2)
void clearJson() => clearField(2);
}
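// --- Usage sketch (not part of the generated file) ---
// A hedged example of filling the SigningInput factory defined above; the
// concrete values, `privateKeyBytes`, and the Message payload are placeholders,
// not real chain data.
//
//   final input = SigningInput(
//     accountNumber: $fixnum.Int64(1037),
//     chainId: 'cosmoshub-4',
//     memo: '',
//     sequence: $fixnum.Int64(8),
//     privateKey: privateKeyBytes,      // hypothetical List<int> with the raw key
//     mode: BroadcastMode.BLOCK,
//   );
//   input.messages.add(Message());       // message payload fields omitted here
//   final bytes = input.writeToBuffer(); // standard GeneratedMessage serialization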
<|start_filename|>lib/core/public_key.dart<|end_filename|>
part of flutter_trust_wallet_core;
class PublicKey {
static const int PublicKeyCompressedSize = 33;
static const int PublicKeyUncompressedSize = 65;
late Pointer<Void> _nativehandle;
PublicKey._(Pointer<Void> pointer) {
_nativehandle = pointer;
}
PublicKey.createWithData(Pointer<Void> data, int publicKeyType) {
_nativehandle = TWPublicKey.TWPublicKeyCreateWithData(data, publicKeyType);
}
  static bool isValid(Uint8List data, int publicKeyType) {
    return TWPublicKeyImpl.isValid(data, publicKeyType);
  }
Uint8List data() {
return TWPublicKeyImpl.data(_nativehandle);
}
  static Pointer<Void>? recover(Uint8List signature, Uint8List message) {
    return TWPublicKeyImpl.recover(signature, message);
  }
  bool isCompressed() {
    return TWPublicKeyImpl.isCompressed(_nativehandle);
  }
  Pointer<Void> compressed() {
    return TWPublicKeyImpl.compressed(_nativehandle);
  }
  Pointer<Void> unCompressed() {
    return TWPublicKeyImpl.unCompressed(_nativehandle);
  }
  int keyType() {
    return TWPublicKeyImpl.keyType(_nativehandle);
  }
  String description() {
    return TWPublicKeyImpl.description(_nativehandle);
  }
  void delete() {
    return TWPublicKeyImpl.delete(_nativehandle);
  }
  bool verify(Uint8List signature, Uint8List message) {
    return TWPublicKeyImpl.verify(_nativehandle, signature, message);
  }
  bool verifySchnorr(Uint8List signature, Uint8List message) {
    return TWPublicKeyImpl.verifySchnorr(_nativehandle, signature, message);
  }
}
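// --- Usage sketch (not part of the library) ---
// A hedged example of the wrapper above; `publicKeyBytes`, `dataPointer` and
// `publicKeyType` are placeholders (the concrete TWPublicKeyType constants
// live in the generated bindings and are not shown here).
//
//   if (PublicKey.isValid(publicKeyBytes, publicKeyType)) {
//     final key = PublicKey.createWithData(dataPointer, publicKeyType);
//     print(key.description());
//     key.delete(); // native handles must be released explicitly
//   }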
<|start_filename|>lib/protobuf/Solana.pb.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: Solana.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
import 'dart:core' as $core;
import 'package:fixnum/fixnum.dart' as $fixnum;
import 'package:protobuf/protobuf.dart' as $pb;
class Transfer extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Transfer', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Solana.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'recipient')
..a<$fixnum.Int64>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..hasRequiredFields = false
;
Transfer._() : super();
factory Transfer({
$core.String? recipient,
$fixnum.Int64? value,
}) {
final _result = create();
if (recipient != null) {
_result.recipient = recipient;
}
if (value != null) {
_result.value = value;
}
return _result;
}
factory Transfer.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Transfer.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Transfer clone() => Transfer()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Transfer copyWith(void Function(Transfer) updates) => super.copyWith((message) => updates(message as Transfer)) as Transfer; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Transfer create() => Transfer._();
Transfer createEmptyInstance() => create();
static $pb.PbList<Transfer> createRepeated() => $pb.PbList<Transfer>();
@$core.pragma('dart2js:noInline')
static Transfer getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Transfer>(create);
static Transfer? _defaultInstance;
@$pb.TagNumber(1)
$core.String get recipient => $_getSZ(0);
@$pb.TagNumber(1)
set recipient($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasRecipient() => $_has(0);
@$pb.TagNumber(1)
void clearRecipient() => clearField(1);
@$pb.TagNumber(2)
$fixnum.Int64 get value => $_getI64(1);
@$pb.TagNumber(2)
set value($fixnum.Int64 v) { $_setInt64(1, v); }
@$pb.TagNumber(2)
$core.bool hasValue() => $_has(1);
@$pb.TagNumber(2)
void clearValue() => clearField(2);
}
class Stake extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Stake', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Solana.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'validatorPubkey')
..a<$fixnum.Int64>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..hasRequiredFields = false
;
Stake._() : super();
factory Stake({
$core.String? validatorPubkey,
$fixnum.Int64? value,
}) {
final _result = create();
if (validatorPubkey != null) {
_result.validatorPubkey = validatorPubkey;
}
if (value != null) {
_result.value = value;
}
return _result;
}
factory Stake.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Stake.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Stake clone() => Stake()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Stake copyWith(void Function(Stake) updates) => super.copyWith((message) => updates(message as Stake)) as Stake; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Stake create() => Stake._();
Stake createEmptyInstance() => create();
static $pb.PbList<Stake> createRepeated() => $pb.PbList<Stake>();
@$core.pragma('dart2js:noInline')
static Stake getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Stake>(create);
static Stake? _defaultInstance;
@$pb.TagNumber(1)
$core.String get validatorPubkey => $_getSZ(0);
@$pb.TagNumber(1)
set validatorPubkey($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasValidatorPubkey() => $_has(0);
@$pb.TagNumber(1)
void clearValidatorPubkey() => clearField(1);
@$pb.TagNumber(2)
$fixnum.Int64 get value => $_getI64(1);
@$pb.TagNumber(2)
set value($fixnum.Int64 v) { $_setInt64(1, v); }
@$pb.TagNumber(2)
$core.bool hasValue() => $_has(1);
@$pb.TagNumber(2)
void clearValue() => clearField(2);
}
class DeactivateStake extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DeactivateStake', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Solana.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'validatorPubkey')
..hasRequiredFields = false
;
DeactivateStake._() : super();
factory DeactivateStake({
$core.String? validatorPubkey,
}) {
final _result = create();
if (validatorPubkey != null) {
_result.validatorPubkey = validatorPubkey;
}
return _result;
}
factory DeactivateStake.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory DeactivateStake.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
DeactivateStake clone() => DeactivateStake()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
DeactivateStake copyWith(void Function(DeactivateStake) updates) => super.copyWith((message) => updates(message as DeactivateStake)) as DeactivateStake; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static DeactivateStake create() => DeactivateStake._();
DeactivateStake createEmptyInstance() => create();
static $pb.PbList<DeactivateStake> createRepeated() => $pb.PbList<DeactivateStake>();
@$core.pragma('dart2js:noInline')
static DeactivateStake getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<DeactivateStake>(create);
static DeactivateStake? _defaultInstance;
@$pb.TagNumber(1)
$core.String get validatorPubkey => $_getSZ(0);
@$pb.TagNumber(1)
set validatorPubkey($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasValidatorPubkey() => $_has(0);
@$pb.TagNumber(1)
void clearValidatorPubkey() => clearField(1);
}
class WithdrawStake extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'WithdrawStake', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Solana.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'validatorPubkey')
..a<$fixnum.Int64>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..hasRequiredFields = false
;
WithdrawStake._() : super();
factory WithdrawStake({
$core.String? validatorPubkey,
$fixnum.Int64? value,
}) {
final _result = create();
if (validatorPubkey != null) {
_result.validatorPubkey = validatorPubkey;
}
if (value != null) {
_result.value = value;
}
return _result;
}
factory WithdrawStake.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory WithdrawStake.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
WithdrawStake clone() => WithdrawStake()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
WithdrawStake copyWith(void Function(WithdrawStake) updates) => super.copyWith((message) => updates(message as WithdrawStake)) as WithdrawStake; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static WithdrawStake create() => WithdrawStake._();
WithdrawStake createEmptyInstance() => create();
static $pb.PbList<WithdrawStake> createRepeated() => $pb.PbList<WithdrawStake>();
@$core.pragma('dart2js:noInline')
static WithdrawStake getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<WithdrawStake>(create);
static WithdrawStake? _defaultInstance;
@$pb.TagNumber(1)
$core.String get validatorPubkey => $_getSZ(0);
@$pb.TagNumber(1)
set validatorPubkey($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasValidatorPubkey() => $_has(0);
@$pb.TagNumber(1)
void clearValidatorPubkey() => clearField(1);
@$pb.TagNumber(2)
$fixnum.Int64 get value => $_getI64(1);
@$pb.TagNumber(2)
set value($fixnum.Int64 v) { $_setInt64(1, v); }
@$pb.TagNumber(2)
$core.bool hasValue() => $_has(1);
@$pb.TagNumber(2)
void clearValue() => clearField(2);
}
class CreateTokenAccount extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'CreateTokenAccount', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Solana.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'mainAddress')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'tokenMintAddress')
..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'tokenAddress')
..hasRequiredFields = false
;
CreateTokenAccount._() : super();
factory CreateTokenAccount({
$core.String? mainAddress,
$core.String? tokenMintAddress,
$core.String? tokenAddress,
}) {
final _result = create();
if (mainAddress != null) {
_result.mainAddress = mainAddress;
}
if (tokenMintAddress != null) {
_result.tokenMintAddress = tokenMintAddress;
}
if (tokenAddress != null) {
_result.tokenAddress = tokenAddress;
}
return _result;
}
factory CreateTokenAccount.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory CreateTokenAccount.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
CreateTokenAccount clone() => CreateTokenAccount()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
CreateTokenAccount copyWith(void Function(CreateTokenAccount) updates) => super.copyWith((message) => updates(message as CreateTokenAccount)) as CreateTokenAccount; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static CreateTokenAccount create() => CreateTokenAccount._();
CreateTokenAccount createEmptyInstance() => create();
static $pb.PbList<CreateTokenAccount> createRepeated() => $pb.PbList<CreateTokenAccount>();
@$core.pragma('dart2js:noInline')
static CreateTokenAccount getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<CreateTokenAccount>(create);
static CreateTokenAccount? _defaultInstance;
@$pb.TagNumber(1)
$core.String get mainAddress => $_getSZ(0);
@$pb.TagNumber(1)
set mainAddress($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasMainAddress() => $_has(0);
@$pb.TagNumber(1)
void clearMainAddress() => clearField(1);
@$pb.TagNumber(2)
$core.String get tokenMintAddress => $_getSZ(1);
@$pb.TagNumber(2)
set tokenMintAddress($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasTokenMintAddress() => $_has(1);
@$pb.TagNumber(2)
void clearTokenMintAddress() => clearField(2);
@$pb.TagNumber(3)
$core.String get tokenAddress => $_getSZ(2);
@$pb.TagNumber(3)
set tokenAddress($core.String v) { $_setString(2, v); }
@$pb.TagNumber(3)
$core.bool hasTokenAddress() => $_has(2);
@$pb.TagNumber(3)
void clearTokenAddress() => clearField(3);
}
class TokenTransfer extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TokenTransfer', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Solana.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'tokenMintAddress')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'senderTokenAddress')
..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'recipientTokenAddress')
..a<$fixnum.Int64>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'amount', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..a<$core.int>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'decimals', $pb.PbFieldType.OU3)
..hasRequiredFields = false
;
TokenTransfer._() : super();
factory TokenTransfer({
$core.String? tokenMintAddress,
$core.String? senderTokenAddress,
$core.String? recipientTokenAddress,
$fixnum.Int64? amount,
$core.int? decimals,
}) {
final _result = create();
if (tokenMintAddress != null) {
_result.tokenMintAddress = tokenMintAddress;
}
if (senderTokenAddress != null) {
_result.senderTokenAddress = senderTokenAddress;
}
if (recipientTokenAddress != null) {
_result.recipientTokenAddress = recipientTokenAddress;
}
if (amount != null) {
_result.amount = amount;
}
if (decimals != null) {
_result.decimals = decimals;
}
return _result;
}
factory TokenTransfer.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory TokenTransfer.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
TokenTransfer clone() => TokenTransfer()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
TokenTransfer copyWith(void Function(TokenTransfer) updates) => super.copyWith((message) => updates(message as TokenTransfer)) as TokenTransfer; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static TokenTransfer create() => TokenTransfer._();
TokenTransfer createEmptyInstance() => create();
static $pb.PbList<TokenTransfer> createRepeated() => $pb.PbList<TokenTransfer>();
@$core.pragma('dart2js:noInline')
static TokenTransfer getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<TokenTransfer>(create);
static TokenTransfer? _defaultInstance;
@$pb.TagNumber(1)
$core.String get tokenMintAddress => $_getSZ(0);
@$pb.TagNumber(1)
set tokenMintAddress($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasTokenMintAddress() => $_has(0);
@$pb.TagNumber(1)
void clearTokenMintAddress() => clearField(1);
@$pb.TagNumber(2)
$core.String get senderTokenAddress => $_getSZ(1);
@$pb.TagNumber(2)
set senderTokenAddress($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasSenderTokenAddress() => $_has(1);
@$pb.TagNumber(2)
void clearSenderTokenAddress() => clearField(2);
@$pb.TagNumber(3)
$core.String get recipientTokenAddress => $_getSZ(2);
@$pb.TagNumber(3)
set recipientTokenAddress($core.String v) { $_setString(2, v); }
@$pb.TagNumber(3)
$core.bool hasRecipientTokenAddress() => $_has(2);
@$pb.TagNumber(3)
void clearRecipientTokenAddress() => clearField(3);
@$pb.TagNumber(4)
$fixnum.Int64 get amount => $_getI64(3);
@$pb.TagNumber(4)
set amount($fixnum.Int64 v) { $_setInt64(3, v); }
@$pb.TagNumber(4)
$core.bool hasAmount() => $_has(3);
@$pb.TagNumber(4)
void clearAmount() => clearField(4);
@$pb.TagNumber(5)
$core.int get decimals => $_getIZ(4);
@$pb.TagNumber(5)
set decimals($core.int v) { $_setUnsignedInt32(4, v); }
@$pb.TagNumber(5)
$core.bool hasDecimals() => $_has(4);
@$pb.TagNumber(5)
void clearDecimals() => clearField(5);
}
class CreateAndTransferToken extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'CreateAndTransferToken', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Solana.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'recipientMainAddress')
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'tokenMintAddress')
..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'recipientTokenAddress')
..aOS(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'senderTokenAddress')
..a<$fixnum.Int64>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'amount', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO)
..a<$core.int>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'decimals', $pb.PbFieldType.OU3)
..hasRequiredFields = false
;
CreateAndTransferToken._() : super();
factory CreateAndTransferToken({
$core.String? recipientMainAddress,
$core.String? tokenMintAddress,
$core.String? recipientTokenAddress,
$core.String? senderTokenAddress,
$fixnum.Int64? amount,
$core.int? decimals,
}) {
final _result = create();
if (recipientMainAddress != null) {
_result.recipientMainAddress = recipientMainAddress;
}
if (tokenMintAddress != null) {
_result.tokenMintAddress = tokenMintAddress;
}
if (recipientTokenAddress != null) {
_result.recipientTokenAddress = recipientTokenAddress;
}
if (senderTokenAddress != null) {
_result.senderTokenAddress = senderTokenAddress;
}
if (amount != null) {
_result.amount = amount;
}
if (decimals != null) {
_result.decimals = decimals;
}
return _result;
}
factory CreateAndTransferToken.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory CreateAndTransferToken.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
CreateAndTransferToken clone() => CreateAndTransferToken()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
CreateAndTransferToken copyWith(void Function(CreateAndTransferToken) updates) => super.copyWith((message) => updates(message as CreateAndTransferToken)) as CreateAndTransferToken; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static CreateAndTransferToken create() => CreateAndTransferToken._();
CreateAndTransferToken createEmptyInstance() => create();
static $pb.PbList<CreateAndTransferToken> createRepeated() => $pb.PbList<CreateAndTransferToken>();
@$core.pragma('dart2js:noInline')
static CreateAndTransferToken getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<CreateAndTransferToken>(create);
static CreateAndTransferToken? _defaultInstance;
@$pb.TagNumber(1)
$core.String get recipientMainAddress => $_getSZ(0);
@$pb.TagNumber(1)
set recipientMainAddress($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasRecipientMainAddress() => $_has(0);
@$pb.TagNumber(1)
void clearRecipientMainAddress() => clearField(1);
@$pb.TagNumber(2)
$core.String get tokenMintAddress => $_getSZ(1);
@$pb.TagNumber(2)
set tokenMintAddress($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasTokenMintAddress() => $_has(1);
@$pb.TagNumber(2)
void clearTokenMintAddress() => clearField(2);
@$pb.TagNumber(3)
$core.String get recipientTokenAddress => $_getSZ(2);
@$pb.TagNumber(3)
set recipientTokenAddress($core.String v) { $_setString(2, v); }
@$pb.TagNumber(3)
$core.bool hasRecipientTokenAddress() => $_has(2);
@$pb.TagNumber(3)
void clearRecipientTokenAddress() => clearField(3);
@$pb.TagNumber(4)
$core.String get senderTokenAddress => $_getSZ(3);
@$pb.TagNumber(4)
set senderTokenAddress($core.String v) { $_setString(3, v); }
@$pb.TagNumber(4)
$core.bool hasSenderTokenAddress() => $_has(3);
@$pb.TagNumber(4)
void clearSenderTokenAddress() => clearField(4);
@$pb.TagNumber(5)
$fixnum.Int64 get amount => $_getI64(4);
@$pb.TagNumber(5)
set amount($fixnum.Int64 v) { $_setInt64(4, v); }
@$pb.TagNumber(5)
$core.bool hasAmount() => $_has(4);
@$pb.TagNumber(5)
void clearAmount() => clearField(5);
@$pb.TagNumber(6)
$core.int get decimals => $_getIZ(5);
@$pb.TagNumber(6)
set decimals($core.int v) { $_setUnsignedInt32(5, v); }
@$pb.TagNumber(6)
$core.bool hasDecimals() => $_has(5);
@$pb.TagNumber(6)
void clearDecimals() => clearField(6);
}
enum SigningInput_TransactionType {
transferTransaction,
stakeTransaction,
deactivateStakeTransaction,
withdrawTransaction,
createTokenAccountTransaction,
tokenTransferTransaction,
createAndTransferTokenTransaction,
notSet
}
class SigningInput extends $pb.GeneratedMessage {
static const $core.Map<$core.int, SigningInput_TransactionType> _SigningInput_TransactionTypeByTag = {
3 : SigningInput_TransactionType.transferTransaction,
4 : SigningInput_TransactionType.stakeTransaction,
5 : SigningInput_TransactionType.deactivateStakeTransaction,
6 : SigningInput_TransactionType.withdrawTransaction,
7 : SigningInput_TransactionType.createTokenAccountTransaction,
8 : SigningInput_TransactionType.tokenTransferTransaction,
9 : SigningInput_TransactionType.createAndTransferTokenTransaction,
0 : SigningInput_TransactionType.notSet
};
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'SigningInput', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Solana.Proto'), createEmptyInstance: create)
..oo(0, [3, 4, 5, 6, 7, 8, 9])
..a<$core.List<$core.int>>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'privateKey', $pb.PbFieldType.OY)
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'recentBlockhash')
..aOM<Transfer>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transferTransaction', subBuilder: Transfer.create)
..aOM<Stake>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'stakeTransaction', subBuilder: Stake.create)
..aOM<DeactivateStake>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'deactivateStakeTransaction', subBuilder: DeactivateStake.create)
..aOM<WithdrawStake>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'withdrawTransaction', subBuilder: WithdrawStake.create)
..aOM<CreateTokenAccount>(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'createTokenAccountTransaction', subBuilder: CreateTokenAccount.create)
..aOM<TokenTransfer>(8, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'tokenTransferTransaction', subBuilder: TokenTransfer.create)
..aOM<CreateAndTransferToken>(9, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'createAndTransferTokenTransaction', subBuilder: CreateAndTransferToken.create)
..hasRequiredFields = false
;
SigningInput._() : super();
factory SigningInput({
$core.List<$core.int>? privateKey,
$core.String? recentBlockhash,
Transfer? transferTransaction,
Stake? stakeTransaction,
DeactivateStake? deactivateStakeTransaction,
WithdrawStake? withdrawTransaction,
CreateTokenAccount? createTokenAccountTransaction,
TokenTransfer? tokenTransferTransaction,
CreateAndTransferToken? createAndTransferTokenTransaction,
}) {
final _result = create();
if (privateKey != null) {
_result.privateKey = privateKey;
}
if (recentBlockhash != null) {
_result.recentBlockhash = recentBlockhash;
}
if (transferTransaction != null) {
_result.transferTransaction = transferTransaction;
}
if (stakeTransaction != null) {
_result.stakeTransaction = stakeTransaction;
}
if (deactivateStakeTransaction != null) {
_result.deactivateStakeTransaction = deactivateStakeTransaction;
}
if (withdrawTransaction != null) {
_result.withdrawTransaction = withdrawTransaction;
}
if (createTokenAccountTransaction != null) {
_result.createTokenAccountTransaction = createTokenAccountTransaction;
}
if (tokenTransferTransaction != null) {
_result.tokenTransferTransaction = tokenTransferTransaction;
}
if (createAndTransferTokenTransaction != null) {
_result.createAndTransferTokenTransaction = createAndTransferTokenTransaction;
}
return _result;
}
factory SigningInput.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory SigningInput.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
SigningInput clone() => SigningInput()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
SigningInput copyWith(void Function(SigningInput) updates) => super.copyWith((message) => updates(message as SigningInput)) as SigningInput; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static SigningInput create() => SigningInput._();
SigningInput createEmptyInstance() => create();
static $pb.PbList<SigningInput> createRepeated() => $pb.PbList<SigningInput>();
@$core.pragma('dart2js:noInline')
static SigningInput getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<SigningInput>(create);
static SigningInput? _defaultInstance;
SigningInput_TransactionType whichTransactionType() => _SigningInput_TransactionTypeByTag[$_whichOneof(0)]!;
void clearTransactionType() => clearField($_whichOneof(0));
@$pb.TagNumber(1)
$core.List<$core.int> get privateKey => $_getN(0);
@$pb.TagNumber(1)
set privateKey($core.List<$core.int> v) { $_setBytes(0, v); }
@$pb.TagNumber(1)
$core.bool hasPrivateKey() => $_has(0);
@$pb.TagNumber(1)
void clearPrivateKey() => clearField(1);
@$pb.TagNumber(2)
$core.String get recentBlockhash => $_getSZ(1);
@$pb.TagNumber(2)
set recentBlockhash($core.String v) { $_setString(1, v); }
@$pb.TagNumber(2)
$core.bool hasRecentBlockhash() => $_has(1);
@$pb.TagNumber(2)
void clearRecentBlockhash() => clearField(2);
@$pb.TagNumber(3)
Transfer get transferTransaction => $_getN(2);
@$pb.TagNumber(3)
set transferTransaction(Transfer v) { setField(3, v); }
@$pb.TagNumber(3)
$core.bool hasTransferTransaction() => $_has(2);
@$pb.TagNumber(3)
void clearTransferTransaction() => clearField(3);
@$pb.TagNumber(3)
Transfer ensureTransferTransaction() => $_ensure(2);
@$pb.TagNumber(4)
Stake get stakeTransaction => $_getN(3);
@$pb.TagNumber(4)
set stakeTransaction(Stake v) { setField(4, v); }
@$pb.TagNumber(4)
$core.bool hasStakeTransaction() => $_has(3);
@$pb.TagNumber(4)
void clearStakeTransaction() => clearField(4);
@$pb.TagNumber(4)
Stake ensureStakeTransaction() => $_ensure(3);
@$pb.TagNumber(5)
DeactivateStake get deactivateStakeTransaction => $_getN(4);
@$pb.TagNumber(5)
set deactivateStakeTransaction(DeactivateStake v) { setField(5, v); }
@$pb.TagNumber(5)
$core.bool hasDeactivateStakeTransaction() => $_has(4);
@$pb.TagNumber(5)
void clearDeactivateStakeTransaction() => clearField(5);
@$pb.TagNumber(5)
DeactivateStake ensureDeactivateStakeTransaction() => $_ensure(4);
@$pb.TagNumber(6)
WithdrawStake get withdrawTransaction => $_getN(5);
@$pb.TagNumber(6)
set withdrawTransaction(WithdrawStake v) { setField(6, v); }
@$pb.TagNumber(6)
$core.bool hasWithdrawTransaction() => $_has(5);
@$pb.TagNumber(6)
void clearWithdrawTransaction() => clearField(6);
@$pb.TagNumber(6)
WithdrawStake ensureWithdrawTransaction() => $_ensure(5);
@$pb.TagNumber(7)
CreateTokenAccount get createTokenAccountTransaction => $_getN(6);
@$pb.TagNumber(7)
set createTokenAccountTransaction(CreateTokenAccount v) { setField(7, v); }
@$pb.TagNumber(7)
$core.bool hasCreateTokenAccountTransaction() => $_has(6);
@$pb.TagNumber(7)
void clearCreateTokenAccountTransaction() => clearField(7);
@$pb.TagNumber(7)
CreateTokenAccount ensureCreateTokenAccountTransaction() => $_ensure(6);
@$pb.TagNumber(8)
TokenTransfer get tokenTransferTransaction => $_getN(7);
@$pb.TagNumber(8)
set tokenTransferTransaction(TokenTransfer v) { setField(8, v); }
@$pb.TagNumber(8)
$core.bool hasTokenTransferTransaction() => $_has(7);
@$pb.TagNumber(8)
void clearTokenTransferTransaction() => clearField(8);
@$pb.TagNumber(8)
TokenTransfer ensureTokenTransferTransaction() => $_ensure(7);
@$pb.TagNumber(9)
CreateAndTransferToken get createAndTransferTokenTransaction => $_getN(8);
@$pb.TagNumber(9)
set createAndTransferTokenTransaction(CreateAndTransferToken v) { setField(9, v); }
@$pb.TagNumber(9)
$core.bool hasCreateAndTransferTokenTransaction() => $_has(8);
@$pb.TagNumber(9)
void clearCreateAndTransferTokenTransaction() => clearField(9);
@$pb.TagNumber(9)
CreateAndTransferToken ensureCreateAndTransferTokenTransaction() => $_ensure(8);
}
class SigningOutput extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'SigningOutput', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TW.Solana.Proto'), createEmptyInstance: create)
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'encoded')
..hasRequiredFields = false
;
SigningOutput._() : super();
factory SigningOutput({
$core.String? encoded,
}) {
final _result = create();
if (encoded != null) {
_result.encoded = encoded;
}
return _result;
}
factory SigningOutput.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory SigningOutput.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
SigningOutput clone() => SigningOutput()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
SigningOutput copyWith(void Function(SigningOutput) updates) => super.copyWith((message) => updates(message as SigningOutput)) as SigningOutput; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static SigningOutput create() => SigningOutput._();
SigningOutput createEmptyInstance() => create();
static $pb.PbList<SigningOutput> createRepeated() => $pb.PbList<SigningOutput>();
@$core.pragma('dart2js:noInline')
static SigningOutput getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<SigningOutput>(create);
static SigningOutput? _defaultInstance;
@$pb.TagNumber(1)
$core.String get encoded => $_getSZ(0);
@$pb.TagNumber(1)
set encoded($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasEncoded() => $_has(0);
@$pb.TagNumber(1)
void clearEncoded() => clearField(1);
}
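// --- Usage sketch (not part of the generated file) ---
// A hedged example of building a plain transfer with the oneof above; the
// key, blockhash and recipient are placeholders, not real chain data.
//
//   final input = SigningInput(
//     privateKey: privateKeyBytes,             // hypothetical 32-byte seed
//     recentBlockhash: recentBlockhashString,  // obtained from a Solana RPC node
//     transferTransaction: Transfer(
//       recipient: recipientAddress,
//       value: $fixnum.Int64(42),
//     ),
//   );
//   // exactly one of the oneof members (tags 3..9) should be set:
//   assert(input.whichTransactionType() ==
//       SigningInput_TransactionType.transferTransaction);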
<|start_filename|>lib/dart_impl/tw_any_address_impl.dart<|end_filename|>
part of trust_wallet_core_ffi;
class TWAnyAddressImpl extends TWAnyAddress {
static bool isValid(String address, int coinType) {
final _address = TWStringImpl.toTWString(address);
final result = TWAnyAddress.TWAnyAddressIsValid(_address, coinType) >= 1;
TWStringImpl.delete(_address);
return result;
}
static Pointer<Void> createWithString(String address, int coinType) {
final _address = TWStringImpl.toTWString(address);
final result = TWAnyAddress.TWAnyAddressCreateWithString(_address, coinType);
TWStringImpl.delete(_address);
return result;
}
static Pointer<Void> createWithPublicKey(Pointer<Void> publicKey, int coinType) {
final result = TWAnyAddress.TWAnyAddressCreateWithPublicKey(publicKey, coinType);
return result;
}
static Uint8List data(Pointer<Void> anyAddress) {
final addressData = TWAnyAddress.TWAnyAddressData(anyAddress);
final result = TWData.TWDataBytes(addressData).asTypedList(TWData.TWDataSize(addressData));
return result;
}
static String description(Pointer<Void> anyAddress) {
final twString = TWAnyAddress.TWAnyAddressDescription(anyAddress);
return TWStringImpl.toDartString(twString);
}
}
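// --- Usage sketch (not part of the library) ---
// A hedged example of the helpers above; `coinType` stands in for one of the
// TWCoinType constants defined elsewhere in the bindings.
//
//   if (TWAnyAddressImpl.isValid(address, coinType)) {
//     final handle = TWAnyAddressImpl.createWithString(address, coinType);
//     print(TWAnyAddressImpl.description(handle)); // normalized address string
//     print(TWAnyAddressImpl.data(handle));        // raw address payload bytes
//   }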
| yitokenlabs/flutter_trust_wallet_core_lib_include |
<|start_filename|>dev-env/build.js<|end_filename|>
// Native
import fs from 'fs-extra';
import { exec } from 'child_process'
// npm
import clc from 'cli-color';
// package
import makeWebpackConfig from './webpack/config';
import webpackBuild from './webpack/build';
import Manifest from './manifest'
import * as paths from './paths'
// Clear release directory
fs.removeSync(paths.release)
fs.mkdirSync(paths.release)
// Create manifest
const manifest = new Manifest({manifest: paths.manifest, build: paths.build})
manifest.run()
// Build webpack
const webpackConfig = makeWebpackConfig(manifest)
const building = webpackBuild(webpackConfig)
building.then(() => {
  console.log(clc.green("Building done"))
// Build extension
// TODO try detect system and Chrome path. Default is OSX :)
const chromeBinaryPath = process.env.CHROME_BIN || '/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome'
console.log(clc.yellow(`Packing extension into '${paths.build}'`))
exec(`\$('${chromeBinaryPath}' --pack-extension=${paths.build})`, (error, stdout, stderr) => {
console.log(clc.green('Done'));
if(stdout)
console.log(clc.yellow('stdout: ' + stdout));
if(stderr)
console.log(clc.red('stderr: ' + stderr));
if(error !== null)
console.log(clc.red('exec error: ' + error));
})
}).catch((reason) => {
console.error(clc.red("Building failed"))
console.error(clc.red(reason.stack))
})
<|start_filename|>dev-env/paths.js<|end_filename|>
import path from 'path'
export const root = path.normalize(path.join(__dirname, ".."))
export const packageJson = path.normalize(path.join(root, "package.json"))
export const src = path.normalize(path.join(root, "src"))
export const release = path.normalize(path.join(root, "release"))
export const build = process.env.NODE_ENV == "development"
? path.normalize(path.join(root, "build"))
: path.normalize(path.join(release, "build"))
export const manifest = path.normalize(path.join(src, "manifest.json"))
<|start_filename|>dev-env/webpack/build.js<|end_filename|>
'use strict';
var webpack = require('webpack');
module.exports = function(webpackConfig) {
return new Promise((resolve, reject) => {
webpack(webpackConfig, function(fatalError, stats) {
var jsonStats = stats.toJson();
// We can save jsonStats to be analyzed with
// http://webpack.github.io/analyse or
// https://github.com/robertknight/webpack-bundle-size-analyzer.
// var fs = require('fs');
// fs.writeFileSync('./bundle-stats.json', JSON.stringify(jsonStats));
var buildError = fatalError || jsonStats.errors[0] || jsonStats.warnings[0];
if (buildError) {
reject(buildError)
} else {
resolve(jsonStats)
}
})
})
}
| cafe4it/AliExport |
<|start_filename|>src/WelcomeView.lua<|end_filename|>
package.path = package.path .. ";/app/bin/elementarylua/?.lua;/app/share/lua/5.1/?.lua"
package.cpath = package.cpath .. ";/app/lib/lua/5.1/?.so"
local lgi = require 'lgi'
local Gtk = lgi.require('Gtk')
local Granite = lgi.require('Granite')
WelcomeView = {}
function WelcomeView.new()
local welcome = Granite.WidgetsWelcome {
title = "Welcome to ElementaryLua",
subtitle = "It's just a template"
}
welcome:append("network-workgroup", "Lua Official Page", "The Official Website of Lua.")
welcome:append ("applications-development", "LGI Bindings", "GTK Bindings for Lua.")
welcome:append ("distributor-logo", "Elementary Docs", "Documentation Guide for Devs.")
local welcome_context = welcome:get_style_context():remove_class("view")
function welcome:on_activated(index)
if index == 0 then
os.execute("xdg-open https://www.lua.org/")
elseif index == 1 then
os.execute("xdg-open https://github.com/pavouk/lgi")
elseif index == 2 then
os.execute("xdg-open https://docs.elementary.io/develop/")
end
end
return welcome
end
<|start_filename|>Makefile<|end_filename|>
.PHONY: all install uninstall
PREFIX ?= /app
install:
install -D -m 0755 com.github.jeysonflores.elementarylua $(PREFIX)/bin/com.github.jeysonflores.elementarylua
install -D -m 0644 src/Application.lua $(PREFIX)/bin/elementarylua/src/Application.lua
install -D -m 0644 src/MainWindow.lua $(PREFIX)/bin/elementarylua/src/MainWindow.lua
install -D -m 0644 src/WelcomeView.lua $(PREFIX)/bin/elementarylua/src/WelcomeView.lua
install -D -m 0644 data/com.github.jeysonflores.elementarylua.gschema.xml $(PREFIX)/share/glib-2.0/schemas/com.github.jeysonflores.elementarylua.gschema.xml
install -D -m 0644 data/com.github.jeysonflores.elementarylua.desktop $(PREFIX)/share/applications/com.github.jeysonflores.elementarylua.desktop
install -D -m 0644 data/assets/icons/com.github.jeysonflores.elementarylua.svg $(PREFIX)/share/icons/hicolor/scalable/apps/com.github.jeysonflores.elementarylua.svg
glib-compile-schemas $(PREFIX)/share/glib-2.0/schemas/
uninstall:
rm -f $(PREFIX)/bin/com.github.jeysonflores.elementarylua
rm -f $(PREFIX)/bin/elementarylua/src/Application.lua
rm -f $(PREFIX)/bin/elementarylua/src/MainWindow.lua
rm -f $(PREFIX)/bin/elementarylua/src/WelcomeView.lua
rm -f $(PREFIX)/share/glib-2.0/schemas/com.github.jeysonflores.elementarylua.gschema.xml
rm -f $(PREFIX)/share/icons/hicolor/scalable/apps/com.github.jeysonflores.elementarylua.svg
	gtk-update-icon-cache $(PREFIX)/share/icons/hicolor
glib-compile-schemas $(PREFIX)/share/glib-2.0/schemas/
<|start_filename|>src/MainWindow.lua<|end_filename|>
package.path = package.path .. ";/app/bin/elementarylua/?.lua;/app/share/lua/5.1/?.lua"
package.cpath = package.cpath .. ";/app/lib/lua/5.1/?.so"
local lgi = require 'lgi'
local Gtk = lgi.require('Gtk')
local Handy = lgi.require('Handy')
local Gio = lgi.require('Gio')
require "src.WelcomeView"
MainWindow = {}
function MainWindow.new()
local title_button = Gtk.Button {
label = "ElementaryLua",
can_focus = false
}
local title_button_context = title_button:get_style_context():add_class("keycap")
local titlebar = Gtk.HeaderBar {
custom_title = title_button,
decoration_layout = "close:",
show_close_button = true
}
local titlebar_context = titlebar:get_style_context():add_class("flat")
local main_window = Gtk.Window {}
main_window:set_titlebar(titlebar)
main_window:add(WelcomeView.new())
local main_window_context = main_window:get_style_context():add_class("rounded")
function main_window:on_delete_event()
local settings = Gio.Settings {
schema_id = "com.github.jeysonflores.elementarylua"
}
local root_x, root_y = main_window:get_position()
local width, height = main_window:get_size()
settings:set_int("pos-x", root_x)
settings:set_int("pos-y", root_y)
settings:set_int("window-width", width)
settings:set_int("window-height", height)
        Gtk.main_quit()
end
return main_window
end
<|start_filename|>src/Application.lua<|end_filename|>
package.path = package.path .. ";/app/bin/elementarylua/?.lua;/app/share/lua/5.1/?.lua"
package.cpath = package.cpath .. ";/app/lib/lua/5.1/?.so"
local lgi = require 'lgi'
local Gtk = lgi.require('Gtk')
local Gio = lgi.require('Gio')
local Granite = lgi.require('Granite')
local GLib = lgi.require('GLib')
require "src.MainWindow"
Application = {}
function Application.new()
local app = Gtk.Application {
application_id = "com.github.jeysonflores.elementarylua"
}
function app:on_activate()
local main_window = MainWindow.new()
main_window.application = self
local settings = Gio.Settings {
schema_id = "com.github.jeysonflores.elementarylua"
}
local granite_settings = Granite.Settings{}
local gtk_settings = Gtk.Settings:get_default()
if granite_settings.prefers_color_scheme == "DARK" then
gtk_settings.gtk_application_prefer_dark_theme = true
else
gtk_settings.gtk_application_prefer_dark_theme = false
end
granite_settings.on_notify["prefers-color-scheme"] = function(self, pspec)
if granite_settings.prefers_color_scheme == "DARK" then
gtk_settings.gtk_application_prefer_dark_theme = true
else
gtk_settings.gtk_application_prefer_dark_theme = false
end
end
main_window:resize(settings:get_int("window-width"), settings:get_int("window-height"))
main_window:move(settings:get_int("pos-x"), settings:get_int("pos-y"))
main_window:show_all()
end
return app
end
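-- A hedged sketch (assumption, not shown in this section) of how the launcher
-- installed by the Makefile (com.github.jeysonflores.elementarylua) might
-- start the application defined above:
--
--   require "src.Application"
--   local app = Application.new()
--   app:run(arg)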
| JeysonFlores/ElementaryLua |
<|start_filename|>test/test.hs<|end_filename|>
import Control.Monad
import qualified Test.P.Applicative
import qualified Test.P.Bool
import qualified Test.P.Bifunctor.Trans
import qualified Test.P.Either
import qualified Test.P.Foldable
import qualified Test.P.Maybe
import qualified Test.P.Monoid
import qualified Test.P.Ord
import qualified Test.P.List
import qualified Test.P.Function
import System.Exit
import System.IO
main :: IO ()
main =
hSetBuffering stdout LineBuffering >> mapM id [
Test.P.Applicative.tests
, Test.P.Bifunctor.Trans.tests
, Test.P.Bool.tests
, Test.P.Either.tests
, Test.P.Foldable.tests
, Test.P.Maybe.tests
, Test.P.Monoid.tests
, Test.P.Ord.tests
, Test.P.List.tests
, Test.P.Function.tests
] >>= \rs -> when (not . all id $ rs) exitFailure
<|start_filename|>src/P/Maybe.hs<|end_filename|>
module P.Maybe (
fromMaybeM
, lazyMaybe'
, strictMaybe
, mcase
, mcase'
) where
import Control.Applicative
import P.Maybe.Strict
import Prelude
fromMaybeM :: Applicative f => f a -> Maybe a -> f a
fromMaybeM = flip maybe pure
strictMaybe :: Maybe a -> Maybe' a
strictMaybe Nothing = Nothing'
strictMaybe (Just x) = Just' x
lazyMaybe' :: Maybe' a -> Maybe a
lazyMaybe' Nothing' = Nothing
lazyMaybe' (Just' x) = Just x
mcase :: Maybe a -> b -> (a -> b) -> b
mcase m b = flip (maybe b) m
mcase' :: Maybe' a -> b -> (a -> b) -> b
mcase' m b = flip (maybe' b) m
<|start_filename|>src/P/Applicative.hs<|end_filename|>
module P.Applicative (
ApplicativeMonoid (..)
, valueOrEmpty
, emptyOrValue
, orEmpty
, eitherA
, (<<>>)
) where
import Control.Applicative
import Data.Semigroup
import Prelude
valueOrEmpty :: Alternative f => Bool -> a -> f a
valueOrEmpty b a = if b then pure a else empty
emptyOrValue :: Alternative f => Bool -> a -> f a
emptyOrValue = valueOrEmpty . not
orEmpty :: (Alternative f, Monoid a) => f a -> f a
orEmpty f = f <|> pure mempty
-- | Combine two alternatives.
eitherA :: (Alternative f) => f a -> f b -> f (Either a b)
eitherA a b = (Left <$> a) <|> (Right <$> b)
-- | Applicative mappend
(<<>>) :: (Semigroup a, Applicative f) => f a -> f a -> f a
(<<>>) = liftA2 (<>)
-- | wrapper for monoids in an applicative context
newtype ApplicativeMonoid m a =
ApplicativeMonoid { unApplicativeMonoid :: m a }
deriving (Show, Eq)
instance (Semigroup a, Applicative m) => Semigroup (ApplicativeMonoid m a) where
ApplicativeMonoid a <> ApplicativeMonoid b = ApplicativeMonoid (a <<>> b)
instance (Semigroup a, Monoid a, Applicative m) => Monoid (ApplicativeMonoid m a) where
mempty = ApplicativeMonoid (pure mempty)
mappend (ApplicativeMonoid a) (ApplicativeMonoid b) = ApplicativeMonoid (a <<>> b)
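-- A small usage sketch (assumption, using 'Sum' from "Data.Monoid"):
--
-- >>> Just (Sum 1) <<>> Just (Sum 2)
-- Just (Sum {getSum = 3})
--
-- >>> unApplicativeMonoid (ApplicativeMonoid (Just (Sum 1)) <> ApplicativeMonoid (Just (Sum 2)))
-- Just (Sum {getSum = 3})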
<|start_filename|>test/Test/P/Monoid.hs<|end_filename|>
{-# LANGUAGE TemplateHaskell #-}
module Test.P.Monoid where
import P.Monoid
import Test.QuickCheck
prop_valueOrZero_true :: [Int] -> Property
prop_valueOrZero_true a = valueOrZero True a === a
prop_valueOrZero_false :: [Int] -> Property
prop_valueOrZero_false a = valueOrZero False a === []
prop_valueOrZero_empty :: Bool -> Property
prop_valueOrZero_empty b = valueOrZero b "" === zeroOrValue b ""
prop_zeroOrValue :: Bool -> [Int] -> Property
prop_zeroOrValue b a = a /= [] ==> valueOrZero b a /= zeroOrValue b a
return []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>src/P/Bifunctor/Trans.hs<|end_filename|>
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE KindSignatures #-}
module P.Bifunctor.Trans (
BifunctorTrans(..)
) where
import Control.Monad.Trans.Except (ExceptT(..), runExceptT)
import qualified Control.Monad.Trans.Writer.Lazy as Lazy
import qualified Control.Monad.Trans.Writer.Strict as Strict
import Data.Either (Either(..))
import Data.Function ((.), id)
import Data.Functor (Functor(..))
------------------------------------------------------------------------
-- | You can define a 'BifunctorTrans' by either defining 'bimapT' or by
-- defining both 'firstT' and 'secondT'.
--
-- If you supply 'bimapT', you should ensure that:
--
-- @'bimapT' 'id' 'id' == 'id'@
--
-- If you supply 'first' and 'second', ensure:
--
-- @
-- 'firstT' 'id' == 'id'
-- 'secondT' 'id' == 'id'
-- @
--
-- If you supply both, you should also ensure:
--
-- @'bimapT' f g == 'firstT' f '.' 'secondT' g@
--
-- These ensure by parametricity:
--
-- @
-- 'bimapT' (f '.' g) (h '.' i) == 'bimapT' f h '.' 'bimapT' g i
-- 'firstT' (f '.' g) == 'firstT' f '.' 'firstT' g
-- 'secondT' (f '.' g) == 'secondT' f '.' 'secondT' g
-- @
class BifunctorTrans (t :: * -> (* -> *) -> * -> *) where
bimapT :: Functor f => (x -> y) -> (a -> b) -> t x f a -> t y f b
bimapT f g =
firstT f . secondT g
{-# INLINE bimapT #-}
firstT :: Functor f => (x -> y) -> t x f a -> t y f a
firstT f =
bimapT f id
{-# INLINE firstT #-}
secondT :: Functor f => (a -> b) -> t x f a -> t x f b
secondT =
bimapT id
{-# INLINE secondT #-}
{-# MINIMAL bimapT | firstT, secondT #-}
------------------------------------------------------------------------
instance BifunctorTrans ExceptT where
bimapT f g =
let h (Left x) = Left (f x)
h (Right a) = Right (g a)
{-# INLINE h #-}
in ExceptT . fmap h . runExceptT
{-# INLINE bimapT #-}
instance BifunctorTrans Lazy.WriterT where
bimapT f g =
let h (a, x) = (g a, f x)
{-# INLINE h #-}
in Lazy.WriterT . fmap h . Lazy.runWriterT
{-# INLINE bimapT #-}
instance BifunctorTrans Strict.WriterT where
bimapT f g =
let h (a, x) = (g a, f x)
{-# INLINE h #-}
in Strict.WriterT . fmap h . Strict.runWriterT
{-# INLINE bimapT #-}
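-- A usage sketch (assumption, not part of this module): 'firstT' maps only the
-- error channel of a transformer, e.g.
--
--   firstT show :: Functor f => ExceptT Int f a -> ExceptT String f a
--
-- so @runExceptT (firstT show (ExceptT (pure (Left 42))))@ yields
-- @pure (Left "42")@ in the underlying functor.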
<|start_filename|>src/P/Ord.hs<|end_filename|>
module P.Ord (
maxOn
, sortOn
) where
import Data.List (sortBy)
import Data.Ord (comparing)
maxOn :: (Ord o) => (a -> o) -> a -> a -> a
maxOn f x y = if f x > f y then x else y
sortOn :: (Ord o) => (a -> o) -> [a] -> [a]
sortOn = sortBy . comparing
<|start_filename|>src/P/Functor.hs<|end_filename|>
{-# LANGUAGE NoImplicitPrelude #-}
module P.Functor (
module X
, with
, (<$$>)
, fmap2
) where
import Data.Functor as X (Functor(..), ($>), (<$>), void)
import Data.Function((.))
with :: Functor f => f a -> (a -> b) -> f b
with xs f =
fmap f xs
{-# INLINE with #-}
(<$$>) :: (Functor g, Functor f) => (a -> b) -> f (g a) -> f (g b)
(<$$>) = fmap . fmap
{-# INLINE (<$$>) #-}
fmap2 :: (Functor g, Functor f) => (a -> b) -> f (g a) -> f (g b)
fmap2 = fmap . fmap
{-# INLINE fmap2 #-}
<|start_filename|>test/Test/P/List.hs<|end_filename|>
{-# LANGUAGE TemplateHaskell #-}
module Test.P.List where
import P.List
import Data.Function (on)
import qualified Data.List as L
import Data.Ord (comparing)
import Test.QuickCheck
import Test.QuickCheck.Function
prop_ordNub :: (Ord a, Show a) => [a] -> Property
prop_ordNub a =
ordNub a === L.nub a
prop_ordNubBy :: (Ord a, Show a) => [[a]] -> Property
prop_ordNubBy a =
ordNubBy (comparing length) a === L.nubBy ((==) `on` length) a
prop_sortNub :: (Ord a, Show a) => [a] -> Property
prop_sortNub a =
sortNub a === L.sort (L.nub a)
prop_lastMaybe :: (Eq a, Show a) => a -> [a] -> Property
prop_lastMaybe a l =
lastMaybe (l ++ [a]) === Just a
prop_lastMaybe_empty :: Property
prop_lastMaybe_empty =
lastMaybe [] === (Nothing :: Maybe ())
prop_count :: [Int] -> Fun Int Bool -> Property
prop_count list (Fun _ predicate) =
count predicate list === length (filter predicate list)
return []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>test/Test/P/Applicative.hs<|end_filename|>
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Test.P.Applicative where
import Control.Applicative ((<$>), pure)
import Data.Bool (Bool(..))
import Data.Function (($))
import Data.Functor.Identity (Identity(..))
import Data.Int (Int)
import Data.Maybe (Maybe(..))
import Data.Monoid (Sum(..))
import P.Applicative
import Prelude (Eq(..))
import System.IO (IO)
import Test.QuickCheck
import Test.QuickCheck.Property.Monoid (prop_Monoid, T(..))
import Test.QuickCheck.Property.Common (eq)
prop_valueOrEmpty_true :: Int -> Property
prop_valueOrEmpty_true a = valueOrEmpty True a === Just a
prop_valueOrEmpty_false :: Int -> Property
prop_valueOrEmpty_false a = valueOrEmpty False a === Nothing
prop_emptyOrValue :: Bool -> Int -> Bool
prop_emptyOrValue b a = (valueOrEmpty b a :: Maybe Int) /= emptyOrValue b a
prop_orEmpty :: Int -> Property
prop_orEmpty i =
let s = Sum i
in
(orEmpty (Just s) === Just s) .&&.
(orEmpty (Nothing :: Maybe (Sum Int)) === Just (Sum 0))
prop_applicative_monoid = eq $ prop_Monoid (T :: T (ApplicativeMonoid Identity (Sum Int)))
instance Arbitrary a => Arbitrary (Identity a) where
arbitrary = Identity <$> arbitrary
instance Arbitrary (m a) => Arbitrary (ApplicativeMonoid m a) where
arbitrary = ApplicativeMonoid <$> arbitrary
instance Arbitrary a => Arbitrary (Sum a) where
arbitrary = Sum <$> arbitrary
pure []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>src/P/List.hs<|end_filename|>
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
module P.List (
count
, ordNub
, ordNubBy
, sortNub
, lastMaybe
) where
import Data.Bool (Bool)
import Data.Eq (Eq(..))
import Data.Function ((.))
import Data.Int (Int)
import Data.List (reverse, length, filter)
import Data.List.NonEmpty ()
import Data.Maybe (Maybe, listToMaybe)
import Data.Ord (Ord(..), Ordering(..))
import qualified Data.Set as Set
-- | /O(n log n)/ Remove duplicate elements from a list.
--
-- Unlike 'Data.List.nub', this version requires 'Ord' and runs in
-- /O(n log n)/ instead of /O(n²)/. Like 'Data.List.nub' the output
-- order is identical to the input order.
-- See 'sortNub' for `nub` behaviour with sorted output.
--
-- > ordNub "foo bar baz" == "fo barz"
-- > ordNub [3,2,1,2,1] == [3,2,1]
-- > List.take 3 (ordNub [4,5,6,undefined]) == [4,5,6]
-- > ordNub xs == List.nub xs
--
ordNub :: Ord a => [a] -> [a]
ordNub =
ordNubBy compare
-- | /O(n log n)/ Behaves exactly like 'ordNub' except it uses a user-supplied
-- comparison function.
--
-- > ordNubBy (comparing length) ["foo","bar","quux"] == ["foo","quux"]
-- > ordNubBy (comparing fst) [("foo", 10),("foo", 20),("bar", 30)] == [("foo", 10),("bar", 30)]
--
ordNubBy :: (a -> a -> Ordering) -> [a] -> [a]
ordNubBy f =
let
loop seen = \case
[] ->
[]
x : xs ->
let
y =
UserOrd f x
in
if Set.member y seen then
loop seen xs
else
x : loop (Set.insert y seen) xs
in
loop Set.empty
-- | /O(n log n)/ Sort and remove duplicate elements from a list.
--
-- Unlike 'Data.List.nub', this version requires 'Ord' and runs in
-- /O(n log n)/ instead of /O(n²)/. The output list is returned in
-- sorted order.
--
-- > sortNub [3,2,1,2,1] == [1,2,3]
-- > sortNub xs == List.sort (List.nub xs)
--
sortNub :: Ord a => [a] -> [a]
sortNub = Set.toList . Set.fromList
lastMaybe :: [a] -> Maybe a
lastMaybe = listToMaybe . reverse
-- | count the number of elements satisfying a predicate in a list
count :: (a -> Bool) -> [a] -> Int
count predicate = length . filter predicate
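-- For example (illustrative, not part of the original module):
--
-- > count even [1,2,3,4] == 2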
--
-- Some machinery so we can use Data.Set with a custom comparator.
--
data UserOrd a =
UserOrd (a -> a -> Ordering) a
instance Eq (UserOrd a) where
(==) (UserOrd f x) (UserOrd _ y) =
f x y == EQ
instance Ord (UserOrd a) where
compare (UserOrd f x) (UserOrd _ y) =
f x y
<|start_filename|>test/Test/P/Maybe.hs<|end_filename|>
{-# LANGUAGE TemplateHaskell #-}
module Test.P.Maybe where
import P.Maybe
import Test.QuickCheck
prop_fromMaybeM_identity :: (Eq a, Show a) => Maybe a -> Property
prop_fromMaybeM_identity a = fromMaybeM Nothing a === a
prop_fromMaybeM_just :: (Eq a, Show a) => a -> Property
prop_fromMaybeM_just a = fromMaybeM (Left ()) (Just a) === Right a
prop_fromMaybeM_nothing :: (Eq a, Show a) => a -> Property
prop_fromMaybeM_nothing a = fromMaybeM (Left a) (Nothing :: Maybe ()) === Left a
return []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>src/P/Foldable.hs<|end_filename|>
{-# LANGUAGE NoImplicitPrelude #-}
module P.Foldable (
findMapM
, head
) where
import Control.Monad
import Data.Foldable
import Data.Function ((.))
import Data.Maybe
findMapM :: (Monad m, Foldable f) => (a -> m (Maybe b)) -> f a -> m (Maybe b)
findMapM f = foldr (\a a' -> f a >>= maybe a' (return . Just)) (return Nothing)
head :: (Foldable f) => f a -> Maybe a
head = foldr (\x _ -> return x) Nothing
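-- Illustrative usage (added for clarity, not part of the original module):
--
-- > head [1,2,3] == Just 1
-- > head ([] :: [Int]) == Nothing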
<|start_filename|>test/Test/P/Bifunctor/Trans.hs<|end_filename|>
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Test.P.Bifunctor.Trans where
import Control.Monad.Trans.Except (ExceptT(..), runExceptT)
import qualified Control.Monad.Trans.Writer.Lazy as Lazy
import qualified Control.Monad.Trans.Writer.Strict as Strict
import Data.Tuple (swap)
import Test.QuickCheck
import Test.QuickCheck.Function
import Test.P.Bifunctor.Trans.Laws
prop_bifunctor_ExceptT x f g =
bifunctorLaws
ExceptT
runExceptT
(x :: Either Int Int)
(f :: Fun Int Int)
(g :: Fun Int Int)
prop_bifunctor_StrictWriterT x f g =
bifunctorLaws
(Strict.WriterT . fmap swap)
(fmap swap . Strict.runWriterT)
(x :: (Int, Int))
(f :: Fun Int Int)
(g :: Fun Int Int)
prop_bifunctor_LazyWriterT x f g =
bifunctorLaws
(Lazy.WriterT . fmap swap)
(fmap swap . Lazy.runWriterT)
(x :: (Int, Int))
(f :: Fun Int Int)
(g :: Fun Int Int)
return []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>test/Test/P/Function.hs<|end_filename|>
{-# LANGUAGE TemplateHaskell #-}
module Test.P.Function where
import P.Function
import Test.QuickCheck
prop_plus1 :: Int -> Property
prop_plus1 n = (n > 0) ==>
applyN n (+1) 0 === n
return []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>src/P/Bool.hs<|end_filename|>
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
module P.Bool (
andA
, orA
, whenM
, unlessM
, ifM
, guardM
#if __GLASGOW_HASKELL__ >= 708
, Data.Bool.bool
#else
, bool
#endif
) where
import Control.Applicative (Applicative, liftA2)
import Control.Monad (Monad(..), MonadPlus, (=<<), when, unless, guard)
import Data.Bool (Bool, (&&), (||))
import Data.Function (flip)
#if __GLASGOW_HASKELL__ >= 708
import qualified Data.Bool
#else
bool :: a -> a -> Bool -> a
bool f t p = if p then t else f
#endif
whenM :: Monad m => m Bool -> m () -> m ()
whenM p m =
p >>= flip when m
unlessM :: Monad m => m Bool -> m () -> m ()
unlessM p m =
p >>= flip unless m
ifM :: Monad m => m Bool -> m a -> m a -> m a
ifM p x y =
p >>= \b -> if b then x else y
guardM :: MonadPlus m => m Bool -> m ()
guardM f = guard =<< f
-- | Logical disjunction.
orA :: Applicative f => f Bool -> f Bool -> f Bool
orA =
liftA2 (||)
-- | Logical conjunction.
andA :: Applicative f => f Bool -> f Bool -> f Bool
andA =
liftA2 (&&)
infixl 8 `andA`, `orA`
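-- Illustrative usage (added for clarity, not part of the original module):
--
-- > ifM (Just True) (Just 1) (Just 2) == Just 1
-- > andA (Just True) (Just False) == Just False
-- > bool "no" "yes" True == "yes"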
<|start_filename|>test/Test/P/Either.hs<|end_filename|>
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.P.Either where
import Control.Monad (return)
import Data.Either (Either(..))
import qualified Data.Either as E
import Data.Function (($))
import Data.Maybe (Maybe(..))
import Prelude (String)
import System.IO (IO)
import P (Bool, Int)
import P.Either
import Test.QuickCheck
prop_maybeToLeft_just :: Int -> String -> Property
prop_maybeToLeft_just l r = maybeToLeft r (Just l) === Left l
prop_maybeToLeft_nothing :: Int -> Property
prop_maybeToLeft_nothing r = maybeToLeft r (Nothing :: Maybe String) === Right r
prop_maybeToRight_just :: Int -> String -> Property
prop_maybeToRight_just l r = maybeToRight l (Just r) === Right r
prop_maybeToRight_nothing :: Int -> Property
prop_maybeToRight_nothing l = maybeToRight l (Nothing :: Maybe String) === Left l
prop_rightToMaybe_right :: Int -> Property
prop_rightToMaybe_right r = rightToMaybe (Right r) === Just r
prop_rightToMaybe_left :: Int -> Property
prop_rightToMaybe_left r = rightToMaybe (Left r) === (Nothing :: Maybe String)
prop_leftToMaybe_right :: Int -> Property
prop_leftToMaybe_right r = leftToMaybe (Left r) === Just r
prop_leftToMaybe_left :: Int -> Property
prop_leftToMaybe_left r = leftToMaybe (Right r) === (Nothing :: Maybe String)
prop_right :: Int -> String -> Property
prop_right l r = maybeToRight l (rightToMaybe $ Right r) === Right r
prop_left :: Int -> String -> Property
prop_left l r = maybeToLeft r (leftToMaybe $ Left l) === Left l
prop_maybe :: Int -> String -> Property
prop_maybe l r = rightToMaybe (maybeToRight l $ Just r) === Just r
prop_nothing :: Int -> String -> Property
prop_nothing l r = leftToMaybe (maybeToLeft r $ Just l) === Just l
prop_flipEither :: Either Int String -> Property
prop_flipEither e@(Left x) = flipEither e === Right x
prop_flipEither e@(Right x) = flipEither e === Left x
prop_lefts_list :: [Either Int ()] -> Property
prop_lefts_list xs = lefts xs === E.lefts xs
prop_rights_list :: [Either () Int] -> Property
prop_rights_list xs = rights xs === E.rights xs
prop_partitionEithers_list :: [Either () Int] -> Property
prop_partitionEithers_list xs = partitionEithers xs === E.partitionEithers xs
return []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>test/Test/P/Ord.hs<|end_filename|>
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Test.P.Ord where
import P.Ord
import Data.List (sortBy)
import Data.Ord (comparing)
import Test.QuickCheck
prop_maxOn_id a b = maxOn id a b === max a b
prop_sortOn_id as = sortOn snd as === (sortBy . comparing $ snd) as
return []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>src/P/Either.hs<|end_filename|>
{-# LANGUAGE NoImplicitPrelude #-}
module P.Either (
maybeToLeft
, maybeToRight
, leftToMaybe
, lefts
, rightToMaybe
, rights
, ecase
, flipEither
, partitionEithers
) where
import Data.Either (Either(..), either)
import qualified Data.Either as Either
import Data.Foldable (Foldable, toList)
import Data.Function ((.), flip, const)
import Data.Maybe (Maybe(..), maybe)
maybeToLeft :: r -> Maybe l -> Either l r
maybeToLeft r = maybe (Right r) Left
maybeToRight :: l -> Maybe r -> Either l r
maybeToRight l = maybe (Left l) Right
leftToMaybe :: Either l r -> Maybe l
leftToMaybe = either Just (const Nothing)
rightToMaybe :: Either l r -> Maybe r
rightToMaybe = either (const Nothing) Just
ecase :: Either l r -> (l -> b) -> (r -> b) -> b
ecase e l = flip (either l) e
flipEither :: Either a b -> Either b a
flipEither = either Right Left
lefts :: (Foldable f) => f (Either a b) -> [a]
lefts =
Either.lefts . toList
{-# SPECIALIZE lefts :: [Either a b] -> [a] #-}
rights :: (Foldable f) => f (Either a b) -> [b]
rights =
Either.rights . toList
{-# SPECIALIZE rights :: [Either a b] -> [b] #-}
partitionEithers :: (Foldable f) => f (Either a b) -> ([a], [b])
partitionEithers =
Either.partitionEithers . toList
{-# SPECIALIZE partitionEithers :: [Either a b] -> ([a], [b]) #-}
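-- Illustrative usage (added for clarity, not part of the original module):
--
-- > maybeToRight "err" (Just 5) == Right 5
-- > maybeToRight "err" Nothing == Left "err"
-- > flipEither (Left 1) == Right 1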
<|start_filename|>test/Test/P/Bool.hs<|end_filename|>
{-# LANGUAGE TemplateHaskell #-}
module Test.P.Bool where
import Control.Monad.Trans.Writer
import P.Bool
import Test.QuickCheck
prop_bool :: Bool -> Int -> Int -> Property
prop_bool p x y =
(if p then x else y) === bool y x p
prop_whenM_positive :: Int -> Int -> Property
prop_whenM_positive x y =
execWriter (whenM (w True [x]) (w () [y])) === [x, y]
prop_whenM_negative :: Int -> Int -> Property
prop_whenM_negative x y =
execWriter (whenM (w False [x]) (w () [y])) === [x]
prop_unlessM_positive :: Int -> Int -> Property
prop_unlessM_positive x y =
execWriter (unlessM (w True [x]) (w () [y]))=== [x]
prop_unlessM_negative :: Int -> Int -> Property
prop_unlessM_negative x y =
execWriter (unlessM (w False [x]) (w () [y])) === [x, y]
prop_whenM_unlessM_exclusive :: Bool -> Int -> Property
prop_whenM_unlessM_exclusive p x =
(execWriter $ do
unlessM (w p []) (w () [x])
whenM (w p []) (w () [x])) === [x]
prop_ifM_positive :: Int -> Int -> Int -> Int -> Int -> Property
prop_ifM_positive x y z a b =
runWriter (ifM (w True [x]) (w a [y]) (w b [z])) === (a, [x, y])
prop_ifM_negative :: Int -> Int -> Int -> Int -> Int -> Property
prop_ifM_negative x y z a b =
runWriter (ifM (w False [x]) (w a [y]) (w b [z])) === (b, [x, z])
prop_ifM_exclusive :: Bool -> Int -> Int -> Int -> Property
prop_ifM_exclusive p x y a =
runWriter (ifM (w p [x]) (w a [y]) (w a [y])) === (a, [x, y])
w :: a -> w -> Writer w a
w = curry writer
return []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>src/P.hs<|end_filename|>
{-# LANGUAGE CPP #-}
module P (
module X
) where
import Prelude as X (
Enum
, Bounded
, minBound
, maxBound
, ($!)
, seq
)
import P.Applicative as X
import P.Bifunctor.Trans as X
import P.Bool as X
import P.Either as X
import P.Foldable as X
import P.Functor as X
import P.Maybe as X
import P.Maybe.Strict as X
import P.Monad as X
import P.Monoid as X
import P.Ord as X
import P.List as X
import P.Function as X
import P.Debug as X
import P.Show as X
import Control.Applicative as X (
Applicative(..)
, Alternative(..)
, Const(..)
, WrappedMonad(..)
, WrappedArrow(..)
, ZipList(..)
, (<**>)
, liftA
, liftA2
, liftA3
, optional
)
import Control.DeepSeq as X (
NFData(..)
, ($!!)
, deepseq
, force
)
import Data.Eq as X
import Data.Bifunctor as X (Bifunctor(..))
import Data.Bitraversable as X (
Bitraversable (..)
, bisequenceA
, bifor
)
import Data.Bifoldable as X (
Bifoldable (..)
, bisequence_
, bifor_
)
import Data.Bool as X
import Data.Char as X (Char)
import Data.List as X (
intercalate
, isPrefixOf
, drop
, splitAt
, break
, filter
, reverse
#if (__GLASGOW_HASKELL__ < 710)
, length
, null
#endif
)
import Data.List.NonEmpty as X (nonEmpty)
import Data.Maybe as X hiding (fromJust)
import Data.Either as X hiding (
lefts
, partitionEithers
, rights
)
import Data.Int as X
import Data.Ord as X
import Data.Tuple as X
import Data.Traversable as X
import Data.Text as X (Text)
import Data.Foldable as X hiding (
foldr1
, foldl1
, maximum
, maximumBy
, minimum
, minimumBy
)
import GHC.Num as X
import GHC.Real as X
import GHC.Float as X
import Text.Show as X
import Text.Read as X (Read, reads, readMaybe, readEither)
<|start_filename|>test/Test/P/Bifunctor/Trans/Laws.hs<|end_filename|>
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
module Test.P.Bifunctor.Trans.Laws (
bifunctorLaws
) where
import Data.Functor.Identity (Identity(..))
import P.Bifunctor.Trans
import Test.QuickCheck
import Test.QuickCheck.Function
bifunctorLaws
:: forall
(p :: * -> * -> *)
(t :: * -> (* -> *) -> * -> *)
(f :: * -> *)
x y a b.
(BifunctorTrans t, Functor f)
=> (Eq (p x a), Show (p x a))
=> (Eq (p y b), Show (p y b))
=> (forall z c. Identity (p z c) -> t z f c)
-> (forall z c. t z f c -> Identity (p z c))
-> p x a
-> Fun x y
-> Fun a b
-> Property
bifunctorLaws mkT runT p (Fun _ f) (Fun _ g) =
conjoin [
functionEq mkT runT p (bimapT id id) id
, functionEq mkT runT p (firstT id) id
, functionEq mkT runT p (secondT id) id
, functionEq mkT runT p (bimapT f g) (firstT f . secondT g)
]
functionEq
:: (Eq q, Show q)
=> (Identity p -> t)
-> (s -> Identity q)
-> p
-> (t -> s)
-> (t -> s)
-> Property
functionEq mkT runT p f g =
runIdentity (runT (f (mkT (Identity p)))) ===
runIdentity (runT (g (mkT (Identity p))))
<|start_filename|>src/P/Function.hs<|end_filename|>
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
module P.Function (
-- * Prelude re-exports
id
, const
, (.)
, flip
, ($)
-- * Other combinators
, (&)
, fix
, on
-- * Extensions
, applyN
) where
import Data.Int (Int)
import Data.List (foldr, replicate)
import Data.Function (id, const, (.), flip, ($))
import Data.Function (fix, on)
#if (__GLASGOW_HASKELL__ >= 710)
import Data.Function ((&))
#else
infixl 1 &
-- | '&' is a reverse application operator. This provides notational
-- convenience. Its precedence is one higher than that of the forward
-- application operator '$', which allows '&' to be nested in '$'.
(&) :: a -> (a -> b) -> b
x & f =
f x
#endif
applyN :: Int -> (a -> a) -> a -> a
applyN n f =
foldr (.) id (replicate n f)
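-- For example (illustrative, not part of the original module):
--
-- > applyN 3 (+2) 0 == 6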
<|start_filename|>test/Test/P/Foldable.hs<|end_filename|>
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.P.Foldable where
import Control.Applicative (pure)
import Control.Monad.State (StateT(..), State, runState)
import Data.Bool (Bool)
import Data.Either (Either(..))
import Data.Function (($), const)
import Data.Int (Int)
import qualified Data.List as List
import Data.Maybe (Maybe(..))
import P.Foldable
import Prelude (Eq(..), Num(..))
import System.IO (IO)
import Test.QuickCheck
prop_findMapM_first :: Int -> [Int] -> Property
prop_findMapM_first x xs =
runState (findMapM found (x : xs)) 0 === (Just $ x * 2, 1)
prop_findMapM_last :: Int -> [Int] -> Property
prop_findMapM_last x xs = List.notElem x xs ==>
let f z = if z == x then found z else notfound
in runState (findMapM f (xs List.++ [x])) 0 === (Just $ x * 2, List.length xs + 1)
prop_findMapM_effects :: [Int] -> Property
prop_findMapM_effects xs =
runState (findMapM (const notfound) xs) 0 === (Nothing, List.length xs)
prop_head_either_left :: Int -> Property
prop_head_either_left r = head (Left r) === (Nothing :: Maybe ())
prop_head_either_right :: Int -> Property
prop_head_either_right r = head (Right r) === pure r
prop_head_list_nonempty :: Int -> [Int] -> Property
prop_head_list_nonempty x xs = head (x:xs) === pure x
prop_head_list_empty :: Property
prop_head_list_empty = head ([] :: [Int]) === Nothing
found :: Int -> State Int (Maybe Int)
found z = StateT $ \n -> pure (Just $ z * 2, n + 1)
notfound :: State Int (Maybe Int)
notfound = StateT $ \n -> pure (Nothing, n + 1)
pure []
tests :: IO Bool
tests = $quickCheckAll
<|start_filename|>src/P/Debug.hs<|end_filename|>
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
module P.Debug (
  -- * Functions for development/debugging only
  -- | Should not be used in production code, but might be useful
  -- during development or debugging
undefined
, error
, trace
, traceM
, traceIO
) where
import qualified Prelude as P
import qualified Debug.Trace as T
#if MIN_VERSION_base(4,9,0)
import GHC.Stack (HasCallStack)
#endif
{-# WARNING undefined "Do not use 'undefined' in production code" #-}
#if MIN_VERSION_base(4,9,0)
undefined :: HasCallStack => a
#else
undefined :: a
#endif
undefined = P.undefined
{-# WARNING error "Do not use 'error' in production code" #-}
#if MIN_VERSION_base(4,9,0)
error :: HasCallStack => P.String -> a
#else
error :: P.String -> a
#endif
error = P.error
{-# WARNING trace "Do not use 'trace' in production code" #-}
trace :: P.String -> a -> a
trace = T.trace
{-# WARNING traceM "Do not use 'traceM' in production code" #-}
traceM :: P.Monad m => P.String -> m ()
traceM = T.traceM
{-# WARNING traceIO "Do not use 'traceIO' in production code" #-}
traceIO :: P.String -> P.IO ()
traceIO = T.traceIO
| tmcgilchrist/p |
<|start_filename|>materialdrawer/src/main/java/com/mikepenz/materialdrawer/util/DrawerUtils.kt<|end_filename|>
@file:JvmName("DrawerUtils")
package com.mikepenz.materialdrawer.util
import android.annotation.SuppressLint
import android.content.Context
import android.content.res.ColorStateList
import android.graphics.Color
import android.graphics.drawable.*
import android.os.Build
import android.view.Gravity
import android.view.View
import android.view.ViewGroup
import android.widget.LinearLayout
import android.widget.RelativeLayout
import androidx.annotation.AttrRes
import androidx.annotation.DimenRes
import androidx.appcompat.content.res.AppCompatResources
import androidx.core.graphics.drawable.DrawableCompat
import androidx.core.view.ViewCompat
import androidx.drawerlayout.widget.DrawerLayout
import com.google.android.material.shape.MaterialShapeDrawable
import com.google.android.material.shape.ShapeAppearanceModel
import com.mikepenz.materialdrawer.R
import com.mikepenz.materialdrawer.model.AbstractDrawerItem
import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem
import com.mikepenz.materialdrawer.widget.MaterialDrawerSliderView
/**
* helpful functions for working with the [MaterialDrawerSliderView]
*/
/**
* helper function to handle the onClick of the footer
*/
internal fun onFooterDrawerItemClick(sliderView: MaterialDrawerSliderView, drawerItem: IDrawerItem<*>, v: View, fireOnClick: Boolean?) {
val checkable = drawerItem.isSelectable
if (checkable) {
sliderView.resetStickyFooterSelection()
v.isActivated = true
v.isSelected = true
//remove the selection in the list
sliderView.selectExtension.deselect()
//find the position of the clicked footer item
if (sliderView.stickyFooterView != null && sliderView.stickyFooterView is LinearLayout) {
val footer = sliderView.stickyFooterView as LinearLayout
for (i in 0 until footer.childCount) {
if (footer.getChildAt(i) === v) {
sliderView.currentStickyFooterSelection = i
break
}
}
}
}
if (fireOnClick != null) {
var consumed = false
if (fireOnClick) {
if (drawerItem is AbstractDrawerItem<*, *> && drawerItem.onDrawerItemClickListener != null) {
consumed = drawerItem.onDrawerItemClickListener?.invoke(v, drawerItem, -1)
?: false
}
if (sliderView.onDrawerItemClickListener != null) {
consumed = sliderView.onDrawerItemClickListener?.invoke(v, drawerItem, -1)
?: false
}
}
if (!consumed) {
//close the drawer after click
sliderView.closeDrawerDelayed()
}
}
}
/**
* helper function to handle the headerView
*/
internal fun handleHeaderView(sliderView: MaterialDrawerSliderView) {
//use the AccountHeader if set
sliderView.accountHeader?.let {
if (sliderView.accountHeaderSticky) {
sliderView.stickyHeaderView = it
} else {
sliderView._headerDivider = it.dividerBelowHeader
sliderView._headerPadding = it.paddingBelowHeader
sliderView.headerView = it
}
}
//sticky header view
sliderView.stickyHeaderView?.let {
sliderView.findViewById<View>(R.id.material_drawer_sticky_header)?.let { header ->
sliderView.removeView(header)
}
//add the sticky footer view and align it to the bottom
val layoutParams = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT)
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_TOP, 1)
it.id = R.id.material_drawer_sticky_header
sliderView.addView(it, 0, layoutParams)
//now align the recyclerView below the stickyFooterView ;)
val layoutParamsListView = sliderView.recyclerView.layoutParams as RelativeLayout.LayoutParams
layoutParamsListView.addRule(RelativeLayout.BELOW, R.id.material_drawer_sticky_header)
sliderView.recyclerView.layoutParams = layoutParamsListView
if (sliderView.stickyHeaderShadow) {
//add a shadow
if (Build.VERSION.SDK_INT >= 21) {
it.background = ColorDrawable(Color.WHITE) // set a background color or the elevation will not work; this is intentional
it.elevation = sliderView.context.resources.getDimensionPixelSize(R.dimen.material_drawer_sticky_header_elevation).toFloat()
} else {
val view = View(sliderView.context)
view.setBackgroundResource(R.drawable.material_drawer_shadow_bottom)
sliderView.addView(view, RelativeLayout.LayoutParams.MATCH_PARENT, sliderView.context.resources.getDimensionPixelSize(R.dimen.material_drawer_sticky_header_elevation))
//now align the shadow below the stickyHeader ;)
val lps = view.layoutParams as RelativeLayout.LayoutParams
lps.addRule(RelativeLayout.BELOW, R.id.material_drawer_sticky_header)
view.layoutParams = lps
}
}
if (Build.VERSION.SDK_INT >= 21) {
sliderView.elevation = 0f
}
//remove the padding of the recyclerView again we have the header on top of it
sliderView.recyclerView.setPadding(0, 0, 0, 0)
}
}
/**
* small helper to rebuild the FooterView
*/
internal fun rebuildStickyFooterView(sliderView: MaterialDrawerSliderView) {
sliderView.stickyFooterView?.let {
it.removeAllViews()
//create the divider
if (sliderView.stickyFooterDivider) {
addStickyFooterDivider(it.context, it)
}
//fill the footer with items
fillStickyDrawerItemFooter(sliderView, it) { v ->
(v.getTag(R.id.material_drawer_item) as? IDrawerItem<*>)?.let { drawerItem ->
onFooterDrawerItemClick(sliderView, drawerItem, v, true)
}
}
it.visibility = View.VISIBLE
} ?: run {
//there was no footer yet. now just create one
handleFooterView(sliderView) { v ->
(v.getTag(R.id.material_drawer_item) as? IDrawerItem<*>)?.let { drawerItem ->
onFooterDrawerItemClick(sliderView, drawerItem, v, true)
}
}
}
sliderView.setStickyFooterSelection(sliderView.currentStickyFooterSelection, false)
}
/**
* helper function to handle the footerView
*/
internal fun handleFooterView(sliderView: MaterialDrawerSliderView, onClickListener: View.OnClickListener) {
val ctx = sliderView.context
//use the StickyDrawerItems if set
if (sliderView.stickyDrawerItems.size > 0) {
sliderView._stickyFooterView = buildStickyDrawerItemFooter(sliderView, onClickListener)
}
//sticky footer view
sliderView.stickyFooterView?.let {
//add the sticky footer view and align it to the bottom
val layoutParams = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT)
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM, 1)
it.id = R.id.material_drawer_sticky_footer
sliderView.addView(it, layoutParams)
/**
if ((sliderView.mTranslucentNavigationBar || drawer.mFullscreen) && Build.VERSION.SDK_INT >= 19) {
it.setPadding(0, 0, 0, UIUtils.getNavigationBarHeight(ctx))
}
**/
//now align the recyclerView above the stickyFooterView ;)
val layoutParamsListView = sliderView.recyclerView.layoutParams as RelativeLayout.LayoutParams
layoutParamsListView.addRule(RelativeLayout.ABOVE, R.id.material_drawer_sticky_footer)
sliderView.recyclerView.layoutParams = layoutParamsListView
//handle shadow on top of the sticky footer
if (sliderView.stickyFooterShadow) {
sliderView.stickyFooterShadowView = View(ctx).also { stickyFooterShadowView ->
stickyFooterShadowView.setBackgroundResource(R.drawable.material_drawer_shadow_top)
sliderView.addView(stickyFooterShadowView, RelativeLayout.LayoutParams.MATCH_PARENT, ctx.resources.getDimensionPixelSize(R.dimen.material_drawer_sticky_footer_elevation))
//now align the shadow below the stickyHeader ;)
val lps = stickyFooterShadowView.layoutParams as RelativeLayout.LayoutParams
lps.addRule(RelativeLayout.ABOVE, R.id.material_drawer_sticky_footer)
stickyFooterShadowView.layoutParams = lps
}
}
//remove the padding of the recyclerView again we have the footer below it
sliderView.recyclerView.setPadding(sliderView.recyclerView.paddingLeft, sliderView.recyclerView.paddingTop, sliderView.recyclerView.paddingRight, ctx.resources.getDimensionPixelSize(R.dimen.material_drawer_padding))
}
}
/**
* build the sticky footer item view
*/
internal fun buildStickyDrawerItemFooter(sliderView: MaterialDrawerSliderView, onClickListener: View.OnClickListener): ViewGroup {
//create the container view
val linearLayout = LinearLayout(sliderView.context)
linearLayout.layoutParams = LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)
linearLayout.orientation = LinearLayout.VERTICAL
//set the background color to the drawer background color (if it has alpha the shadow won't be visible)
//linearLayout.background = sliderView.background
//create the divider
if (sliderView.stickyFooterDivider) {
addStickyFooterDivider(sliderView.context, linearLayout)
}
fillStickyDrawerItemFooter(sliderView, linearLayout, onClickListener)
return linearLayout
}
/**
* adds the shadow to the stickyFooter
*
* @param ctx
* @param footerView
*/
private fun addStickyFooterDivider(ctx: Context, footerView: ViewGroup) {
val divider = LinearLayout(ctx)
val dividerParams = LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)
divider.minimumHeight = ctx.resources.getDimensionPixelSize(R.dimen.material_drawer_sticky_footer_divider)
divider.orientation = LinearLayout.VERTICAL
divider.setBackgroundColor(ctx.getDividerColor())
footerView.addView(divider, dividerParams)
}
/**
* helper function to fill the sticky footer with its elements
*/
internal fun fillStickyDrawerItemFooter(sliderView: MaterialDrawerSliderView, container: ViewGroup, onClickListener: View.OnClickListener) {
//add all drawer items
for (drawerItem in sliderView.stickyDrawerItems) {
val view = drawerItem.generateView(container.context, container)
view.tag = drawerItem
if (drawerItem.isEnabled) {
//UIUtils.setBackground(view, UIUtils.getSelectableBackground(container.getContext(), selected_color, drawerItem.isSelectedBackgroundAnimated()));
view.setOnClickListener(onClickListener)
}
container.addView(view)
//for android API 17 --> Padding not applied via xml
setDrawerVerticalPadding(view)
}
//explicitly reset the container padding; the child padding is not applied unless the container padding is set as well
container.setPadding(0, 0, 0, 0)
}
/**
* helper to extend the layoutParams of the drawer
*
* @param params
* @return
*/
@SuppressLint("RtlHardcoded")
fun processDrawerLayoutParams(drawer: MaterialDrawerSliderView, params: DrawerLayout.LayoutParams?): DrawerLayout.LayoutParams? {
if (params != null) {
val drawerLayout = drawer.drawerLayout ?: return null
val ctx = drawerLayout.context
val lp = drawerLayout.layoutParams as DrawerLayout.LayoutParams
if (lp.gravity == Gravity.RIGHT || lp.gravity == Gravity.END) {
params.rightMargin = 0
if (Build.VERSION.SDK_INT >= 17) {
params.marginEnd = 0
}
params.leftMargin = ctx.resources.getDimensionPixelSize(R.dimen.material_drawer_margin)
if (Build.VERSION.SDK_INT >= 17) {
params.marginEnd = ctx.resources.getDimensionPixelSize(R.dimen.material_drawer_margin)
}
}
val customWidth = drawer.customWidth ?: -1
if (customWidth > -1) {
params.width = customWidth
} else {
params.width = getOptimalDrawerWidth(ctx)
}
}
return params
}
/**
* helper function to get a person placeHolder drawable
*/
fun getPlaceHolder(context: Context): Drawable {
val accountDrawable = AppCompatResources.getDrawable(context, R.drawable.material_drawer_ico_account_layer) as LayerDrawable
val placeholderSize = context.resources.getDimensionPixelSize(R.dimen.material_drawer_profile_icon_placeholder)
if (Build.VERSION.SDK_INT >= 23) {
accountDrawable.setLayerWidth(0, placeholderSize)
accountDrawable.setLayerHeight(0, placeholderSize)
}
DrawableCompat.wrap(accountDrawable.getDrawable(0)).let {
DrawableCompat.setTint(it, context.getThemeColor(R.attr.colorPrimary))
accountDrawable.setDrawableByLayerId(R.id.background, it)
}
val iconSize = context.resources.getDimensionPixelSize(R.dimen.material_drawer_profile_icon_placeholder_icon)
if (Build.VERSION.SDK_INT >= 23) {
accountDrawable.setLayerWidth(1, iconSize)
accountDrawable.setLayerHeight(1, iconSize)
accountDrawable.setLayerGravity(1, Gravity.CENTER)
}
DrawableCompat.wrap(accountDrawable.getDrawable(1)).let {
DrawableCompat.setTint(it, context.getThemeColor(R.attr.colorAccent))
accountDrawable.setDrawableByLayerId(R.id.account, it)
}
return accountDrawable
//IconicsDrawable(ctx, MaterialDrawerFont.Icon.mdf_person).color(IconicsColor.colorInt(ctx.getThemeColor(R.attr.colorAccent))).backgroundColor(IconicsColor.colorInt(ctx.getThemeColor(R.attr.colorPrimary))).size(IconicsSize.dp(56)).padding(IconicsSize.dp(16))
}
/**
 * helper to set the vertical padding on the DrawerItems
 * this is required because on API level 17 the padding set via XML is ignored
*/
fun setDrawerVerticalPadding(view: View) {
val verticalPadding = view.context.resources.getDimensionPixelSize(R.dimen.material_drawer_vertical_padding)
view.setPadding(verticalPadding, 0, verticalPadding, 0)
}
/**
* Util method to theme the drawer item view's background (and foreground if possible)
*
* @param ctx the context to use
* @param view the view to theme
* @param selectedColor the selected color to use
* @param animate true if we want to animate the StateListDrawable
* @param shapeAppearanceModel defines the shape appearance to use for items starting API 21
* @param paddingTopBottomRes padding on top and bottom of the drawable for selection drawable
* @param paddingStartRes padding to the beginning of the selection drawable
* @param paddingEndRes padding to the end of the selection drawable
 * @param highlightColorRes the color for the highlight to use (e.g. when the item is touched and gets filled)
*/
fun themeDrawerItem(
ctx: Context,
view: View,
selectedColor: Int,
animate: Boolean,
shapeAppearanceModel: ShapeAppearanceModel,
@DimenRes paddingTopBottomRes: Int = R.dimen.material_drawer_item_background_padding_top_bottom,
@DimenRes paddingStartRes: Int = R.dimen.material_drawer_item_background_padding_start,
@DimenRes paddingEndRes: Int = R.dimen.material_drawer_item_background_padding_end,
@AttrRes highlightColorRes: Int = R.attr.colorControlHighlight,
/* a hint for the drawable if it should already be selected at the very moment */
isSelected: Boolean = false
) {
val selected: Drawable
val unselected: Drawable
// Material 2.0 styling
val paddingTopBottom = ctx.resources.getDimensionPixelSize(paddingTopBottomRes)
val paddingStart = ctx.resources.getDimensionPixelSize(paddingStartRes)
val paddingEnd = ctx.resources.getDimensionPixelSize(paddingEndRes)
// define normal selected background
val gradientDrawable = MaterialShapeDrawable(shapeAppearanceModel)
gradientDrawable.fillColor = ColorStateList.valueOf(selectedColor)
selected = InsetDrawable(gradientDrawable, paddingStart, paddingTopBottom, paddingEnd, paddingTopBottom)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// define mask for ripple
val gradientMask = MaterialShapeDrawable(shapeAppearanceModel)
gradientMask.fillColor = ColorStateList.valueOf(Color.BLACK)
val mask = InsetDrawable(gradientMask, paddingStart, paddingTopBottom, paddingEnd, paddingTopBottom)
unselected = RippleDrawable(ColorStateList(arrayOf(intArrayOf()), intArrayOf(ctx.getThemeColor(highlightColorRes))), null, mask)
} else {
// define touch drawable
val touchDrawable = MaterialShapeDrawable(shapeAppearanceModel)
touchDrawable.fillColor = ColorStateList.valueOf(ctx.getThemeColor(highlightColorRes))
val touchInsetDrawable = InsetDrawable(touchDrawable, paddingStart, paddingTopBottom, paddingEnd, paddingTopBottom)
val unselectedStates = StateListDrawable()
//if possible and wanted we enable animating across states
if (animate) {
val duration = ctx.resources.getInteger(android.R.integer.config_shortAnimTime)
unselectedStates.setEnterFadeDuration(duration)
unselectedStates.setExitFadeDuration(duration)
}
unselectedStates.addState(intArrayOf(android.R.attr.state_pressed), touchInsetDrawable)
unselectedStates.addState(intArrayOf(), ColorDrawable(Color.TRANSPARENT))
unselected = unselectedStates
}
val states = StateListDrawable()
//if possible and wanted we enable animating across states
if (animate) {
val duration = ctx.resources.getInteger(android.R.integer.config_shortAnimTime)
states.setEnterFadeDuration(duration)
states.setExitFadeDuration(duration)
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
states.addState(intArrayOf(android.R.attr.state_selected), selected)
states.addState(intArrayOf(), ColorDrawable(Color.TRANSPARENT))
ViewCompat.setBackground(view, states)
view.foreground = unselected
} else {
states.addState(intArrayOf(android.R.attr.state_selected), selected)
states.addState(intArrayOf(), unselected)
ViewCompat.setBackground(view, states)
}
if (isSelected && animate) {
states.state = intArrayOf(android.R.attr.state_selected)
states.jumpToCurrentState()
}
}
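// Illustrative call site (hypothetical, added for clarity; `itemView` and the chosen
// ShapeAppearanceModel are assumptions, not taken from the original file):
// themeDrawerItem(ctx, itemView, selectedColor, animate = true,
//     shapeAppearanceModel = ShapeAppearanceModel.builder().build())
// The padding and highlight parameters fall back to the material_drawer defaults declared above.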
/**
* helper to create a stateListDrawable for the icon
*/
internal fun getIconStateList(icon: Drawable, selectedIcon: Drawable): StateListDrawable {
val iconStateListDrawable = StateListDrawable()
iconStateListDrawable.addState(intArrayOf(android.R.attr.state_selected), selectedIcon)
iconStateListDrawable.addState(intArrayOf(), icon)
return iconStateListDrawable
}
/**
* helper to calculate the optimal drawer width
*/
fun getOptimalDrawerWidth(context: Context): Int {
val possibleMinDrawerWidth = context.getScreenWidth() - context.getActionBarHeight()
val maxDrawerWidth = context.resources.getDimensionPixelSize(R.dimen.material_drawer_width)
return possibleMinDrawerWidth.coerceAtMost(maxDrawerWidth)
} | mikepenz/MaterialDrawer |
<|start_filename|>src/stridedview.jl<|end_filename|>
# StridedView
struct StridedView{T,N,A<:DenseArray,F<:Union{FN,FC,FA,FT}} <: AbstractStridedView{T,N,F}
parent::A
size::NTuple{N,Int}
strides::NTuple{N,Int}
offset::Int
op::F
end
function StridedView(parent::Array{S},
size::NTuple{N,Int} = size(parent),
strides::NTuple{N,Int} = strides(parent),
offset::Int = 0,
op::F = identity) where {S, N, F}
T = Base.promote_op(op, S)
# reshape array to vector in order to reduce number of element types
StridedView{T,N,Vector{S},F}(reshape(parent, length(parent)), size,
_normalizestrides(size, strides), offset, op)
end
function StridedView(parent::A,
size::NTuple{N,Int} = size(parent),
strides::NTuple{N,Int} = strides(parent),
offset::Int = 0,
op::F = identity) where {A<:DenseArray, N, F}
T = Base.promote_op(op, eltype(parent))
StridedView{T,N,A,F}(parent, size, strides, offset, op)
end
StridedView(a::StridedView) = a
StridedView(a::Adjoint) = StridedView(a')'
StridedView(a::Transpose) = transpose(StridedView(transpose(a)))
StridedView(a::Base.SubArray) = sview(StridedView(a.parent), a.indices...)
StridedView(a::Base.ReshapedArray) = sreshape(StridedView(a.parent), a.dims)
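# Illustrative usage sketch (added for clarity, not part of the original file):
#
#   a = StridedView(rand(3, 4))   # wraps the Array without copying
#   b = permutedims(a, (2, 1))    # only permutes size/strides; shares data with `a`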
# Methods for StridedView
Base.size(a::StridedView) = a.size
Base.strides(a::StridedView) = a.strides
Base.stride(a::StridedView{<:Any, 0}, n::Int) = 1
Base.stride(a::StridedView{<:Any, N}, n::Int) where N =
(n <= N) ? a.strides[n] : a.strides[N]*a.size[N]
offset(a::StridedView) = a.offset
Base.parent(a::StridedView) = a.parent
function blasstrides(a::StridedView{<:Any,2})
    # canonicalize strides to make them compatible with gemm
if size(a, 2) == 1 && stride(a, 1) == 1
return StridedView(a.parent, a.size, (1, size(a,1)), a.offset, a.op)
else
return a
end
end
Base.similar(a::StridedView, ::Type{T}, dims::NTuple{N,Int}) where {N,T} =
StridedView(similar(a.parent, T, dims))
Base.copy(a::StridedView) = copyto!(similar(a), a)
Base.unsafe_convert(::Type{Ptr{T}}, a::StridedView{T}) where T =
pointer(a.parent, a.offset+1)
Base.dataids(a::StridedView) = Base.dataids(a.parent)
Base.IndexStyle(::Type{<:StridedView}) = Base.IndexCartesian()
# Indexing with N integer arguments
@inline function Base.getindex(a::StridedView{<:Any,N}, I::Vararg{Int,N}) where N
@boundscheck checkbounds(a, I...)
@inbounds r = a.op(a.parent[a.offset+_computeind(I, a.strides)])
return r
end
@inline function Base.setindex!(a::StridedView{<:Any,N}, v, I::Vararg{Int,N}) where N
@boundscheck checkbounds(a, I...)
@inbounds a.parent[a.offset+_computeind(I, a.strides)] = a.op(v)
return a
end
# force inlining so that view typically does not need to be created
@inline function Base.getindex(a::StridedView{<:Any,N},
I::Vararg{SliceIndex,N}) where N
StridedView(a.parent, _computeviewsize(a.size, I), _computeviewstrides(a.strides, I),
a.offset + _computeviewoffset(a.strides, I), a.op)
end
@propagate_inbounds Base.getindex(a::StridedView, I::ParentIndex) =
a.op(getindex(a.parent, I.i))
@propagate_inbounds Base.setindex!(a::StridedView, v, I::ParentIndex) =
(setindex!(a.parent, a.op(v), I.i); return a)
# Specialized methods for `StridedView` which produce views/share data
Base.conj(a::StridedView{<:Real}) = a
Base.conj(a::StridedView) = StridedView(a.parent, a.size, a.strides, a.offset, _conj(a.op))
@inline function Base.permutedims(a::StridedView{<:Any,N}, p) where {N}
_isperm(N, p) || throw(ArgumentError("Invalid permutation of length $N: $p"))
newsize = TupleTools._permute(a.size, p)
newstrides = TupleTools._permute(a.strides, p)
return StridedView(a.parent, newsize, newstrides, a.offset, a.op)
end
_isperm(N::Integer, p::AbstractVector) = (length(p) == N && isperm(p))
_isperm(N::Integer, p::NTuple{M,Integer}) where M = (M == N && TupleTools.isperm(p))
_isperm(N::Integer, p) = false
LinearAlgebra.transpose(a::StridedView{<:Number,2}) = permutedims(a, (2,1))
LinearAlgebra.adjoint(a::StridedView{<:Number,2}) = permutedims(conj(a), (2,1))
LinearAlgebra.adjoint(a::StridedView{<:Any,2}) = # act recursively, like Base
permutedims(StridedView(a.parent, a.size, a.strides, a.offset, _adjoint(a.op)), (2,1))
LinearAlgebra.transpose(a::StridedView{<:Any,2}) = # act recursively, like Base
permutedims(StridedView(a.parent, a.size, a.strides, a.offset, _transpose(a.op)), (2,1))
Base.map(::FC, a::StridedView{<:Real}) = a
Base.map(::FT, a::StridedView{<:Number}) = a
Base.map(::FA, a::StridedView{<:Number}) = conj(a)
Base.map(::FC, a::StridedView) =
StridedView(a.parent, a.size, a.strides, a.offset, _conj(a.op))
Base.map(::FT, a::StridedView) =
StridedView(a.parent, a.size, a.strides, a.offset, _transpose(a.op))
Base.map(::FA, a::StridedView) =
StridedView(a.parent, a.size, a.strides, a.offset, _adjoint(a.op))
@inline function sreshape(a::StridedView, newsize::Dims)
if any(isequal(0), newsize)
any(isequal(0), size(a)) || throw(DimensionMismatch())
newstrides = one.(newsize)
else
newstrides = _computereshapestrides(newsize, _simplify(size(a), strides(a))...)
end
StridedView(a.parent, newsize, newstrides, a.offset, a.op)
end
| amilsted/Strided.jl |
<|start_filename|>lithophane.js<|end_filename|>
#!/usr/bin/env node
// generic utilities
// takes an RGB value and converts it to a single grayscale value
var convertToGrayscale = function(r,g,b) {
return (r * 0.2989) + (g * 0.5870) + (b * 0.1140)
}
// allows you to normalize numbers on a scale
// in this case, we take grayscale values from 0 to 255,
// scale them down to 16 levels (0 to 15), and invert them so
// darker pixels map to taller (thicker) sections of the print.
//
// Ideally, you could write your own function here to tweak the
// grayscale values as needed if a linear approach doesn't work
// when the lithophanes are printed.
//
var normalizeValue = function (val, from, to) {
return (to-1) - Math.floor(val * to / from);
}
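// Worked example (added for illustration): with 256 input levels and 16 output
// levels, normalizeValue(0, 256, 16) === 15 (black maps to the tallest layer) and
// normalizeValue(255, 256, 16) === 0 (white maps to the flat base).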
// image processing functions
// function that takes an ndarray of pixel data and converts it to a
// simpler array of values representing the physical height of each pixel.
//
var getImageHeights = function(d) {
var result = {
data: []
};
// GIFs pass in an extra parameter for frames which
// we're ignoring, but it requires us to use different
// values for grabbing the data. Other than that, the
// logic is the same
if (d.dimension == 4) {
result.width = d.shape[1]
result.height = d.shape[2]
for (var i=0; i<result.width; i++) {
result.data.push([]);
// for each pixel, get its grayscale value, normalize it from 0-15
// and add it to the array
for (var j=0; j<result.height; j++) {
var g = convertToGrayscale(d.get(0,i,j,0), d.get(0,i,j,1), d.get(0,i,j,2));
result.data[i][j] = normalizeValue(g, 256, 16);
}
}
} else {
result.width = d.shape[0]
result.height = d.shape[1]
for (var i=0; i<result.width; i++) {
result.data.push([]);
// for each pixel, get its grayscale value, normalize it from 0-15
// and add it to the array
for (var j=0; j<result.height; j++) {
var g = convertToGrayscale(d.get(i,j,0), d.get(i,j,1), d.get(i,j,2));
result.data[i][j] = normalizeValue(g, 256, 16);
}
}
}
return result;
}
var getModelAreas = function(d) {
// these values are based on settings in the 3D printer
var border = 1.0
var base = 0.4 // means the model will always have a 0.4mm base
  var scale = 0.2 // each grayscale level (0-15) adds 0.2 mm of height
var zenith = base + (scale*10);
var maxWidth = scale*d.width;
var maxHeight = scale*d.height;
var areas = [];
for (var w=0; w<d.width; w++) {
for (var h=0; h<d.height; h++) {
// for each pixel, you're creating a box that's as high as its grayscale
// value. Rather than make tens of thousands of boxes and union them all
// we're individually mapping each of the faces of the resulting object
var x0 = w*scale;
var x1 = x0 + scale;
var y0 = h*scale;
var y1 = y0 + scale;
var z0 = 0;
var z1 = parseFloat((base + (scale * d.data[w][h])).toFixed(1));
// back face (bottom of the model)
areas.push([[x0,y0,z0],[x1,y0,z0],[x1,y1,z0],[x0,y1,z0]])
// front face
areas.push([[x0,y0,z1],[x0,y1,z1],[x1,y1,z1],[x1,y0,z1]])
// top border wall
if (h == 0) {
var t = [];
t.push([x1,y0,z0])
t.push([x0,y0,z0]);
if (w > 0) {
if (d.data[w][h] > d.data[w-1][h]) {
t.push([x0, y0, (scale * d.data[w-1][h]) ])
}
}
t.push([x0,y0,z1])
t.push([x1,y0,z1])
if (w<(d.width-1)) {
if (d.data[w][h] > d.data[w+1][h]) {
t.push([x1, y0, (scale * d.data[w+1][h]) ])
}
}
areas.push(t);
}
// left border wall
if (w == 0) {
var t = [];
t.push([x0,y1,z1])
t.push([x0,y0,z1]);
if (h > 0) {
if (d.data[w][h] > d.data[w][h-1]) {
t.push([x0, y0, (scale * d.data[w][h-1]) ])
}
}
t.push([x0,y0,z0])
t.push([x0,y1,z0])
if (h<(d.height-1)) {
if (d.data[w][h] > d.data[w][h+1]) {
t.push([x0, y1, (scale * d.data[w][h+1]) ])
}
}
areas.push(t);
}
// bottom face of each pixel
if (h == (d.height-1)) {
// if we're on the last row, treat it like a border
var t = [];
t.push([x1,y1,z1])
t.push([x0,y1,z1]);
if (w > 0) {
if (d.data[w][h] > d.data[w-1][h]) {
t.push([x0, y1, (scale * d.data[w-1][h]) ])
}
}
t.push([x0,y1,z0])
t.push([x1,y1,z0])
if (w<(d.width-1)) {
if (d.data[w][h] > d.data[w+1][h]) {
t.push([x1, y1, (scale * d.data[w+1][h]) ])
}
}
areas.push(t);
} else {
// just connect it to the next pixel
if (d.data[w][h] != d.data[w][h+1]) {
var z2 = base + (scale * d.data[w][h+1])
areas.push([[x1, y1, z1], [x0,y1,z1], [x0,y1,z2], [x1,y1,z2]])
}
}
// right face of each pixel
if (w == (d.width-1)) {
// if we're on the last row, make it a solid right border
var t = [];
t.push([x1,y1,z0])
t.push([x1,y0,z0]);
if (h > 0) {
if (d.data[w][h] > d.data[w][h-1]) {
t.push([x1, y1, (scale * d.data[w][h-1]) ])
}
}
t.push([x1,y0,z1])
t.push([x1,y1,z1])
if (h<(d.height-1)) {
if (d.data[w][h] > d.data[w][h+1]) {
t.push([x1, y1, (scale * d.data[w][h+1]) ])
}
}
areas.push(t);
} else {
// just connect it to the next pixel
if (d.data[w][h] != d.data[w+1][h] ) {
var z2 = base + (scale * d.data[w+1][h])
areas.push([[x1, y0, z1], [x1, y1, z1], [x1, y1, z2], [x1, y0, z2] ])
}
}
}
}
return areas;
}
// takes an array of point arrays and converts them into sets of triangles
//
// TODO: right now this is hardcoded for flat areas containing 3-6
// points around the perimeter, so it should probably be rewritten as
// a more generic algorithm that can take arrays of n points and convert
// them to triangles
//
var areasToTriangles = function(areas) {
triangles = [];
var routes = [
0,
0,
0,
[[0,1,2]],
[[0,1,2], [0,2,3]],
[[0,1,4], [1,2,4], [2,3,4]],
[[0,1,2], [2,3,5], [3,4,5], [5,0,2]]
]
for (var a in areas) {
var l = areas[a].length;
if ((l >= 3) && (l <= 6)) {
for (var i in routes[l]) {
triangles.push([
areas[a][routes[l][i][0]],
areas[a][routes[l][i][1]],
areas[a][routes[l][i][2]]
]);
}
}
}
return triangles;
}
// takes an array of triangles and writes them to a file
// using the standard STL ASCII format
var createASCIISTL = function (triangles) {
var str = "solid lithograph\n"
for (var i in triangles) {
str += " facet normal 0.0 0.0 0.0\n"
str += " outer loop\n"
str += " vertex " + triangles[i][0][0] + " " + triangles[i][0][1] + " " + triangles[i][0][2] + "\n"
str += " vertex " + triangles[i][1][0] + " " + triangles[i][1][1] + " " + triangles[i][1][2] + "\n"
str += " vertex " + triangles[i][2][0] + " " + triangles[i][2][1] + " " + triangles[i][2][2] + "\n"
str += " endloop\n"
str += " endfacet\n"
}
str += "endsolid"
var stream = fs.createWriteStream(program.outputFile, { flags : 'w' })
stream.write(str)
}
// takes an array of triangles and writes them to a file
// using the standard STL binary format
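// Record layout (standard binary STL, noted here for clarity): an 80-byte header,
// a uint32 triangle count, then 50 bytes per triangle (12 little-endian floats
// for the normal and three vertices, 48 bytes, plus a 2-byte attribute count),
// which is why the buffer below is sized 84 + 50 * triangles.length.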
var createBinarySTL = function (triangles) {
var buffLength = 84 + (50 * triangles.length)
var b = new Buffer(buffLength)
// these 80 bytes are always ignored so you can put
// whatever string you want in this space
b.write('NodeJS Binary STL Writer', 0)
b.writeUInt32LE(triangles.length, 80);
var offset = 84
for (var i in triangles) {
b.writeFloatLE(0.0, offset);
b.writeFloatLE(0.0, offset+4);
b.writeFloatLE(0.0, offset+8);
b.writeFloatLE(triangles[i][0][0], offset+12);
b.writeFloatLE(triangles[i][0][1], offset+16);
b.writeFloatLE(triangles[i][0][2], offset+20);
b.writeFloatLE(triangles[i][1][0], offset+24);
b.writeFloatLE(triangles[i][1][1], offset+28);
b.writeFloatLE(triangles[i][1][2], offset+32);
b.writeFloatLE(triangles[i][2][0], offset+36);
b.writeFloatLE(triangles[i][2][1], offset+40);
b.writeFloatLE(triangles[i][2][2], offset+44);
b.writeUInt16LE(0, offset+48);
offset += 50
}
var stream = fs.createWriteStream(program.outputFile, { flags : 'w' });
stream.write(b)
}
// Process the image
var processImage = function(err, pixels) {
if(err) {
console.log("Couldn't find that image. Is the path correct?")
return
}
// convert the image into an array of heights representing grayscale values
var heightData = getImageHeights(pixels);
// convert those heights into a series of 3D rects in space
var areas = getModelAreas(heightData);
// parse those rects into triangles for rendering
var triangles = areasToTriangles(areas);
// output the triangles into STL format
if (program.ascii) {
createASCIISTL(triangles);
} else {
createBinarySTL(triangles);
}
}
//
//
// Main Program flow
//
//
// import required libraries
// used to write the STL file locally
var fs = require('fs');
// used to read the pixel information from the images
var getPixels = require('get-pixels');
// used to make this a command line utility
var program = require('commander');
// set the command-line options
program
.version('0.0.1')
.option('-i, --image [path]', 'Path to image file (required)')
.option('-o, --output-file [path]', 'STL output file (defaults to lithophane.stl)', String, 'lithophane.stl')
.option('-a, --ascii', 'Export STL as ASCII instead of binary')
.parse(process.argv);
// process the image if it exists
if (!program.image) {
console.log("You must include an image path as a parameter. See ./lithophane.js -h for more details.")
} else {
getPixels(program.image, processImage)
}
| wubbahed/lithophane |
<|start_filename|>src/ContentBlocks/BlockQuote/BlockQuotePreview.js<|end_filename|>
import React, { PropTypes } from 'react';
import Box from 'grommet/components/Box';
import BlockQuote from './BlockQuote';
export default function BlockQuotePreview ({ content, source }) {
return (
<Box>
<BlockQuote content={content} source={source} />
</Box>
);
};
BlockQuotePreview.propTypes = {
  content: PropTypes.string,
  source: PropTypes.string
};
<|start_filename|>src/ContentBlocks/BlockCarouselWithContent/BlockCarouselWithContentPreview.js<|end_filename|>
import React, { PropTypes } from 'react';
import Box from 'grommet/components/Box';
import Carousel from 'grommet/components/Carousel';
import Image from 'grommet/components/Image';
export default function BlockCarouselPreview ({ carousel }) {
const slides = carousel.map((slide, index) =>
<Box key={`slide-${index}`} full="horizontal">
<Image src={slide.image} full="horizontal" />
</Box>
);
return (
<Box colorIndex="light-1" direction="row" pad={{ between: 'medium' }}
full="horizontal">
<Carousel>
{slides}
</Carousel>
</Box>
);
};
BlockCarouselPreview.propTypes = {
carousel: PropTypes.array
};
<|start_filename|>src/ContentBlocks/BlockCard/BlockCardPreview.js<|end_filename|>
import React, { PropTypes } from 'react';
import Anchor from 'grommet/components/Anchor';
import Box from 'grommet/components/Box';
import Card from 'grommet/components/Card';
import Markdown from 'grommet/components/Markdown';
export default function BlockCardPreview ({ content, image, card }) {
const { heading, label, linkText } = card;
return (
<Box colorIndex="light-1" direction="row" pad={{ between: 'medium' }}>
<Box>
<Card
colorIndex="light-2"
thumbnail={image.path}
heading={heading}
label={label}
link={
<Anchor href="#" label={linkText} primary={true} />
}
/>
</Box>
<Box>
<Markdown content={content} components={{
'p': { 'props': { 'margin': 'none' } }
}}/>
</Box>
</Box>
);
};
BlockCardPreview.propTypes = {
  content: PropTypes.string,
  image: PropTypes.object,
  card: PropTypes.object
};
| karatechops/brand-central-content-blocks |
<|start_filename|>buildScripts/copyExamples.js<|end_filename|>
'use strict';
const cwd = process.cwd(),
fs = require('fs-extra'),
path = require('path'),
examplesPath = path.join(cwd, 'examples'),
startDate = new Date(),
srcPath = [
'../node_modules/neo.mjs/src/',
'../../node_modules/neo.mjs/src/',
'../../../node_modules/neo.mjs/src/',
'../../../../node_modules/neo.mjs/src/',
'../../../../../node_modules/neo.mjs/src/'
],
srcRegex = [
/..\/src\//gi,
/..\/..\/src\//gi,
/..\/..\/..\/src\//gi,
/..\/..\/..\/..\/src\//gi,
/..\/..\/..\/..\/..\/src\//gi
];
// copy the examples folder
fs.mkdirpSync(examplesPath);
fs.copySync(path.join(cwd, 'node_modules/neo.mjs/examples'), examplesPath);
const isFile = fileName => {
return fs.lstatSync(fileName).isFile()
};
const parseFolder = (folderPath, index) => {
let content, i, itemPath, prefix;
fs.readdirSync(folderPath).forEach(itemName => {
itemPath = path.join(folderPath, itemName);
if (isFile(itemPath)) {
if (itemName === 'neo-config.json') {
content = require(itemPath);
prefix = '';
for (i=0; i < index; i++) {
prefix += '../'
}
Object.assign(content, {
appPath : prefix + content.appPath,
mainPath : '../node_modules/neo.mjs/src/Main.mjs',
workerBasePath: `${prefix}../node_modules/neo.mjs/src/worker/`
});
fs.writeFileSync(itemPath, JSON.stringify(content, null, 4));
} else if (itemName.endsWith('.mjs')) {
content = fs.readFileSync(itemPath, 'utf8').replace(srcRegex[index], srcPath[index]);
fs.writeFileSync(itemPath, content, 'utf8');
}
} else {
parseFolder(itemPath, index + 1);
}
});
};
parseFolder(examplesPath, 0);
const processTime = (Math.round((new Date - startDate) * 100) / 100000).toFixed(2);
console.log(`Total time: ${processTime}s`);
process.exit();
| neomjs/theming-demo |
<|start_filename|>jest-config/jest-global-teardown.js<|end_filename|>
const { teardown: teardownDevServer } = require('jest-dev-server');
module.exports = async function globalTeardown() {
// Stop server when all test complete
// try {
// await teardownDevServer();
// console.log('Stop server when all test-case was completed!');
// } catch (error) {
// console.log(`Can't stop server!`);
// console.log(error);
// }
};
<|start_filename|>jest-config/jest-global-setup.js<|end_filename|>
const { setup: setupDevServer } = require('jest-dev-server');
module.exports = async function globalSetup() {
// Start a sever before run test
// try {
// await setupDevServer({
// command: `node ./bin/www`,
// launchTimeout: 50000,
// port: 3000,
// });
// console.log('Start server before run test!');
// } catch (error) {
// console.log(`Can't start server!`);
// console.log(error);
// }
};
<|start_filename|>webpack.config.js<|end_filename|>
var Webpack = require('webpack');
var Path = require('path');
var MiniCssExtractPlugin = require("mini-css-extract-plugin");
var ExtractTextPlugin = require('extract-text-webpack-plugin');
// `CheckerPlugin` is optional. Use it if you want async error reporting.
// We need this plugin to detect a `--watch` mode. It may be removed later
// after https://github.com/webpack/webpack/issues/3460 will be resolved.
// var CheckerPlugin = require('awesome-typescript-loader')
var TARGET_PATH = __dirname + '/build';
var config = {
dev: {
name: 'React Client Package',
entry: {
'guest': './src/client/guest',
'member': './src/client/member',
'admin': './src/client/admin'
},
// watch: true,
output: {
filename: '[name].js',
path: Path.resolve(TARGET_PATH + '/src/public/js')
},
// Enable sourcemaps for debugging webpack's output.
devtool: 'source-map',
watch: true,
mode: 'development',
module: {
rules: [{
// All files with a '.ts' or '.tsx' extension will be handled by 'awesome-typescript-loader'.
test: /\.tsx?$/,
loader: "awesome-typescript-loader"
}, {
// All output '.js' files will have any sourcemaps re-processed by 'source-map-loader'.
// { enforce: "pre", test: /\.js$/, loader: "source-map-loader" },
test: /\.css$/,
loader: ExtractTextPlugin.extract('style-loader', 'css-loader?modules&importLoaders=1&localIdentName=[name]__[local]___[hash:base64:5]!postcss-loader')
}, {
test: /\.svg$/,
loader: "url-loader?limit=10000&mimetype=image/svg+xml"
}]
},
resolve: {
// modulesDirectories: ['node_modules', 'components'],
extensions: ['.webpack.js', '.web.js', '.ts', '.tsx', '.js', '.jsx']
},
plugins: [
new Webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV)
}),
// new webpack.NoErrorsPlugin(),
// new CheckerPlugin(),
],
// When importing a module whose path matches one of the following, just
// assume a corresponding global variable exists and use that instead.
// This is important because it allows us to avoid bundling all of our
// dependencies, which allows browsers to cache those libraries between builds.
externals: {
// "react": "React",
// "react-dom": "ReactDOM"
},
},
//
// Production Setting
//
production: {
name: 'React Client Package',
entry: {
'guest': ['@babel/polyfill', './src/polyfills.js', './src/client/guest'],
'member': ['@babel/polyfill', './src/polyfills.js', './src/client/member'],
'admin': ['@babel/polyfill', './src/polyfills.js', './src/client/admin'],
},
// watch: true,
output: {
filename: '[name].js',
path: Path.resolve(TARGET_PATH + '/src/public/js')
},
// Enable sourcemaps for debugging webpack's output.
watch: false,
mode: 'production',
module: {
rules: [{
test: /\.tsx?$/,
loader: "awesome-typescript-loader"
},
{
test: /\.(sa|sc|c)ss$/,
use: [
MiniCssExtractPlugin.loader,
'css-loader',
'postcss-loader',
'sass-loader',
]
}, {
test: /\.js$/,
use: [{
loader: 'babel-loader',
options: {
presets: ['@babel/preset-env']
}
}],
// exclude: [
// Path.resolve(__dirname, './node_modules'),
// Path.resolve(__dirname, './node_modules/@material-ui/'),
// Path.resolve(__dirname, './node_modules/lodash/'),
// Path.resolve(__dirname, './node_modules/react'),
// Path.resolve(__dirname, './node_modules/jss'),
// Path.resolve(__dirname, './node_modules/css-vendor'),
// Path.resolve(__dirname, './node_modules/react-text-mask'),
// ]
exclude: {
include: Path.resolve(__dirname, './node_modules/'),
exclude: [
Path.resolve(__dirname, './node_modules/aws-appsync/'),
Path.resolve(__dirname, './node_modules/query-string/')
]
},
}
]
},
resolve: {
// modulesDirectories: ['node_modules', 'components'],
extensions: ['.webpack.js', '.web.js', '.ts', '.tsx', '.js', '.jsx'],
modules: [
Path.resolve(__dirname, 'src'),
'node_modules'
]
},
plugins: [
new Webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify('production')
}),
// new webpack.NoErrorsPlugin(),
// new MiniCssExtractPlugin({
// filename: "[name].css",
// chunkFilename: "[id].css"
// }),
// new ExtractTextPlugin({
// filename: 'hoge-style.css',
// allChunks: true
// }),
],
// When importing a module whose path matches one of the following, just
// assume a corresponding global variable exists and use that instead.
// This is important because it allows us to avoid bundling all of our
// dependencies, which allows browsers to cache those libraries between builds.
externals: {
// "react": "React",
// "react-dom": "ReactDOM"
},
}
};
module.exports = config;
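// Note: both configs rely on Webpack.DefinePlugin to inline the value of
// process.env.NODE_ENV into the bundles at build time. A hedged sketch of what
// that substitution means for application code (illustrative only):
//
//   // somewhere in src/client/*.tsx
//   if (process.env.NODE_ENV !== 'production') {
//     console.log('development build'); // minifiers can drop this branch entirely
//   }
//
// The dev config forwards NODE_ENV from the environment, while the production
// config hard-codes the string 'production'.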
<|start_filename|>src/client/localization/ja/guest.json<|end_filename|>
{
"userMessagesUnread": "こんにちは <1>{{displayName}}</1>, あなたは {{count}} 個の未読メッセージがあります。",
"currentDate": "現在の日付 : {{date, YYYY/MM/DD}}",
"Goemon": "五右衛門"
}
<|start_filename|>src/polyfills.js<|end_filename|>
// require('babel-polyfill');
// fetch() polyfill
//require('whatwg-fetch');
require('isomorphic-fetch');
// DOM4 polyfill
// require('dom4');
// URLSearchParams polyfill
// if (typeof URLSearchParams === 'undefined') {
// window.URLSearchParams = require('url-search-params');
// }
<|start_filename|>src/client/localization/en/guest.json<|end_filename|>
{
"userMessagesUnread": "Hello <1>{{displayName}}</1>, you have {{count}} unread message..",
"currentDate": "CurrentDate : {{date, MM/DD/YYYY}}",
"Goemon": "Goemon"
}
<|start_filename|>jest-config/jest-setup-test-framework.js<|end_filename|>
// Increase the Jest timeout so async tests do not time out on CircleCI builds
jest.setTimeout(30000);
<|start_filename|>gulpfile.js<|end_filename|>
/*jshint esversion: 6 */
var del = require('del'),
gulp = require('gulp'),
nodemon = require('gulp-nodemon'),
plumber = require('gulp-plumber'),
sass = require('gulp-sass'),
cssmin = require('gulp-cssmin'),
webpack = require('webpack'),
gulpWebpack = require('webpack-stream'),
webpackConfig = require('./webpack.config.js'),
browserSync = require('browser-sync').create(),
notifier = require('node-notifier'),
eslint = require('gulp-eslint'),
tslint = require('gulp-tslint'),
jest = require('@jest/core');
var PRODUCT = process.env.PROD_ENV || '0'; // compared as a plain string below; JSON.parse would throw on PROD_ENV=production
var tslintconfig = require('./tslint.json');
var targetPath = './build';
var exec = require('child_process').exec;
//
// copy-assets
//
gulp.task('copy-assets', () => {
return gulp.src(
['src/public/**/*', '!src/public/**/*.scss', 'src/views/**/*', 'config/**/*'], {
base: './src'
}
).pipe(gulp.dest(targetPath));
});
//
// css
//
gulp.task('css', () => {
if (PRODUCT == 'production') {
return gulp.src('./src/public/css/**/*.scss')
.pipe(plumber())
.pipe(sass())
.pipe(cssmin())
.pipe(gulp.dest('./build/public/css'));
} else {
return gulp.src('./src/public/css/**/*.scss')
.pipe(plumber())
.pipe(sass())
.pipe(gulp.dest('./build/public/css'));
}
});
//
// lint
//
gulp.task("lint", () => {
return gulp.src(["src/**/*.ts", "!src/**/*.d.ts"])
.pipe(tslint(tslintconfig))
.pipe(tslint.report());
});
//
// webpack
//
gulp.task('webpack', () => {
return gulp.src('build')
.pipe(plumber({
errorHandler: (error) => {
notifier.notify({
message: error.message,
title: error.plugin,
sound: 'Glass'
});
}
}))
.pipe(gulpWebpack(Object.assign({}, webpackConfig.dev, {
watch: false,
}), webpack))
.pipe(gulp.dest('build/public/js'))
.pipe(browserSync.stream());
});
//
// webpack:production
//
gulp.task('webpack:production', () => {
return gulp.src('build')
.pipe(plumber({
errorHandler: (error) => {
notifier.notify({
message: error.message,
title: error.plugin,
sound: 'Glass'
});
}
}))
.pipe(gulpWebpack(Object.assign({}, webpackConfig.production, {
watch: false,
}), webpack))
.pipe(gulp.dest('build/public/js'));
});
//
// Clean
//
gulp.task('clean', del.bind(null, ['.tmp', 'dist', 'build', 'coverage']));
//
// tsc
//
gulp.task('tsc', function (cb) {
exec('node ./node_modules/typescript/bin/tsc', function (err, stdout, stderr) {
console.log(stdout);
console.log(stderr);
cb(err);
return;
});
});
//
// nodemon
//
gulp.task('nodemon', (callback) => {
var called = false;
return nodemon({
verbose: false,
script: './bin/www',
delay: "2500",
ext: 'js html css ejs ico txt pdf json',
ignore: [
'build/client/*',
'build/public/*',
'build/__test__/*',
'*.test.ts',
'*.test.js',
'*.ts',
'*.tsx',
'*.json',
'node_modules'
]
})
.on('start', () => {
if (!called) {
called = true;
setTimeout(() => {
browserSync.init(null, {
proxy: 'http://localhost:3000',
port: 7000
});
}, 4000);
callback();
}
console.log('nodemon started.');
})
.on('restart', (changedFiles) => {
console.log('nodemon restarting... triggered by ' + changedFiles);
// when server reboot
setTimeout(() => {
browserSync.reload();
}, 3000);
})
.on('crash', function () {
console.error('Application has crashed!\n');
process.exit();
})
.once('quit', function () {
console.error('Application has stopped\n');
process.exit();
});
});
//
// start
//
gulp.task('start', gulp.series('nodemon'));
//
// browser-reload
//
gulp.task('browser-reload', () => {
return browserSync.reload();
});
//
// build
//
gulp.task('build', gulp.series(gulp.parallel('copy-assets', 'tsc', 'css', 'lint'), 'webpack'));
//
// rebuild
//
gulp.task('rebuild', gulp.series('clean', 'build'));
//
// build:production
//
gulp.task('build:production', gulp.series('clean', 'copy-assets', 'tsc', 'css', 'lint', 'webpack:production'));
//
// webpack:watch
//
gulp.task('webpack:watch', () => {
return gulp.src('build')
.pipe(gulpWebpack(Object.assign({}, webpackConfig.dev, {
watch: true,
}), webpack))
.pipe(gulp.dest('build/public/js'))
.pipe(browserSync.stream());
});
//
// watch
//
gulp.task('watch', (done) => {
gulp.watch('./src/public/css/*.scss')
.on('change', function (path) {
gulp.src(path)
.pipe(plumber())
.pipe(sass())
.pipe(gulp.dest('./build/public/css'));
console.log('File(scss) ' + path + ' was changed');
});
gulp.watch(
['./src/**', '!./src/client/**/*', '!./src/public/css/*', '!./src/**/*.test.ts'], gulp.series('tsc', 'copy-assets'))
.on('change', function (path) {
console.log('File(ts) ' + path + ' was changed');
});
gulp.watch('./src/**/*.test.ts', gulp.series('tsc', 'test'))
.on('change', function (path) {
console.log('File(test) ' + path + ' was changed');
});
gulp.watch('./build/public/css/*.css', gulp.series('browser-reload'))
.on('change', function (path) {
console.log('File(css) ' + path + ' was changed');
});
done();
});
//
// test
//
gulp.task('test', () => {
return jest.runCLI({}, [__dirname]);
});
//
// test:watch
//
gulp.task('test:watch', () => {
return jest.runCLI({
watch: true,
testRegex: "(/__tests__/.*|(\\.|/)(test|spec))\\.(tsx?)$",
}, [__dirname]);
});
//
// test:ts:watchall
//
gulp.task('test:ts:watchall', () => {
return jest.runCLI({
watchAll: true,
testRegex: "(/__tests__/.*|(\\.|/)(test|spec))\\.(tsx?)$",
}, [__dirname]);
});
//
// jslint
//
gulp.task('jslint', () => {
return gulp.src(['./src/**/*.js', './src/**/*.jsx'])
.pipe(eslint({
useEslintrc: true
}))
.pipe(eslint.format())
.pipe(eslint.failAfterError());
});
//
// develop
//
gulp.task('develop',
gulp.series(gulp.parallel('copy-assets', 'tsc', 'css', 'lint'),
'watch', 'webpack', 'test', 'start', 'webpack:watch'));
//
// Default task
//
gulp.task('default', gulp.series('develop'));
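// Usage sketch for the tasks defined above (assuming a local gulp install,
// e.g. invoked via `npx gulp`); not part of the build itself:
//
//   npx gulp develop          // copy assets, compile, lint, test, start nodemon + watchers
//   npx gulp build            // one-off development build
//   npx gulp build:production // clean + production webpack bundles
//   npx gulp test             // run jest once via @jest/core runCLI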
<|start_filename|>jest-config/jest-prepare-binary.js<|end_filename|>
const { MongoMemoryServer } = require('mongodb-memory-server');
const path = require('path');
const downloadDir = path.join(process.cwd(), '.cache');
const mongoServer = new MongoMemoryServer({
binary: {
downloadDir,
}
});
console.log('Download mongodb binary');
mongoServer.getConnectionString().then(url => {
console.log(url);
process.exit();
}).catch(err => {
console.log(err);
process.exit();
});
| lunascape/hestia |
<|start_filename|>src/assetbundles/matrixmate/dist/css/MatrixMate.css<|end_filename|>
/**
* MatrixMate plugin for Craft CMS
*
* MatrixMate CSS
*
* @author Værsågod
* @copyright Copyright (c) 2019 Værsågod
* @link https://vaersaagod.no
* @package MatrixMate
* @since 1.0.0
*/
.matrixmate-buttons:first-child {
margin-top: 0 !important;
}
.matrixmate-buttons:last-child {
margin-bottom: 0 !important;
}
.matrixmate-buttons {
position: relative;
margin: 15px 0 24px 0;
min-height: 30px;
}
.matrixmate-buttons:before {
content: '.';
display: block;
height: 0;
clear: both;
visibility: hidden;
margin-bottom: -5px;
}
body.ltr .matrixmate-buttons > .btn, body.ltr .matrixmate-buttons > .btngroup {
float: left;
}
body.rtl .matrixmate-buttons > .btn, body.rtl .matrixmate-buttons > .btngroup {
float: right;
}
body.ltr .matrixmate-buttons > .btn, body.ltr .matrixmate-buttons > .btngroup {
margin: 0px 5px 0 0;
}
body.rtl .matrixmate-buttons > .btn, body.rtl .matrixmate-buttons > .btngroup {
margin: 0px 0 0 5px;
}
.matrixmate-buttons > .btn:first, .matrixmate-buttons > .btngroup:first {
margin-top: 5px !important;
}
.border-box + .matrixmate-buttons, .shadow-box + .matrixmate-buttons {
margin-top: 7px;
}
.matrix-field:not(.matrixmate-inited) > .buttons,
.matrix-field:not(.matrixmate-inited) > .blocks > .matrixblock > .fields {
visibility: hidden !important;
}
.field .field .matrix-field > .buttons,
.field .field .matrix-field > .blocks > .matrixblock > .fields {
visibility: visible !important;
}
.matrixmate-block-inited.collapsed.draghelper .matrixmate-tabs,
.matrixmate-block-inited.collapsed.draghelper .matrixmate-fields,
.matrixmate-block-inited.velocity-animating .matrixmate-tabs,
.matrixmate-block-inited.velocity-animating .matrixmate-fields,
.matrixmate-block-inited.collapsed .matrixmate-tabs,
.matrixmate-block-inited.collapsed .matrixmate-fields {
-webkit-opacity: 0 !important;
-moz-opacity: 0 !important;
opacity: 0 !important;
}
.matrixmate-fields .flex-fields > :before {
content: none !important;
}
.matrixmate-block-inited.collapsed .matrixmate-tabs {
pointer-events: none;
}
.matrixmate-block-inited .matrixmate-tabs {
position: absolute;
top: 0;
right: 76px;
display: block;
font-size: 0;
}
.matrixmate-block-inited .matrixmate-tabs li {
display: inline-block;
font-size: 13px;
}
.matrixmate-block-inited .matrixmate-tabs li a {
display: block;
padding: 5px 10px;
color: currentColor;
white-space: nowrap;
opacity: 0.65;
border-left: 1px solid transparent;
border-right: 1px solid transparent;
border-bottom: 1px solid transparent;
}
.matrixmate-block-inited .matrixmate-tabs li a:not(.sel) {
background-color: transparent !important;
}
.matrixmate-block-inited .matrixmate-tabs li a:hover {
text-decoration: none;
opacity: 0.9;
}
.matrixmate-block-inited .matrixmate-tabs li a.sel {
color: currentColor;
opacity: 1;
background-color: inherit;
border-left: 1px solid #e3e5e8;
border-right: 1px solid #e3e5e8;
position: relative;
}
.matrixmate-block-inited.disabled .matrixmate-tabs {
right: 106px;
}
.matrixmate-block-inited.matrixmate-has-tabs > .titlebar {
border-bottom: 1px solid #E4E5E8;
}
.matrixmate-settings-menu h6 {
margin-top: 10px;
margin-bottom: 0;
padding-left: 10px;
}
body.ltr .matrixmate-settings-menu a.fieldtoggle:before {
left: -12px;
}
body.rtl .matrixmate-settings-menu a.fieldtoggle:before {
right: -12px;
}
.matrixmate-collapsed-menu ul {
padding-top: 5px;
}
html.noscroll .matrixmate-collapsed-menu,
html.noscroll .matrixmate-settings-menu {
max-height: 50%;
overflow-y: auto;
overflow-x: hidden;
}
.matrixmate-menu {
overflow: hidden;
max-height: none !important;
}
| timrosskamp/matrixmate |
<|start_filename|>src/index.js<|end_filename|>
/**
*
* @authors zx.wang (<EMAIL>)
* @date 2017-01-24 00:35:14
* @version $Id$
*/
function plugin(Vue) {
if (plugin.installed) {
return;
}
Object.defineProperties(Vue.prototype, {
$crumbs: {
get() {
// Get the current route information
const route = this.$route;
const router = this.$router;
const currentpath = route.path.replace(/\//g, '');
let parent = route.meta.parent;
const crumbs = [];
const routesArr = [];
routesArr.push(route);
// Recursively walk up parent paths and resolve the matching route records
while (parent && parent.length > 0) {
let matched = router.match(parent);
routesArr.unshift(matched);
parent = matched.meta.parent || '';
}
// Loop over the resolved routes to assemble breadcrumb URLs and names
for (let i = 0; i < routesArr.length; i++) {
const breadcrumbs = routesArr[i].meta.breadcrumb;
if (breadcrumbs && breadcrumbs.length > 0) {
for (let j = 0; j < breadcrumbs.length; j++) {
const crumbspath = (breadcrumbs[j].url || routesArr[i].path).replace(/\//g, '');
const hidden = breadcrumbs[j].hidden;
if (currentpath === crumbspath && !!hidden) {
break;
}
crumbs.push(breadcrumbs[j]);
}
}
}
return crumbs;
}
}
});
Vue.component('breadcrumb', {
template: '<ul class="breadcrumb" v-if="$crumbs.length">' +
'<li v-for="(crumb, i) in $crumbs">' +
'<router-link v-if="i < $crumbs.length-1" :to="{ path: crumb.url }">{{ crumb.name }}</router-link>' +
'<a v-else>{{ crumb.name }}</a>' +
'</li>' +
'</ul>'
});
}
if (typeof window !== 'undefined' && window.Vue) {
window.Vue.use(plugin);
}
export default plugin;
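// Illustrative route configuration this plugin expects (paths and names are
// hypothetical, shown only to clarify the meta.parent / meta.breadcrumb
// contract consumed by the $crumbs getter above):
//
//   const routes = [
//     {
//       path: '/home',
//       component: Home,
//       meta: { breadcrumb: [{ url: '/home', name: 'Home' }] }
//     },
//     {
//       path: '/home/user',
//       component: User,
//       meta: {
//         parent: '/home', // resolved recursively through router.match()
//         breadcrumb: [{ url: '/home/user', name: 'User' }]
//       }
//     }
//   ];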
<|start_filename|>bower.json<|end_filename|>
{
"name": "vue-crumbs",
"main": "dist/vue-crumbs.js",
"version": "1.0.0",
"description": "a simple and useful breadcrumbs for vue2.js",
"homepage": "https://github.com/jypblue/vue-crumbs#readme",
"license": "MIT",
"keywords": [
"breadcrumbs",
"vue",
"vue2.js"
],
"ignore": [
".*",
"build",
"example",
"package.json"
]
}
<|start_filename|>build/build.js<|end_filename|>
/**
*
* @authors zx.wang (<EMAIL>)
* @date 2017-01-23 23:52:38
* @version $Id$
*/
const fs = require('fs');
const rollup = require('rollup');
const uglify = require('uglify-js');
const babel = require('rollup-plugin-babel');
const alias = require('rollup-plugin-alias');
const package = require('../package.json');
const banner =
"/*!\n" +
" * vue-crumbs v" + package.version + "\n" +
" * https://github.com/jypblue/vue-crumbs\n" +
" * Released under the MIT License. \n" +
" */\n";
rollup.rollup({
entry: 'src/main.js',
plugins: [
alias({
'vue$': 'vue/dist/vue.esm.js'
}),
babel({
presets: 'es2015-loose-rollup'
})
]
}).then(function(bundle) {
return write('dist/vue-crumbs.js', bundle.generate({
format: 'umd',
banner: banner,
moduleName: 'VueCrumbs'
}).code, bundle);
}).then(function(bundle) {
return write('dist/vue-crumbs.min.js', banner + '\n' + uglify.minify('dist/vue-crumbs.js').code, bundle);
})
.then(function(bundle) {
return write('dist/vue-crumbs.es2015.js', bundle.generate({
banner: banner
}).code, bundle);
})
.then(function(bundle) {
return write('dist/vue-crumbs.common.js', bundle.generate({
format: 'cjs',
banner: banner
}).code, bundle);
})
.catch(logError);
function write(dest, code, bundle) {
return new Promise(function(resolve, reject) {
fs.writeFile(dest, code, function(err) {
if (err) return reject(err);
console.log(blue(dest) + ' ' + getSize(code));
resolve(bundle);
});
});
}
function getSize(code) {
return (code.length / 1024).toFixed(2) + 'kb';
}
function logError(e) {
console.log(e);
}
function blue(str) {
return '\x1b[1m\x1b[34m' + str + '\x1b[39m\x1b[22m';
}
<|start_filename|>src/breadcrumb.js<|end_filename|>
/**
*
* @authors zx.wang (<EMAIL>)
* @date 2017-01-24 16:22:31
* @version $Id$
* @describe out import crumbs.vue file
*/
import breadcrumb from './breadcrumb.vue';
const install = function(Vue) {
if (install.installed) return;
Vue.component('breadcrumb', breadcrumb);
};
if (typeof window !== 'undefined' && window.Vue) {
install(window.Vue);
}
export default {
version: '1.1.0',
install,
};
<|start_filename|>dist/vue-crumbs.js<|end_filename|>
/*!
* vue-crumbs v1.2.0
* https://github.com/jypblue/vue-crumbs
* Released under the MIT License.
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global.VueCrumbs = factory());
}(this, (function () { 'use strict';
/**
*
* @authors zx.wang (<EMAIL>)
* @date 2017-05-25 16:35:14
* @version 1.2.0
*/
function plugin(Vue) {
if (plugin.installed) {
return;
}
Vue.component('breadcrumb', {
props: {
mode: {
type: String,
default: 'mix' // name // url
},
rightIcon: {
type: String,
default: 'arrow-right'
}
},
data: function () {
return {
crumbs: [],
currentPathStr: this.$route.path.replace(/\//g, '')
};
},
methods: {
fnUrlCrumbsArr: function () {
var routesArr = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : [];
var currentPathStr = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : "";
var crumbsArr = [];
for (var i = 0; i < routesArr.length; i++) {
var breadcrumb = routesArr[i].meta.breadcrumb;
if (!!breadcrumb) {
for (var j = 0; j < breadcrumb.length; j++) {
var crumbsPathStr = (breadcrumb[j].url || routesArr[i].path).replace(/\//g, '');
var hidden = breadcrumb[j].hidden;
if (currentPathStr === crumbsPathStr && !!hidden) {
break;
}
crumbsArr.push(breadcrumb[j]);
}
}
}
return crumbsArr;
},
fnRoutesArr: function () {
var routesArr = [];
routesArr.push(this.$route);
var parent = this.$route.meta.parent;
while (!!parent) {
var matched = this.$router.match(parent);
routesArr.unshift(matched);
parent = matched.meta.parent;
}
return routesArr;
},
getUrlModeCrumbs: function () {
var routesArr = this.fnRoutesArr();
return this.fnUrlCrumbsArr(routesArr, this.currentPathStr);
},
fnNameCrumbsArr: function (routesNameArr, currentPathStr) {
var crumbsArr = [];
for (var i = 0; i < routesNameArr.length; i++) {
var metaCrumbs = routesNameArr[i].route.meta.breadcrumb;
metaCrumbs = Array.isArray(metaCrumbs) ? metaCrumbs[0] : metaCrumbs;
if (!!metaCrumbs) {
var breadcrumb = {
url: routesNameArr[i].route.path,
icon: metaCrumbs.icon,
name: metaCrumbs.name
};
var crumbsPathStr = breadcrumb.url.replace(/\//g, '');
var hidden = breadcrumb.hidden;
if (currentPathStr === crumbsPathStr && !!hidden) {
break;
}
crumbsArr.push(breadcrumb);
}
}
return crumbsArr;
},
fnRoutesNameArr: function () {
var routesNameArr = [];
var currentMatched = this.$router.resolve({ 'name': this.$route.name, 'params': this.$route.params });
routesNameArr.push(currentMatched);
var parent = this.$route.meta.parent;
while (!!parent) {
var matched = this.$router.resolve({ 'name': parent, 'params': this.$route.params });
routesNameArr.unshift(matched);
parent = matched.route.meta.parent;
}
return routesNameArr;
},
getNameModeCrumbs: function () {
return this.fnNameCrumbsArr(this.fnRoutesNameArr(), this.currentPathStr);
},
getMixModeCrumbs: function () {
var routesArr = [];
var routesNameArr = [];
var currentMatched = this.$router.resolve({ 'name': this.$route.name, 'params': this.$route.params });
routesNameArr.push(currentMatched);
routesArr.push(this.$route);
var parent = this.$route.meta.parent;
while (!!parent) {
if (parent.indexOf('/') !== -1) {
var matched = this.$router.match(parent);
routesArr.unshift(matched);
parent = matched.meta.parent;
} else {
var _matched = this.$router.resolve({ 'name': parent, 'params': this.$route.params });
routesNameArr.unshift(_matched);
parent = _matched.route.meta.parent;
}
}
if (this.$route.name && routesNameArr.length >= routesArr.length) {
return this.fnNameCrumbsArr(routesNameArr, this.currentPathStr);
} else {
return this.fnUrlCrumbsArr(routesArr, this.currentPathStr);
}
},
changeCrumbs: function () {
switch (this.mode) {
case 'name':
this.crumbs = this.getNameModeCrumbs();
break;
case 'url':
this.crumbs = this.getUrlModeCrumbs();
break;
default:
this.crumbs = this.getMixModeCrumbs();
break;
}
}
},
watch: {
$route: 'changeCrumbs'
},
mounted: function () {
this.changeCrumbs();
},
template: '<ul>' + '<li v-for="(item ,i) in crumbs" >' + '<router-link v-if="i < crumbs.length - 1" :to="{ path: item.url}">' + '<i :class="item.icon" v-if="!!item.icon"></i>' + '<span>{{ item.name }}</span>' + '</router-link>' + '<i :class="rightIcon" v-if="i !== crumbs.length - 1 && rightIcon"></i>' + '<a v-else>' + '<i :class="item.icon" v-if="!!item.icon"></i>' + '<span>{{ item.name }}</span>' + '</a>' + '</li>' + '</ul>'
});
}
if (typeof window !== 'undefined' && window.Vue) {
window.Vue.use(plugin);
}
return plugin;
})));
| jypblue/vue-crumbs |
<|start_filename|>lib/validators.js<|end_filename|>
const ajvUtils = require('./helpers/ajv-utils');
const schemaUtils = require('./helpers/schema-utils');
const responseAdapter = require('./helpers/response-adapter');
const { messages } = require('./helpers/common');
const { filterMissingProps } = require('./helpers/object-utils');
const { setCoverage } = require('./helpers/coverage');
module.exports.schemaValidator = function schemaValidator(obj, options = {}) {
const { apiDefinitionsPath } = options;
const buildSchemaOptions = options.buildSchemaOptions || {};
// load the schema
if (!apiDefinitionsPath) {
throw new Error(messages.REQUIRED_API_DEFINITIONS_PATH);
}
const schema = schemaUtils.getSchemaByFilesPath(apiDefinitionsPath, buildSchemaOptions);
// parse the response object
const parsedResponse = responseAdapter.parseResponse(obj);
// validate the response object contains the required props
validateRequiredProps(parsedResponse);
// extract the request path schema
const { request, response } = parsedResponse;
const { path, method } = parsedResponse.request;
const { status } = parsedResponse.response;
const validator = schemaUtils.getValidatorByPathMethodAndCode(schema, request, response);
if (!(validator instanceof Object) || !(validator.validate instanceof Function)) {
throw new Error(`schema not found for ${JSON.stringify({ path, method, status })}`);
}
// validate
const predicate = validator.validate(response);
const { actual, expected } = ajvUtils.parseErrors(validator.errors, response);
// mark API as covered
setCoverage({ path, method, status });
return {
predicate,
actual,
expected,
errors: validator.errors,
matchMsg: messages.EXPECTED_RESPONSE_TO_MATCH_SCHEMA,
noMatchMsg: messages.EXPECTED_RESPONSE_TO_NOT_MATCH_SCHEMA,
};
};
module.exports.statusValidator = function statusValidator(expectedStatus, obj) {
// parse the response object
const parsedResponse = responseAdapter.parseResponse(obj);
// validate the response object is valid
if (!(parsedResponse instanceof Object)) {
throw new Error(messages.FAILED_TO_EXTRACT_RESPONSE_DETAILS);
}
const { response } = parsedResponse;
const { status, body } = response;
if (!status) {
throw new Error("required properties for validating schema are missing: 'status'");
}
// validate
return {
predicate: status === expectedStatus,
actual: { status, body },
expected: { status: expectedStatus },
matchMsg: `expected http status code ${status} to be ${expectedStatus}`,
noMatchMsg: `expected http status code ${status} to not be ${expectedStatus}`,
};
};
function validateRequiredProps(parsedResponse) {
if (!(parsedResponse instanceof Object)) {
throw new Error(messages.FAILED_TO_EXTRACT_RESPONSE_DETAILS);
}
const { request, response } = parsedResponse;
const missingRequestProps = filterMissingProps(request, ['path', 'method']);
const missingResponseProps = filterMissingProps(response, ['status']);
if (missingRequestProps.length > 0 || missingResponseProps.length > 0) {
const missingProps = missingRequestProps
.concat(missingResponseProps)
.map((prop) => `'${prop}'`)
.toString();
throw new Error(`required properties for validating schema are missing: ${missingProps}`);
}
}
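// Usage sketch (not executed here): both exported validators accept a response
// object from supertest/axios/request-promise and return a plain result that
// the chai/should/jest plugins turn into assertions. The file path below is a
// hypothetical example.
//
//   const { schemaValidator, statusValidator } = require('./validators');
//   const result = schemaValidator(response, { apiDefinitionsPath: './docs/swagger.yaml' });
//   // result => { predicate, actual, expected, errors, matchMsg, noMatchMsg }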
<|start_filename|>lib/plugins/chai/index.js<|end_filename|>
const schemaMatcher = require('./schema-matcher');
const statusCodeMatcher = require('./status-matcher');
const coverage = require('../../helpers/coverage');
const { messages } = require('../../helpers/common');
module.exports = function getChaiPlugin(options) {
if (!(options instanceof Object) || !options.apiDefinitionsPath) {
throw new Error(messages.REQUIRED_API_DEFINITIONS_PATH);
}
coverage.init(options);
return function apiSchemaPlugin(chai) {
const { Assertion } = chai;
schemaMatcher(Assertion, options);
statusCodeMatcher(Assertion);
};
};
<|start_filename|>lib/helpers/common.js<|end_filename|>
module.exports = {
messages: {
FAILED_TO_EXTRACT_RESPONSE_DETAILS: 'failed to extract response details',
EXPECTED_RESPONSE_TO_MATCH_SCHEMA: 'expected response to match API schema',
EXPECTED_RESPONSE_TO_NOT_MATCH_SCHEMA: 'expected response to not match API schema',
REQUIRED_API_DEFINITIONS_PATH: "'apiDefinitionsPath' is required",
DUPLICATE_API_DEFINITION: 'same api definition exists in two separate files',
},
};
<|start_filename|>test/coverage.test.js<|end_filename|>
const path = require('path');
const fs = require('fs');
const chalk = require('chalk');
const { expect } = require('chai');
const sinon = require('sinon');
const coverage = require('../lib/helpers/coverage');
const coverageTable = require('../lib/helpers/coverage-table');
const apiDefinitionsPath = path.join(__dirname, 'data', 'schema.yaml');
describe('coverage getReport', () => {
afterEach(() => {
process.removeAllListeners('beforeExit');
});
it('partial coverage', () => {
const expected = [
{
route: '/v2/pet/:petId',
method: 'GET',
statuses: '200',
},
{ route: '/v2/pet/:petId', method: 'POST', statuses: '405' },
{
route: '/v2/pet/:petId',
method: 'DELETE',
statuses: '404',
},
];
coverage.init({ apiDefinitionsPath, reportCoverage: true });
coverage.setCoverage({ path: '/v2/pet', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 400 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 404 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 405 });
expect(coverage.getReport()).to.eql(expected);
});
it('full coverage', () => {
const expected = [];
coverage.init({ apiDefinitionsPath, reportCoverage: true });
coverage.setCoverage({ path: '/v2/pet', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 400 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 404 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'get', status: 200 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'delete', status: 404 });
expect(coverage.getReport()).to.eql(expected);
});
});
describe('coverage printReport', () => {
const sandbox = sinon.createSandbox();
afterEach(() => {
process.removeAllListeners('beforeExit');
sandbox.resetHistory();
sandbox.restore();
});
it('no covered definitions', () => {
const expected = '\u001b[36m\u001b[4m\u001b[1m*ROUTE*\u001b[22m\u001b[24m\u001b[39m | \u001b[32m\u001b[4m\u001b[1m*METHOD*\u001b[22m\u001b[24m\u001b[39m | \u001b[33m\u001b[4m\u001b[1m*STATUSES*\u001b[22m\u001b[24m\u001b[39m \n/v2/pet | POST | 405 \n/v2/pet | PUT | 400,404,405\n/v2/pet/:petId | GET | 200 \n/v2/pet/:petId | POST | 405 \n/v2/pet/:petId | DELETE | 404 ';
coverage.init({ apiDefinitionsPath, reportCoverage: true });
const coverageReport = coverage.getReport();
expect(coverageTable(coverageReport)).to.eql(expected);
});
it('no covered definitions with export and report', () => {
const expected = '[{"route":"/v2/pet","method":"POST","statuses":"405"},{"route":"/v2/pet","method":"PUT","statuses":"400,404,405"},{"route":"/v2/pet/:petId","method":"GET","statuses":"200"},{"route":"/v2/pet/:petId","method":"POST","statuses":"405"},{"route":"/v2/pet/:petId","method":"DELETE","statuses":"404"}]';
coverage.init({ apiDefinitionsPath, reportCoverage: true, exportCoverage: true });
process.emit('beforeExit');
const exportedReport = fs.readFileSync('./coverage.json').toString();
expect(exportedReport).to.eql(expected);
});
it('no covered definitions with export and without report', () => {
const expected = '[{"route":"/v2/pet","method":"POST","statuses":"405"},{"route":"/v2/pet","method":"PUT","statuses":"400,404,405"},{"route":"/v2/pet/:petId","method":"GET","statuses":"200"},{"route":"/v2/pet/:petId","method":"POST","statuses":"405"},{"route":"/v2/pet/:petId","method":"DELETE","statuses":"404"}]';
coverage.init({ apiDefinitionsPath, exportCoverage: true });
process.emit('beforeExit');
const exportedReport = fs.readFileSync('./coverage.json').toString();
expect(exportedReport).to.eql(expected);
});
it('full coverage', () => {
const expected = '';
coverage.init({ apiDefinitionsPath, reportCoverage: true });
coverage.setCoverage({ path: '/v2/pet', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 400 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 404 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'get', status: 200 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'delete', status: 404 });
const coverageReport = coverage.getReport();
expect(coverageTable(coverageReport)).to.eql(expected);
});
it('full coverage and with export', () => {
const expected = '[]';
coverage.init({ apiDefinitionsPath, reportCoverage: true, exportCoverage: true });
coverage.setCoverage({ path: '/v2/pet', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 400 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 404 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'get', status: 200 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'delete', status: 404 });
process.emit('beforeExit');
const exportedReport = fs.readFileSync('./coverage.json').toString();
expect(exportedReport).to.eql(expected);
});
it('error from export with report disabled', () => {
const writeFileSyncStub = sandbox.stub(fs, 'writeFileSync');
const consoleInfoStub = sandbox.stub(console, 'info');
writeFileSyncStub.throws(new Error('failed to read file'));
coverage.init({ apiDefinitionsPath, exportCoverage: true });
process.emit('beforeExit');
sinon.assert.calledThrice(consoleInfoStub);
sinon.assert.calledWith(consoleInfoStub.firstCall, chalk.red('Error writing report to file'));
sinon.assert.calledWith(consoleInfoStub.secondCall, chalk.red('failed to read file'));
});
it('error from export with report enabled and fully covered', () => {
const writeFileSyncStub = sandbox.stub(fs, 'writeFileSync');
const consoleInfoStub = sandbox.stub(console, 'info');
writeFileSyncStub.throws(new Error('failed to read file'));
coverage.init({ apiDefinitionsPath, reportCoverage: true, exportCoverage: true });
coverage.setCoverage({ path: '/v2/pet', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 400 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 404 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'get', status: 200 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'delete', status: 404 });
process.emit('beforeExit');
sinon.assert.callCount(consoleInfoStub, 5);
sinon.assert.calledWith(consoleInfoStub.firstCall, chalk.bold('* API definitions coverage report *'));
sinon.assert.calledWith(consoleInfoStub.secondCall, chalk.green('\nAll API definitions are covered\n'));
sinon.assert.calledWith(consoleInfoStub.thirdCall, chalk.red('Error writing report to file'));
sinon.assert.calledWith(consoleInfoStub.getCall(3), chalk.red('failed to read file'));
});
});
<|start_filename|>test/chai-plugin-multiple-yamls.test.js<|end_filename|>
const { expect, use } = require('chai');
const path = require('path');
const apiSchema = require('../lib/index');
const { request } = require('./helpers/response-generator');
const responses = require('./data/responses');
const apiDefinitionsPath = [path.join(__dirname, 'data', 'schema-split-a.yaml'), path.join(__dirname, 'data', 'schema-split-b.yaml')];
use(apiSchema.chaiPlugin({ apiDefinitionsPath }));
describe('Chai.js plugin schema multiple yamls', () => {
it('Response object matches the schema', async () => {
const response = await request({
status: 200,
body: { ...responses.body.valid.value },
headers: { ...responses.headers.valid.value },
});
expect(response).to.matchApiSchema();
});
it('Response-like plain object matches the schema', async () => {
expect({
method: 'get',
status: 200,
path: '/v2/pet/123',
body: { ...responses.body.valid.value },
headers: { ...responses.headers.valid.value },
}).to.matchApiSchema();
});
it('Response body does not match the schema', async () => {
const response = await request({
status: 200,
body: { ...responses.body.invalid.value },
headers: { ...responses.headers.valid.value },
});
expect(response).to.not.matchApiSchema();
});
it('successful', async () => {
const response = await request({ status: 200, simple: false });
expect(response).to.be.successful();
});
it('created', async () => {
const response = await request({ status: 201, simple: false });
expect(response).to.be.created();
});
it('badRequest', async () => {
const response = await request({ status: 400, simple: false });
expect(response).to.be.badRequest();
});
it('unauthorized', async () => {
const response = await request({ status: 401, simple: false });
expect(response).to.be.unauthorized();
});
it('forbidden', async () => {
const response = await request({ status: 403, simple: false });
expect(response).to.be.forbidden();
});
it('notFound', async () => {
const response = await request({ status: 404, simple: false });
expect(response).to.be.notFound();
});
it('serverError', async () => {
const response = await request({ status: 500, simple: false });
expect(response).to.be.serverError();
});
it('serviceUnavailable', async () => {
const response = await request({ status: 503, simple: false });
expect(response).to.be.serviceUnavailable();
});
it('gatewayTimeout', async () => {
const response = await request({ status: 504, simple: false });
expect(response).to.be.gatewayTimeout();
});
it('custom code', async () => {
const response = await request({ status: 204, simple: false });
expect(response).to.have.status(204);
});
it('No status code in response', () => {
expect(() => expect({ request: { method: 'get', path: '/pet/123' } }).to.be.gatewayTimeout()).to.throw("required properties for validating schema are missing: 'status'");
});
it('When pass two files with same api definition - should throw an error', () => {
expect(() => use(apiSchema.chaiPlugin({ apiDefinitionsPath: [apiDefinitionsPath[0], apiDefinitionsPath[0]] }))).to.throw('same api definition exists in two separate files: /v2/pet');
});
});
<|start_filename|>lib/helpers/ajv-utils.js<|end_filename|>
const get = require('lodash.get');
const set = require('lodash.set');
module.exports.parseErrors = (errors, response) => ({
actual: mapValuesByDataPath(errors, response),
expected: mapErrorsByDataPath(errors),
});
function mapValuesByDataPath(errors, response) {
return errors && errors.reduce((prev, error) => {
if (!error.dataPath) {
return prev;
}
const { root, fullPath } = getDataPath(error.dataPath);
const value = get(response, fullPath);
if (root !== fullPath || root.match(/[[.]/)) {
return set({ ...prev }, fullPath, value);
}
return { ...prev };
}, {});
}
function mapErrorsByDataPath(errors) {
return errors && errors.reduce((prev, error) => {
if (!error.params) {
return prev;
}
const { missingProperty, additionalProperty } = error.params;
const { fullPath } = getDataPath(error.dataPath);
if (missingProperty) {
const message = error.message.replace(/ '.*$/, '');
return set({ ...prev }, `${fullPath}.${missingProperty}`, message);
}
if (additionalProperty) {
const { message } = error;
return set({ ...prev }, `${fullPath}.${additionalProperty}`, message);
}
return set({ ...prev }, fullPath, error.message);
}, {});
}
function getDataPath(dataPath) {
const fullPath = dataPath
.replace(/^\./, '')
.replace(/\\/, '');
const [root] = fullPath.split('.');
return { root, fullPath };
}
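// Illustrative input/output for the exported parseErrors (values are
// hypothetical, shapes follow the mapping implemented above): an ajv type
// error on `.body.name` puts the offending value under `actual` and the ajv
// message under `expected`.
//
//   const errors = [{
//     dataPath: '.body.name',
//     params: { type: 'string' },
//     message: 'should be string',
//   }];
//   parseErrors(errors, { body: { name: 123 } });
//   // => { actual:   { body: { name: 123 } },
//   //      expected: { body: { name: 'should be string' } } }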
<|start_filename|>lib/plugins/should/schema-matcher.js<|end_filename|>
const validators = require('../../validators');
module.exports = (Assertion, options) => {
Assertion.add('matchApiSchema', function addApiSchemaMethod(apiDefinitionsPath) {
const {
predicate, actual, expected, matchMsg,
} = validators.schemaValidator(
this.obj,
{ ...options, apiDefinitionsPath: apiDefinitionsPath || options.apiDefinitionsPath },
);
this.params = {
message: matchMsg,
expected,
actual,
};
predicate.should.be.true();
});
};
<|start_filename|>lib/helpers/response-adapter.js<|end_filename|>
const get = require('lodash.get');
const urijs = require('uri-js');
function parseResponse(response) {
if (isFastifyResponse(response)) {
return parseFastifyResponse(response);
}
return parseGenericResponse(response);
}
function parseFastifyResponse(response) {
try {
const { res, req } = response.raw;
return {
request: {
method: getRequestMethod(req || res),
path: resolveUrlDataFastify(req.headers, req).path,
},
response: {
status: response.statusCode,
headers: response.headers,
body: response.json(),
},
};
} catch (error) {
return undefined;
}
}
function parseGenericResponse(response) {
try {
const { request, config } = response;
return {
request: {
method: getRequestMethod(config || request || response),
path: getRequestPath(request || response),
},
response: {
status: getResponseCode(response),
headers: getResponseHeaders(response),
body: getResponseBody(response),
},
};
} catch (error) {
return undefined;
}
}
function getRequestMethod(request) {
const { method } = request;
return method && method.toLowerCase();
}
function getRequestPath(request) {
// request promise
if (request.uri) {
return cleanPath(request.uri.pathname);
}
// axios
if (request.path) {
return cleanPath(request.path);
}
// supertest
if (get(request, 'req.path')) {
return cleanPath(request.req.path);
}
return undefined;
}
function getResponseHeaders(response) {
return response.headers;
}
function getResponseCode(response) {
// request-promise
if (response.statusCode) {
return response.statusCode;
}
// other
return response.status;
}
function getResponseBody(response) {
// request-promise/other
if (response.body) {
return response.body;
}
// axios
if (response.data) {
return response.data;
}
return undefined;
}
function cleanPath(path) {
return path
.split('?')[0] // clean query params
.replace(/\/*$/, ''); // clean trailing slashes
}
function isFastifyResponse(response) {
const userAgent = response && response.raw && response.raw.req && response.raw.req.headers['user-agent'];
if (userAgent) {
return userAgent.toLowerCase() === 'lightmyrequest';
}
return false;
}
function resolveUrlDataFastify(headers, req) {
const scheme = `${headers[':scheme'] ? `${headers[':scheme']}:` : ''}//`;
const host = headers[':authority'] || headers.host;
const path = headers[':path'] || req.url;
return urijs.parse(scheme + host + path);
}
module.exports = {
parseResponse,
};
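// For reference, every supported client (request-promise, axios, supertest,
// fastify inject) is normalized to the same shape consumed by the validators;
// the values below are illustrative:
//
//   parseResponse(response);
//   // => {
//   //      request:  { method: 'get', path: '/v2/pet/123' },
//   //      response: { status: 200, headers: { ... }, body: { ... } }
//   //    }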
<|start_filename|>lib/plugins/should/status-matcher.js<|end_filename|>
const validators = require('../../validators');
module.exports = (Assertion) => {
buildMatcher('successful', 200);
buildMatcher('created', 201);
buildMatcher('badRequest', 400);
buildMatcher('unauthorized', 401);
buildMatcher('forbidden', 403);
buildMatcher('notFound', 404);
buildMatcher('serverError', 500);
buildMatcher('serviceUnavailable', 503);
buildMatcher('gatewayTimeout', 504);
buildMatcher('status');
function buildMatcher(str, statusToTest) {
Assertion.add(str, function statusCodeMatcher(customStatus) {
const expectedStatus = statusToTest || customStatus;
const {
predicate, actual, expected, matchMsg,
} = validators.statusValidator(expectedStatus, this.obj);
this.params = {
message: matchMsg,
expected,
actual,
};
predicate.should.be.true();
});
}
};
<|start_filename|>lib/helpers/coverage.js<|end_filename|>
/* eslint-disable no-console */
const chalk = require('chalk');
const get = require('lodash.get');
const set = require('lodash.set');
const flatten = require('lodash.flatten');
const fs = require('fs');
const schemaUtils = require('./schema-utils');
const buildTable = require('./coverage-table');
let coverage;
let schema;
module.exports.init = ({
reportCoverage, apiDefinitionsPath, buildSchemaOptions, exportCoverage,
}) => {
coverage = {};
const buildSchemaOptionsOverride = buildSchemaOptions || {};
schema = schemaUtils.getSchemaByFilesPath(apiDefinitionsPath, buildSchemaOptionsOverride);
let parsedCoverage;
if (reportCoverage === true || exportCoverage === true) {
process.on('beforeExit', () => {
parsedCoverage = getReport();
});
}
if (reportCoverage === true) {
process.on('beforeExit', () => {
printReport(parsedCoverage);
});
}
if (exportCoverage === true) {
process.on('beforeExit', () => {
printReportToFile(parsedCoverage);
});
}
};
module.exports.setCoverage = ({ path, method, status }) => {
if (!schema) {
console.warn('Coverage was not initiated. If you want to calculate coverage, make sure to call init() on coverage helper. Skipping coverage calculation');
return;
}
const route = schemaUtils.pathMatcher(schema, path, method);
set(coverage, `[${route}|${method}|${status}]`, true);
};
function getReport() {
const uncoveredDefinitions = Object.keys(schema)
.map((route) => getUncoveredDefinitions(route));
return flatten(uncoveredDefinitions)
.filter((api) => !!api);
}
module.exports.getReport = getReport;
function getUncoveredDefinitions(route) {
return Object
.keys(schema[route])
.map((method) => {
const statuses = Object
.keys(schema[route][method].responses)
.filter((status) => !(get(coverage, `[${route}|${method}|${status}]`)))
.toString();
return statuses ? { route, method: method.toUpperCase(), statuses } : undefined;
});
}
function printReport(report) {
console.info(chalk.bold('* API definitions coverage report *'));
if (report.length === 0) {
console.info(chalk.green('\nAll API definitions are covered\n'));
} else {
console.info(chalk.red('\nUncovered API definitions found'));
const table = buildTable(report);
console.info(table);
}
}
function printReportToFile(report) {
try {
fs.writeFileSync('./coverage.json', JSON.stringify(report));
} catch (e) {
console.info(chalk.red('Error writing report to file'));
console.info(chalk.red(e.message));
console.info(chalk.red(e.stack));
}
}
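// Bookkeeping sketch: setCoverage() first resolves the concrete request path to
// its schema route via pathMatcher, then flags a "route|method|status" key; the
// values below are illustrative and assume the schema defines '/v2/pet/:petId':
//
//   setCoverage({ path: '/v2/pet/123', method: 'get', status: 200 });
//   // coverage => { '/v2/pet/:petId|get|200': true }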
<|start_filename|>test/chai-fastify.test.js<|end_filename|>
const fastify = require('fastify');
const { expect, use } = require('chai');
const path = require('path');
const apiSchema = require('../lib/index');
const responses = require('./data/responses');
const apiDefinitionsPath = path.join(__dirname, 'data', 'schema.yaml');
use(apiSchema.chaiPlugin({ apiDefinitionsPath }));
describe('Fastify response schema validation', () => {
let app;
afterEach(() => app.close());
async function initApp({ status, body, headers }) {
app = fastify({ logger: true });
app.get('/v2/pet/:petId', (req, reply) => {
reply
.status(status)
.headers(headers)
.send(body);
});
await app.ready();
}
it('Response body does matches the schema', async () => {
await initApp({
status: 200,
body: responses.body.valid.value,
headers: responses.headers.valid.value,
});
const response = await app.inject()
.get('/v2/pet/123')
.end();
expect(response).to.matchApiSchema();
});
it('Response body does not match the schema', async () => {
await initApp({
status: 200,
body: responses.body.invalid.value,
headers: responses.headers.valid.value,
});
const response = await app.inject()
.get('/v2/pet/123')
.end();
expect(response).not.to.matchApiSchema();
});
it('successful', async () => {
await initApp({
status: 200,
body: responses.body.valid.value,
headers: responses.headers.valid.value,
});
const response = await app.inject()
.get('/v2/pet/123')
.end();
expect(response)
.to.be.successful();
});
it('serverError', async () => {
await initApp({
status: 500,
});
const response = await app.inject()
.get('/v2/pet/123')
.end();
expect(response)
.to.be.serverError();
});
});
<|start_filename|>test/base64.test.js<|end_filename|>
const { expect } = require('chai');
const base64 = require('../lib/helpers/base64');
describe('base64', () => {
const str = '#%this.is.random_string%$%';
it('encode', () => {
expect(base64.encode(str)).to.eql('IyV0aGlzLmlzLnJhbmRvbV9zdHJpbmclJCU=');
});
it('decode', () => {
const encodedStr = base64.encode(str);
expect(base64.decode(encodedStr)).to.eql(str);
});
});
<|start_filename|>test/validation.test.js<|end_filename|>
const { expect } = require('chai');
const path = require('path');
const { schemaValidator, statusValidator } = require('../lib/index').validators;
const { request } = require('./helpers/response-generator');
const responses = require('./data/responses');
const apiDefinitionsPath = path.join(__dirname, 'data', 'schema.yaml');
const wrongApiDefinitionsPath = '/not/a/path';
const invalidApiDefinitionsPath = path.join(__dirname, 'data', 'invalid-schema.yaml');
describe('Schema validation', () => {
it('Response headers and body matches the schema', async () => {
const response = await request({
status: 200,
body: responses.body.valid.value,
headers: responses.headers.valid.value,
});
expect(schemaValidator(response, { apiDefinitionsPath })).to.be.like({
actual: null,
errors: null,
expected: null,
matchMsg: 'expected response to match API schema',
noMatchMsg: 'expected response to not match API schema',
predicate: true,
});
});
it('Response headers and body matches the schema with custom Content-Type', async () => {
const apiDefinitionsPathCustomContentType = path.join(__dirname, 'data', 'schema-custom-content-type.yaml');
const response = await request({
status: 200,
body: responses.body.valid.value,
headers: {
...responses.headers.valid.value,
'Content-Type': 'application/hal+json',
},
});
expect(schemaValidator(
response,
{ apiDefinitionsPath: apiDefinitionsPathCustomContentType },
)).to.be.like({
actual: null,
errors: null,
expected: null,
matchMsg: 'expected response to match API schema',
noMatchMsg: 'expected response to not match API schema',
predicate: true,
});
});
it('Returns an error when no schema is defined for the response content-type', async () => {
const apiDefinitionsPathCustomContentType = path.join(__dirname, 'data', 'schema-custom-content-type.yaml');
const response = await request({
status: 200,
body: responses.body.valid.value,
headers: responses.headers.valid.value,
});
expect(schemaValidator(
response,
{ apiDefinitionsPath: apiDefinitionsPathCustomContentType },
)).to.be.like({
actual: {},
errors: [
{
message: 'No schema defined for response content-type "application/json"',
},
],
expected: {},
matchMsg: 'expected response to match API schema',
noMatchMsg: 'expected response to not match API schema',
predicate: false,
});
});
it('Response body does not match the schema', async () => {
const response = await request({
status: 200,
body: responses.body.invalid.value,
headers: responses.headers.valid.value,
});
expect(schemaValidator(response, { apiDefinitionsPath })).to.be.like({
predicate: false,
matchMsg: 'expected response to match API schema',
noMatchMsg: 'expected response to not match API schema',
actual: responses.body.invalid.actual,
errors: responses.body.invalid.errors,
expected: responses.body.invalid.expected,
});
});
it('Response headers does not match the schema', async () => {
const response = await request({
status: 200,
body: responses.body.valid.value,
headers: responses.headers.invalid.value,
});
expect(schemaValidator(response, { apiDefinitionsPath })).to.be.like({
predicate: false,
matchMsg: 'expected response to match API schema',
noMatchMsg: 'expected response to not match API schema',
actual: responses.headers.invalid.actual,
errors: responses.headers.invalid.errors,
expected: responses.headers.invalid.expected,
});
});
it('Response object does not contain method', () => {
expect(expectationTester({ path: '/pet/123', status: 200, apiDefinitionsPath }))
.to.throw("required properties for validating schema are missing: 'method'");
});
it('Response object does not contain path', () => {
expect(expectationTester({ method: 'get', status: 200, apiDefinitionsPath }))
.to.throw("required properties for validating schema are missing: 'path'");
});
it('Response object does not contain status', () => {
expect(expectationTester({ method: 'get', path: '/pet/123', apiDefinitionsPath }))
.to.throw("required properties for validating schema are missing: 'status'");
});
it('API definitions path is not set', () => {
expect(expectationTester({
method: 'get', path: '/pet/123', status: 200,
})).to.throw("'apiDefinitionsPath' is required");
});
it('API definitions path not valid', () => {
expect(expectationTester({
method: 'get', path: '/pet/123', status: 200, apiDefinitionsPath: wrongApiDefinitionsPath,
})).to.throw("ENOENT: no such file or directory, open '/not/a/path'");
});
it('Schema not valid', () => {
expect(expectationTester({
method: 'get', path: '/pet/123', status: 200, apiDefinitionsPath: invalidApiDefinitionsPath,
})).to.throw('end of the stream or a document separator is expected');
});
it('API definitions file does not contain the request method', () => {
expect(expectationTester({
method: 'options', path: '/pet/123', status: 200, apiDefinitionsPath,
})).to.throw('schema not found for {"path":"/pet/123","method":"options","status":200}');
});
it('API definitions file does not contain the response status code', () => {
expect(expectationTester({
method: 'get', path: '/pet/123', status: 302, apiDefinitionsPath,
})).to.throw('schema not found for {"path":"/pet/123","method":"get","status":302}');
});
it('apiDefinitionsPath is missing', () => {
expect(() => schemaValidator()).to.throw("'apiDefinitionsPath' is required");
});
it('Response object is undefined', () => {
expect(() => schemaValidator(undefined, { apiDefinitionsPath }))
.to.throw('failed to extract response details');
});
it('API definitions file does not contain the request path', () => {
expect(expectationTester({
method: 'get', path: '/pet/123/test', status: 200, apiDefinitionsPath,
})).to.throw('schema not found for {"path":"/pet/123/test","method":"get","status":200}');
expect(expectationTester({
method: 'get', path: '/pet', status: 200, apiDefinitionsPath,
})).to.throw('schema not found for {"path":"/pet","method":"get","status":200}');
expect(expectationTester({
method: 'get', path: '/predators/123', status: 200, apiDefinitionsPath,
})).to.throw('schema not found for {"path":"/predators/123","method":"get","status":200}');
});
it('successful status code match', async () => {
const response = await request({ status: 200, body: responses.body.valid.value });
expect(statusValidator(200, response)).to.eql({
predicate: true,
actual: {
body: responses.body.valid.value,
status: 200,
},
expected: {
status: 200,
},
matchMsg: 'expected http status code 200 to be 200',
noMatchMsg: 'expected http status code 200 to not be 200',
});
});
it('unsuccessful status code match', async () => {
const response = await request({ status: 204, body: responses.body.valid.value });
expect(statusValidator(200, response)).to.eql({
predicate: false,
actual: {
body: responses.body.valid.value,
status: 204,
},
expected: {
status: 200,
},
matchMsg: 'expected http status code 204 to be 200',
noMatchMsg: 'expected http status code 204 to not be 200',
});
});
});
function expectationTester({
path, method, status, apiDefinitionsPath,
}) {
return function matchApiSchema() {
const response = {
request: { method, path },
statusCode: status,
};
schemaValidator(response, { apiDefinitionsPath });
};
}
<|start_filename|>lib/helpers/schema-utils.js<|end_filename|>
const get = require('lodash.get');
const apiSchemaBuilder = require('api-schema-builder');
const { messages } = require('./common');
const base64 = require('./base64');
const getSchema = (() => {
const schemas = {};
return (filePath, schemaBuilderOpts) => {
const encodedFilePath = base64.encode(filePath);
if (!schemas[encodedFilePath]) {
schemas[encodedFilePath] = apiSchemaBuilder.buildSchemaSync(filePath, schemaBuilderOpts);
}
return schemas[encodedFilePath];
};
})();
module.exports.getSchemaByFilesPath = (apiDefinitionsPath, buildSchemaOptions) => {
const filesPaths = Array.isArray(apiDefinitionsPath) ? apiDefinitionsPath : [apiDefinitionsPath];
const schemas = filesPaths.map((path) => getSchema(path, buildSchemaOptions));
// eslint-disable-next-line array-callback-return,consistent-return
const schema = schemas.reduce((acc, cur) => {
// eslint-disable-next-line no-restricted-syntax
for (const key of Object.keys(cur)) {
if (acc[key]) {
throw new Error(`${messages.DUPLICATE_API_DEFINITION}: ${key}`);
}
acc[key] = cur[key];
}
return acc;
}, {});
return schema;
};
module.exports.getValidatorByPathMethodAndCode = (schema, request, response) => {
const route = pathMatcher(schema, request.path, request.method);
return get(schema, `${route}.${request.method}.responses.${response.status}`);
};
module.exports.pathMatcher = pathMatcher;
function pathMatcher(routes, path, method) {
return Object
.keys(routes)
.sort((currentRoute, nextRoute) => {
const firstResult = calculateRouteScore(currentRoute);
const secondResult = calculateRouteScore(nextRoute);
return firstResult - secondResult;
})
.filter((route) => {
const routeArr = route.split('/');
const pathArr = path.split('/');
if (routeArr.length !== pathArr.length) return false;
return routeArr.every((seg, idx) => {
if (seg === pathArr[idx]) return true;
// if current path segment is param
if (seg.startsWith(':') && pathArr[idx]) return true;
return false;
});
}).filter(((route) => routes[route][String(method).toLowerCase()]))[0];
}
function calculateRouteScore(route) {
return route
// split to path segments
.split('/')
// mark path params locations
.map((pathSegment) => pathSegment.includes(':'))
// give weight to each path segment according to its location
.map((isPathParam, i, pathSegments) => isPathParam * (10 ** pathSegments.length - i))
.reduce((sum, seg) => sum + seg, 0); // summarize the path score
}
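// Matching sketch: calculateRouteScore() gives routes containing path params a
// higher score, so the ascending sort in pathMatcher() tries literal routes
// first and a concrete segment wins over a param at the same position.
// Example values are hypothetical:
//
//   pathMatcher(
//     { '/v2/pet/mine': { get: {} }, '/v2/pet/:petId': { get: {} } },
//     '/v2/pet/mine',
//     'get'
//   );
//   // => '/v2/pet/mine'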
<|start_filename|>lib/helpers/coverage-table.js<|end_filename|>
const chalk = require('chalk');
const columnify = require('columnify');
module.exports = (data) => columnify(data, {
columnSplitter: ' | ',
minWidth: 10,
headingTransform(title) {
switch (title) {
case 'statuses':
return chalk.yellow.underline.bold(`*${title.toUpperCase()}*`);
case 'method':
return chalk.green.underline.bold(`*${title.toUpperCase()}*`);
default:
return chalk.cyan.underline.bold(`*${title.toUpperCase()}*`);
}
},
});
<|start_filename|>lib/plugins/jest/index.js<|end_filename|>
const schemaMatcher = require('./schema-matcher');
const statusCodeMatcher = require('./status-matcher');
const coverage = require('../../helpers/coverage');
const { messages } = require('../../helpers/common');
module.exports = function apiSchemaPlugin(options) {
if (!(options instanceof Object) || !options.apiDefinitionsPath) {
throw new Error(messages.REQUIRED_API_DEFINITIONS_PATH);
}
coverage.init(options);
schemaMatcher(options);
statusCodeMatcher();
};
<|start_filename|>test/helpers/response-generator.js<|end_filename|>
const nock = require('nock');
const rp = require('request-promise-native');
const axios = require('axios');
const request = require('supertest');
const defaults = {
path: '/v2/pet/123',
url: 'http://www.google.com',
method: 'get',
};
module.exports.request = (options = {}) => {
mock(options);
return rp({
method: options.method || defaults.method,
baseUrl: options.url || defaults.url,
uri: options.uri || options.path || defaults.path,
resolveWithFullResponse: true,
simple: options.simple,
json: true,
});
};
module.exports.axios = (options = {}) => {
mock(options);
return axios({
method: options.method || defaults.method,
baseURL: options.uri || defaults.url,
url: options.uri || defaults.path,
json: true,
});
};
module.exports.supertest = (options = {}) => {
mock(options);
const method = options.method || defaults.method;
const uri = options.uri || options.path || defaults.path;
return request(defaults.url)[method](uri);
};
module.exports.mock = mock;
function mock({ status, body, headers }) {
nock.cleanAll();
nock(/./).get(/./).reply(status, body, headers);
}
<|start_filename|>test/jest/jest-fastify.test.js<|end_filename|>
const fastify = require('fastify');
const path = require('path');
const { schemaValidator } = require('../../lib').validators;
const responses = require('../data/responses');
const apiSchema = require('../../lib');
const apiDefinitionsPath = path.join(__dirname, '../data', 'schema.yaml');
apiSchema.jestPlugin({ apiDefinitionsPath });
describe('Fastify response schema validation', () => {
let app;
afterEach(() => app.close());
async function initApp({ status, body, headers }) {
app = fastify({ logger: true });
app.get('/v2/pet/:petId', (req, reply) => {
reply
.status(status)
.headers(headers)
.send(body);
});
await app.ready();
}
it('Response headers and body matches the schema', async () => {
await initApp({
status: 200,
body: responses.body.valid.value,
headers: responses.headers.valid.value,
});
const response = await app.inject()
.get('/v2/pet/123')
.end();
expect(schemaValidator(response, { apiDefinitionsPath }))
.toMatchObject({
actual: null,
errors: null,
expected: null,
matchMsg: 'expected response to match API schema',
noMatchMsg: 'expected response to not match API schema',
predicate: true,
});
});
it('Response body does not match the schema', async () => {
await initApp({
status: 200,
body: responses.body.invalid.value,
headers: responses.headers.valid.value,
});
const response = await app.inject()
.get('/v2/pet/123')
.end();
expect(response).not.toMatchApiSchema();
});
it('successful', async () => {
await initApp({
status: 200,
body: responses.body.valid.value,
headers: responses.headers.valid.value,
});
const response = await app.inject()
.get('/v2/pet/123')
.end();
expect(response)
.toBeSuccessful();
});
it('serverError', async () => {
await initApp({
status: 500,
});
const response = await app.inject()
.get('/v2/pet/123')
.end();
expect(response)
.toBeServerError();
});
});
<|start_filename|>lib/plugins/jest/status-matcher.js<|end_filename|>
const diff = require('jest-diff');
const matcherUtils = require('jest-matcher-utils');
const validators = require('../../validators');
module.exports = () => {
buildMatcher('toBeSuccessful', 200);
buildMatcher('toBeCreated', 201);
buildMatcher('toBeBadRequest', 400);
buildMatcher('toBeUnauthorized', 401);
buildMatcher('toBeForbidden', 403);
buildMatcher('toBeNotFound', 404);
buildMatcher('toBeServerError', 500);
buildMatcher('toBeServiceUnavailable', 503);
buildMatcher('toBeGatewayTimeout', 504);
buildMatcher('toHaveStatus');
function buildMatcher(str, statusToTest) {
expect.extend({
[str]: (validatedResponse, customStatus) => {
const expectedStatus = statusToTest || customStatus;
const {
predicate, expected, actual,
} = validators.statusValidator(expectedStatus, validatedResponse);
const pass = predicate;
const message = pass
? () => `${matcherUtils.matcherHint(str)
}\n\n`
+ `Expected: ${matcherUtils.printExpected(expected)}\n`
+ `Received: ${matcherUtils.printReceived(actual)}`
: () => {
const difference = diff(expected, actual, {
expand: this.expand,
});
return (
`${matcherUtils.matcherHint(str)
}\n\n${
difference && difference.includes('- Expect')
? `Difference:\n\n${difference}`
: `Expected: ${matcherUtils.printExpected(expected)}\n`
+ `Received: ${matcherUtils.printReceived(actual)}`}`
);
};
return { actual, message, pass };
},
});
}
};
<|start_filename|>test/chai-coverage.test.js<|end_filename|>
const path = require('path');
const sinon = require('sinon');
const chalk = require('chalk');
const { use } = require('chai');
const apiSchema = require('../lib/index');
const coverage = require('../lib/helpers/coverage');
const apiDefinitionsPath = path.join(__dirname, 'data', 'schema.yaml');
describe('Chai.js plugin coverage', () => {
const sandbox = sinon.createSandbox();
before(() => {
this.spy = sandbox.spy(console, 'info');
});
after(() => {
sandbox.restore();
});
afterEach(() => {
process.removeAllListeners('beforeExit');
sandbox.resetHistory();
});
it('reportCoverage: true', async () => {
use(apiSchema.chaiPlugin({ apiDefinitionsPath, reportCoverage: true }));
// emit exit event in order to trigger the reporting
process.emit('beforeExit');
sinon.assert.calledThrice(this.spy);
sinon.assert.calledWith(this.spy.firstCall, chalk.bold('* API definitions coverage report *'));
sinon.assert.calledWith(this.spy.secondCall, chalk.red('\nUncovered API definitions found'));
sinon.assert.calledWith(this.spy.thirdCall, `${chalk.cyan.underline.bold('*ROUTE*')} | ${chalk.green.underline.bold('*METHOD*')} | ${chalk.yellow.underline.bold('*STATUSES*')} \n/v2/pet | POST | 405 \n/v2/pet | PUT | 400,404,405\n/v2/pet/:petId | GET | 200 \n/v2/pet/:petId | POST | 405 \n/v2/pet/:petId | DELETE | 404 `);
});
it('reportCoverage: undefined', async () => {
use(apiSchema.chaiPlugin({ apiDefinitionsPath }));
// emit exit event in order to trigger the reporting
process.emit('beforeExit');
sinon.assert.notCalled(this.spy);
});
it('reportCoverage: false', async () => {
use(apiSchema.chaiPlugin({ apiDefinitionsPath, reportCoverage: false }));
// emit exit event in order to trigger the reporting
process.emit('beforeExit');
sinon.assert.notCalled(this.spy);
});
  it('reportCoverage: true - all API definitions covered', async () => {
use(apiSchema.chaiPlugin({ apiDefinitionsPath, reportCoverage: true }));
coverage.setCoverage({ path: '/v2/pet', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 400 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 404 });
coverage.setCoverage({ path: '/v2/pet', method: 'put', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'get', status: 200 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'post', status: 405 });
coverage.setCoverage({ path: '/v2/pet/123', method: 'delete', status: 404 });
// emit exit event in order to trigger the reporting
process.emit('beforeExit');
sinon.assert.calledTwice(this.spy);
sinon.assert.calledWith(this.spy.firstCall, chalk.bold('* API definitions coverage report *'));
sinon.assert.calledWith(this.spy.secondCall, chalk.green('\nAll API definitions are covered\n'));
});
});
<|start_filename|>test/data/errors.js<|end_filename|>
module.exports = [
{
dataPath: '.body',
keyword: 'additionalProperties',
message: 'should NOT have additional properties',
params: {
additionalProperty: 'manor',
},
schemaPath: '#/body/additionalProperties',
},
{
dataPath: ".headers['x-request-id']",
keyword: 'minLength',
message: 'should NOT be shorter than 4 characters',
params: {
limit: 4,
},
schemaPath: '#/properties/headers/properties/x-request-id/minLength',
},
{
dataPath: '.headers',
keyword: 'required',
message: "should have required property 'x-elapsed-time'",
params: {
missingProperty: 'x-elapsed-time',
},
schemaPath: '#/properties/headers/required',
},
{
dataPath: '.body',
keyword: 'required',
message: "should have required property 'name'",
params: {
missingProperty: 'name',
},
schemaPath: '#/properties/body/required',
},
{
dataPath: '.body.age',
keyword: 'minimum',
message: 'should be >= 0',
params: {
comparison: '>=',
exclusive: false,
limit: 0,
},
schemaPath: '#/properties/body/properties/age/minimum',
},
{
dataPath: '.body.details',
keyword: 'required',
message: "should have required property 'location'",
params: {
missingProperty: 'location',
},
schemaPath: '#/properties/body/properties/details/required',
},
{
dataPath: '.body.details.food',
keyword: 'minLength',
message: 'should NOT be shorter than 4 characters',
params: {
limit: 4,
},
schemaPath: '#/properties/body/properties/details/properties/food/minLength',
},
];
<|start_filename|>test/jest/jest-coverage.test.js<|end_filename|>
const path = require('path');
const sinon = require('sinon');
const chalk = require('chalk');
const apiSchema = require('../../lib/index');
const apiDefinitionsPath = path.join(__dirname, '..', 'data', 'schema.yaml');
describe('Jest plugin coverage', () => {
const sandbox = sinon.createSandbox();
beforeAll(() => {
this.spy = sandbox.spy(console, 'info');
});
afterAll(() => {
sandbox.restore();
});
afterEach(() => {
process.removeAllListeners('beforeExit');
sandbox.resetHistory();
});
it('reportCoverage: true', async () => {
apiSchema.jestPlugin({ apiDefinitionsPath, reportCoverage: true });
// emit exit event in order to trigger the reporting
process.emit('beforeExit');
sinon.assert.calledThrice(this.spy);
sinon.assert.calledWith(this.spy.firstCall, chalk.bold('* API definitions coverage report *'));
sinon.assert.calledWith(this.spy.secondCall, chalk.red('\nUncovered API definitions found'));
sinon.assert.calledWith(this.spy.thirdCall, `${chalk.cyan.underline.bold('*ROUTE*')} | ${chalk.green.underline.bold('*METHOD*')} | ${chalk.yellow.underline.bold('*STATUSES*')} \n/v2/pet | POST | 405 \n/v2/pet | PUT | 400,404,405\n/v2/pet/:petId | GET | 200 \n/v2/pet/:petId | POST | 405 \n/v2/pet/:petId | DELETE | 404 `);
});
it('reportCoverage: undefined', async () => {
apiSchema.jestPlugin({ apiDefinitionsPath });
// emit exit event in order to trigger the reporting
process.emit('beforeExit');
sinon.assert.notCalled(this.spy);
});
it('reportCoverage: false', async () => {
apiSchema.jestPlugin({ apiDefinitionsPath, reportCoverage: false });
// emit exit event in order to trigger the reporting
process.emit('beforeExit');
sinon.assert.notCalled(this.spy);
});
});
<|start_filename|>test/jest/jest-plugin.test.js<|end_filename|>
const path = require('path');
const apiSchema = require('../../lib');
const { request } = require('../helpers/response-generator');
const responses = require('../data/responses');
const apiDefinitionsPath = path.join(__dirname, '..', 'data', 'schema.yaml');
apiSchema.jestPlugin({ apiDefinitionsPath });
describe('Jest plugin schema', () => {
it('Response object matches the schema (response)', async () => {
const response = await request({
status: 200,
body: responses.body.valid.value,
headers: responses.headers.valid.value,
});
expect(response)
.toMatchApiSchema();
});
it('Response object matches the schema (direct value)', async () => {
expect({
method: 'get',
status: 200,
path: '/v2/pet/123',
body: responses.body.valid.value,
headers: responses.headers.valid.value,
})
.toMatchApiSchema();
});
it('Response body does not match the schema', async () => {
const response = await request({
status: 200,
body: responses.body.invalid.value,
headers: responses.headers.valid.value,
});
expect(response).not.toMatchApiSchema();
});
it('successful', async () => {
const response = await request({
status: 200,
simple: false,
});
expect(response)
.toBeSuccessful();
});
it('created', async () => {
const response = await request({
status: 201,
simple: false,
});
expect(response)
.toBeCreated();
});
it('badRequest', async () => {
const response = await request({
status: 400,
simple: false,
});
expect(response)
.toBeBadRequest();
});
it('unauthorized', async () => {
const response = await request({
status: 401,
simple: false,
});
expect(response)
.toBeUnauthorized();
});
it('forbidden', async () => {
const response = await request({
status: 403,
simple: false,
});
expect(response)
.toBeForbidden();
});
it('notFound', async () => {
const response = await request({
status: 404,
simple: false,
});
expect(response)
.toBeNotFound();
});
it('serverError', async () => {
const response = await request({
status: 500,
simple: false,
});
expect(response)
.toBeServerError();
});
it('serviceUnavailable', async () => {
const response = await request({
status: 503,
simple: false,
});
expect(response)
.toBeServiceUnavailable();
});
it('gatewayTimeout', async () => {
const response = await request({
status: 504,
simple: false,
});
expect(response)
.toBeGatewayTimeout();
});
it('custom code', async () => {
const response = await request({
status: 204,
simple: false,
});
expect(response)
.toHaveStatus(204);
});
it('No status code in response', () => {
expect(() => expect({
request: {
method: 'get',
path: '/pet/123',
},
})
.toBeGatewayTimeout())
.toThrow('required properties for validating schema are missing: \'status\'');
});
it('apiDefinitionsPath is missing', () => {
const error = '\'apiDefinitionsPath\' is required';
expect(() => apiSchema.jestPlugin())
.toThrow(error);
expect(() => apiSchema.jestPlugin({ apiDefinitionsPath: undefined }))
.toThrow(error);
});
});
<|start_filename|>lib/helpers/object-utils.js<|end_filename|>
module.exports.filterMissingProps = (obj, requiredProps) => requiredProps
.filter((prop) => obj[prop] === undefined);
<|start_filename|>lib/helpers/base64.js<|end_filename|>
module.exports.encode = (string) => {
const buffer = Buffer.from(string, 'utf8');
return buffer.toString('base64');
};
module.exports.decode = (string) => {
const buffer = Buffer.from(string, 'base64');
return buffer.toString();
};
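// Usage sketch (illustrative only):
//   const base64 = require('./base64');
//   base64.encode('hello');    // -> 'aGVsbG8='
//   base64.decode('aGVsbG8='); // -> 'hello'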
<|start_filename|>lib/plugins/chai/schema-matcher.js<|end_filename|>
const validators = require('../../validators');
module.exports = (Assertion, options) => {
Assertion.addMethod('matchApiSchema', function addApiSchemaMethod(apiDefinitionsPath) {
const {
predicate, actual, expected, matchMsg, noMatchMsg,
} = validators.schemaValidator(
this._obj,
{ ...options, apiDefinitionsPath: apiDefinitionsPath || options.apiDefinitionsPath },
);
this.assert(
predicate,
matchMsg,
noMatchMsg,
expected,
actual,
);
});
};
<|start_filename|>lib/index.js<|end_filename|>
const chaiPlugin = require('./plugins/chai');
const shouldPlugin = require('./plugins/should');
const jestPlugin = require('./plugins/jest');
const validators = require('./validators');
module.exports = {
chaiPlugin,
jestPlugin,
shouldPlugin,
validators,
};
<|start_filename|>lib/plugins/jest/schema-matcher.js<|end_filename|>
const diff = require('jest-diff').default;
const matcherUtils = require('jest-matcher-utils');
const validators = require('../../validators');
module.exports = (options) => {
expect.extend(
{
toMatchApiSchema: (validatedRequest, apiDefinitionsPath) => {
const {
predicate, expected, actual,
} = validators.schemaValidator(
validatedRequest,
{ ...options, apiDefinitionsPath: apiDefinitionsPath || options.apiDefinitionsPath },
);
const pass = predicate;
const message = pass
? () => `${matcherUtils.matcherHint('toMatchApiSchema')
}\n\n`
+ `Expected: ${matcherUtils.printExpected(expected)}\n`
+ `Received: ${matcherUtils.printReceived(actual)}`
: () => {
const difference = diff(expected, actual, {
expand: this.expand,
});
return (
`${matcherUtils.matcherHint('toMatchApiSchema')
}\n\n${
difference && difference.includes('- Expect')
? `Difference:\n\n${difference}`
: `Expected: ${matcherUtils.printExpected(expected)}\n`
+ `Received: ${matcherUtils.printReceived(actual)}`}`
);
};
return { actual, message, pass };
},
},
);
};
| nodeneil/api-contract-validator |
<|start_filename|>libs/drawpixels/idea_autocomplite.lua<|end_filename|>
--AUTOCOMPLETE FOR IDEA DO NOT REMOVE OR REQUIRE
---@class DRAW_PIXELS
drawpixels = {}
---Indicates border should be maintained:
function drawpixels.start_fill() end
---Stop border recording:
function drawpixels.stop_fill() end
---Fills the area considering the boundaries:
function drawpixels.fill_area(buffer_info, x,y, red, green, blue, alpha) end
---Method for drawing circle:
function drawpixels.circle(buffer_info, pos_x, pos_y, diameter, red, green, blue, alpha, antialiasing, width) end
---Method for drawing filled circle:
function drawpixels.filled_circle(buffer_info, pos_x, pos_y, diameter, red, green, blue, alpha, antialiasing) end
---Method for drawing rectangle:
function drawpixels.rect(buffer_info, pos_x, pos_y, rect_width, rect_height, red, green, blue, alpha) end
---Method for drawing filled rectangle:
function drawpixels.filled_rect(buffer_info, pos_x, pos_y, rect_width, rect_height, red, green, blue, alpha, angle) end
---Fill buffer with the color:
function drawpixels.fill(buffer_info, red, green, blue, alpha) end
---Draw a line between two points:
function drawpixels.line(buffer_info, x0, y0, x1, y1, red, green, blue, alpha, antialiasing, width) end
---Draw a gradient line between two points:
function drawpixels.gradient_line(buffer_info, x0, y0, x1, y1, red1, green1, blue1, red2, green2, blue2, alpha, antialiasing, width) end
---Draw an arc between two corners:
function drawpixels.arc(buffer_info, x, y, radius, from, to, red, green, blue, alpha) end
---Draw a filled arc between two corners:
function drawpixels.filled_arc(buffer_info, x, y, radius, from, to, red, green, blue, alpha) end
---Draw a gradient arc between two corners:
function drawpixels.gradient_arc(buffer_info, x, y, radius, from, to, red1, green1, blue1, red2, green2, blue2, alpha) end
---Draw a pixel:
function drawpixels.pixel(buffer_info, x, y, red, green, blue, alpha) end
---Read color from a position in the buffer:
function drawpixels.color(buffer_info, x, y) end
---Draw a bezier line between two points and one control point:
function drawpixels.bezier(buffer_info, x0, y0, xc, yc, x1, y1, red, green, blue, alpha) end
return
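-- Usage sketch (illustrative only, not part of the stub); assumes a valid buffer_info table
-- as expected by the functions above:
--   drawpixels.fill(buffer_info, 0, 0, 0, 255)                              -- clear to opaque black
--   drawpixels.filled_circle(buffer_info, 64, 64, 32, 255, 0, 0, 255, true)
--   drawpixels.line(buffer_info, 0, 0, 127, 127, 0, 255, 0, 255, true, 1)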
| JAlHund/Cold-Path-Map-Editor |
<|start_filename|>01-argus_camera/Dockerfile<|end_filename|>
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# Start from Isaac ROS base
ARG BASE_IMAGE="rbonghi/isaac-ros-tutorial:isaac-ros-base"
FROM ${BASE_IMAGE}
################ BUILD & INSTALL ISAAC ROS packages ####################
# Build Isaac ROS package
ENV ISAAC_ROS_WS /opt/isaac_ros_ws
ARG ROSINSTALL=argus_camera.rosinstall
# Copy wstool rosinstall
COPY ${ROSINSTALL} ${ROSINSTALL}
RUN apt-get update && \
apt-get install python3-vcstool python3-pip -y && \
mkdir -p ${ISAAC_ROS_WS}/src && \
vcs import ${ISAAC_ROS_WS}/src < ${ROSINSTALL} && \
rm -rf /var/lib/apt/lists/*
# Pull LFS files
RUN cd ${ISAAC_ROS_WS}/src/isaac_ros_common && git lfs pull && \
cd ${ISAAC_ROS_WS}/src/isaac_ros_image_pipeline && git lfs pull && \
cd ${ISAAC_ROS_WS}/src/isaac_ros_argus_camera && git lfs pull
# Change workdir
WORKDIR $ISAAC_ROS_WS
# Build Isaac ROS
RUN . /opt/ros/$ROS_DISTRO/install/setup.sh && \
colcon build --symlink-install \
--cmake-args \
-DCMAKE_BUILD_TYPE=Release
################ Final environment setup ####################
# source ros package from entrypoint
RUN sed --in-place --expression \
'$isource "$ISAAC_ROS_WS/install/setup.bash"' \
/ros_entrypoint.sh
# run ros package launch file
CMD ["ros2", "launch", "isaac_ros_argus_camera_mono", "isaac_ros_argus_camera_mono_launch.py"]
| hcostelha/isaac_ros_tutorial
<|start_filename|>tools/unsmile.c<|end_filename|>
/*
* Copyright 2011 <NAME>
*
* <NAME> licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#include <errno.h>
#include <fcntl.h>
#include <getopt.h>
#include <locale.h>
#include <stdio.h>
#include <string.h>
#include <sys/stat.h>
#include <unistd.h>
#include <wchar.h>
#include "usage.h"
#include <smile.h>
#define BUFFER_SIZE 65536
static const char unsmile_usage[] = "unsmile [-p|--pretty] <file>";
int main(int argc, char **argv)
{
int opt, fd = -1, status = 0, pretty = 0, ret = 0, idx = 0, utf8 = 1;
struct stat st;
const char* path;
char src[BUFFER_SIZE], dst[BUFFER_SIZE];
ssize_t bytes_read, bytes_decoded;
wchar_t bytes_out[BUFFER_SIZE];
static const struct option opts[] =
{
{ "pretty", no_argument, NULL, 'p' },
{ NULL, no_argument, NULL, '\0'}
};
while ((opt = getopt_long(argc, argv, "p", opts, NULL)) != -1) {
switch (opt) {
case 'p':
pretty = 1;
}
}
	/* after getopt_long(), the input file is the single remaining non-option argument */
	if (optind != argc - 1) {
		usage(unsmile_usage);
	}
	path = argv[optind];
if (!setlocale(LC_CTYPE, "en_US.UTF-8")) {
die("Can't set the specified locale! Check LANG, LC_CTYPE, LC_ALL");
}
status = stat(path, &st);
if (status < 0) {
die("open(\"%s\"): %s", path, strerror(errno));
} else if (S_ISDIR(st.st_mode)) {
die("open(\"%s\"): Is a directory", path);
}
fd = open(path, O_RDONLY);
if (fd == -1) {
die("open(\"%s\"): %s", path, strerror(errno));
}
// Real work begins...
if (smile_decode_block_init()) {
die("Unable to initialize smile decoder");
}
// Read block by block
for (;;) {
bytes_read = read(fd, src, BUFFER_SIZE);
if (bytes_read == -1) {
error("read(\"%s\"): %s", path, strerror(errno));
ret = -1;
goto exit;
} else if (bytes_read == 0) {
goto exit;
}
dprintf("read(\"%s\"): %zd bytes", path, bytes_read);
bytes_decoded = smile_decode_block(dst, BUFFER_SIZE, src, bytes_read);
// Convert to wide characters
memset(bytes_out, '\0', BUFFER_SIZE);
mbstowcs(bytes_out, dst, bytes_decoded);
/* 0 queries the current mode */
if (fwide(stdout, 0) == 0) {
/* stdout has no specific char mode yet, attempt to set to wide */
if (fwide(stdout, 1) <= 0) {
/* a value greater than zero switches to wide character mode */
utf8 = 0;
}
}
if (!utf8) {
for (idx = 0; idx < bytes_decoded; idx++) {
putchar(dst[idx]);
}
} else {
wprintf(L"%ls", bytes_out);
}
}
exit:
close(fd);
smile_decode_block_exit();
exit(ret);
}
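/*
 * Usage sketch (illustrative only, derived from the usage string above):
 *
 *   unsmile data.smile        decode a SMILE-encoded file to stdout
 *   unsmile -p data.smile     same, with the -p/--pretty flag parsed above
 */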
| pierre/libsmile |
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysDeptMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysDept;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
/**
* @Author 徐亮亮
 * @Description: Department management Mapper interface
* @Date 2019/1/18 21:38
*/
@Mapper
public interface SysDeptMapper extends BaseMapper<SysDept> {
/**
	 * Join query with dept_relation
	 *
	 * @param delFlag delete flag
	 * @return list of departments
*/
List<SysDept> selectDeptDtoList(String delFlag);
}
<|start_filename|>xll-common/src/main/java/com/xll/upms/common/constant/ServiceNameConstant.java<|end_filename|>
package com.xll.upms.common.constant;
/**
* @Author 徐亮亮
 * @Description: Service name constants
* @Date 2019/1/18 20:50
*/
public interface ServiceNameConstant {
/**
	 * SERVICE ID of the auth service (must match the zuul configuration)
*/
String AUTH_SERVICE = "pig-auth";
/**
	 * UPMS module
*/
String UMPS_SERVICE = "pig-upms-service";
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysUserRoleService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.admin.model.entity.SysUserRole;
/**
* @Author 徐亮亮
 * @Description: User-role table service
* @Date 2019/1/18 21:58
*/
public interface SysUserRoleService extends IService<SysUserRole> {
/**
	 * Delete the user's role relations by user ID
	 *
	 * @author 寻欢·李
	 * @date 2017-12-07 16:31:38
	 * @param userId user ID
	 * @return boolean
*/
Boolean deleteByUserId(Integer userId);
}
<|start_filename|>xll-auth/src/main/java/com/xll/upms/auth/config/SocialPropertiesConfig.java<|end_filename|>
package com.xll.upms.auth.config;
import lombok.Data;
/**
* @Author 徐亮亮
 * @Description: Basic login configuration
* @Date 2019/1/18 20:27
*/
@Data
public class SocialPropertiesConfig {
/**
* 提供商
*/
private String providerId;
/**
* 应用ID
*/
private String clientId;
/**
* 应用密钥
*/
private String clientSecret;
}
<|start_filename|>xll-modules/xll-mc-service/src/main/java/com/xll/upms/mc/config/SmsAliyunPropertiesConfig.java<|end_filename|>
package com.xll.upms.mc.config;
import lombok.Data;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import java.util.Map;
/**
* @Author 徐亮亮
 * @Description: Aliyun (Alidayu) SMS service configuration
* @Date 2019/1/18 21:23
*/
@Data
@Configuration
@ConditionalOnExpression("!'${sms.aliyun}'.isEmpty()")
@ConfigurationProperties(prefix = "sms.aliyun")
public class SmsAliyunPropertiesConfig {
/**
* 应用ID
*/
private String accessKey;
/**
* 应用秘钥
*/
private String secretKey;
/**
* 短信模板配置
*/
private Map<String, String> channels;
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/common/config/RabbitConfig.java<|end_filename|>
package com.xll.upms.admin.common.config;
import com.xll.upms.common.constant.MqQueueConstant;
import org.springframework.amqp.core.Queue;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* @Author 徐亮亮
 * @Description: RabbitMQ initialization configuration
* @Date 2019/1/18 21:30
*/
@Configuration
public class RabbitConfig {
/**
	 * Initialize the log queue
*
* @return
*/
@Bean
public Queue initLogQueue() {
return new Queue(MqQueueConstant.LOG_QUEUE);
}
/**
	 * Initialize the mobile verification-code queue
*
* @return
*/
@Bean
public Queue initMobileCodeQueue() {
return new Queue(MqQueueConstant.MOBILE_CODE_QUEUE);
}
/**
	 * Initialize the service-status-change queue
*
* @return
*/
@Bean
public Queue initMobileServiceStatusChangeQueue() {
return new Queue(MqQueueConstant.MOBILE_SERVICE_STATUS_CHANGE);
}
/**
	 * Initialize the DingTalk status-change queue
*
* @return
*/
@Bean
public Queue initDingTalkServiceStatusChangeQueue() {
return new Queue(MqQueueConstant.DINGTALK_SERVICE_STATUS_CHANGE);
}
/**
	 * Initialize the zipkin queue
*
* @return
*/
@Bean
public Queue initZipkinQueue() {
return new Queue(MqQueueConstant.ZIPKIN_NAME_QUEUE);
}
/**
	 * Initialize the route-configuration-change queue
*
* @return
*/
@Bean
public Queue initRouteConfigChangeQueue() {
return new Queue(MqQueueConstant.ROUTE_CONFIG_CHANGE);
}
}
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/UPMSGatewayApplication.java<|end_filename|>
package com.xll.upms.gateway;
import org.springframework.boot.SpringApplication;
import org.springframework.cloud.client.SpringCloudApplication;
import org.springframework.cloud.client.loadbalancer.LoadBalancerClient;
import org.springframework.cloud.client.loadbalancer.LoadBalancerInterceptor;
import org.springframework.cloud.netflix.feign.EnableFeignClients;
import org.springframework.cloud.netflix.zuul.EnableZuulProxy;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.core.annotation.Order;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
import org.springframework.web.filter.CorsFilter;
/**
* @Author 徐亮亮
 * @Description: Service gateway
* @Date 2019/1/18 21:20
*/
@EnableZuulProxy
@EnableFeignClients
@EnableGlobalMethodSecurity(prePostEnabled = true)
@SpringCloudApplication
@ComponentScan(basePackages = {"com.xll.upms.gateway", "com.xll.upms.common.bean"})
public class UPMSGatewayApplication {
public static void main(String[] args) {
SpringApplication.run(UPMSGatewayApplication.class, args);
}
	// CORS was configured here during development; it is now handled by the vue-cli dev proxy, and by an Nginx proxy in production deployments
@Bean
@Order(Integer.MAX_VALUE)
public CorsFilter corsFilter() {
final UrlBasedCorsConfigurationSource urlBasedCorsConfigurationSource = new UrlBasedCorsConfigurationSource();
final CorsConfiguration corsConfiguration = new CorsConfiguration();
corsConfiguration.setAllowCredentials(true);
corsConfiguration.addAllowedOrigin("*");
corsConfiguration.addAllowedHeader("*");
corsConfiguration.addAllowedMethod("*");
urlBasedCorsConfigurationSource.registerCorsConfiguration("/**", corsConfiguration);
return new CorsFilter(urlBasedCorsConfigurationSource);
}
@Bean
LoadBalancerInterceptor loadBalancerInterceptor(LoadBalancerClient loadBalance){
return new LoadBalancerInterceptor(loadBalance);
}
@Bean
public RestTemplate restTemplate() {
return new RestTemplate();
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/impl/SysOauthClientDetailsServiceImpl.java<|end_filename|>
package com.xll.upms.admin.service.impl;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.xll.upms.admin.mapper.SysOauthClientDetailsMapper;
import com.xll.upms.admin.model.entity.SysOauthClientDetails;
import com.xll.upms.admin.service.SysOauthClientDetailsService;
import org.springframework.stereotype.Service;
/**
* @Author 徐亮亮
 * @Description: Service implementation class
* @Date 2019/1/18 21:53
*/
@Service
public class SysOauthClientDetailsServiceImpl extends ServiceImpl<SysOauthClientDetailsMapper, SysOauthClientDetails> implements SysOauthClientDetailsService {
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/impl/SysRoleDeptServiceImpl.java<|end_filename|>
package com.xll.upms.admin.service.impl;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.xll.upms.admin.mapper.SysRoleDeptMapper;
import com.xll.upms.admin.model.entity.SysRoleDept;
import com.xll.upms.admin.service.SysRoleDeptService;
import org.springframework.stereotype.Service;
/**
* @Author 徐亮亮
 * @Description: Role-department relation service implementation
* @Date 2019/1/18 21:53
*/
@Service
public class SysRoleDeptServiceImpl extends ServiceImpl<SysRoleDeptMapper, SysRoleDept> implements SysRoleDeptService {
}
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/util/RibbonVersionHolder.java<|end_filename|>
package com.xll.upms.gateway.util;
import com.alibaba.ttl.TransmittableThreadLocal;
/**
* @Author 徐亮亮
 * @Description: Load-balancing handler
* @Date 2019/1/18 21:19
*/
public class RibbonVersionHolder {
private static final ThreadLocal<String> context = new TransmittableThreadLocal<>();
public static String getContext() {
return context.get();
}
public static void setContext(String value) {
context.set(value);
}
public static void clearContext() {
context.remove();
}
}
<|start_filename|>xll-modules/xll-daemon-service/src/main/java/com/xll/upms/daemon/job/UmpsDataflowJob.java<|end_filename|>
package com.xll.upms.daemon.job;
import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.zen.elasticjob.spring.boot.annotation.ElasticJobConfig;
import java.util.List;
/**
* @Author 徐亮亮
 * @Description: Dataflow job
* @Date 2019/1/18 21:21
*/
@ElasticJobConfig(cron = "0 0 0/1 * * ? ", shardingTotalCount = 3, shardingItemParameters = "0=Beijing,1=Shanghai,2=Guangzhou")
public class UmpsDataflowJob implements DataflowJob<Integer> {
@Override
public List<Integer> fetchData(ShardingContext shardingContext) {
return null;
}
@Override
public void processData(ShardingContext shardingContext, List<Integer> list) {
}
}
<|start_filename|>xll-auth/src/main/java/com/xll/upms/auth/UPMSAuthServerApplication.java<|end_filename|>
package com.xll.upms.auth;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.netflix.feign.EnableFeignClients;
import org.springframework.context.annotation.ComponentScan;
/**
* @Author 徐亮亮
 * @Description: Fetching user information is also handled by this application;
 * it acts as both the authorization server and the resource server
* @Date 2019/1/18 20:08
*/
@SpringBootApplication
@EnableDiscoveryClient
@EnableFeignClients
@ComponentScan(basePackages = {"com.xll.upms.auth", "com.xll.upms.common.bean"})
public class UPMSAuthServerApplication {
public static void main(String[] args) {
SpringApplication.run(UPMSAuthServerApplication.class, args);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysLogService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.common.entity.SysLog;
/**
* @Author 徐亮亮
 * @Description: Log table service
* @Date 2019/1/18 21:56
*/
public interface SysLogService extends IService<SysLog> {
/**
	 * Delete a log entry by ID (logical delete)
	 *
	 * @param id log ID
* @return true/false
*/
Boolean updateByLogId(Long id);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysRoleDeptMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysRoleDept;
import org.apache.ibatis.annotations.Mapper;
/**
* @Author 徐亮亮
 * @Description: Role-department relation Mapper interface
* @Date 2019/1/18 21:42
*/
@Mapper
public interface SysRoleDeptMapper extends BaseMapper<SysRoleDept> {
}
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/feign/MenuService.java<|end_filename|>
package com.xll.upms.gateway.feign;
import com.xll.upms.common.vo.MenuVO;
import com.xll.upms.gateway.feign.fallback.MenuServiceFallbackImpl;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import java.util.Set;
/**
* @Author 徐亮亮
 * @Description: Interface for invoking the upms service (Feign)
* @Date 2019/1/18 21:15
*/
@FeignClient(name = "xll-upms-service", fallback = MenuServiceFallbackImpl.class)
public interface MenuService {
/**
	 * Query menus by role name
	 *
	 * @param role role name
	 * @return menu list
*/
@GetMapping(value = "/menu/findMenuByRole/{role}")
Set<MenuVO> findMenuByRole(@PathVariable("role") String role);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/model/dto/UserDTO.java<|end_filename|>
package com.xll.upms.admin.model.dto;
import com.xll.upms.admin.model.entity.SysUser;
import lombok.Data;
import java.util.List;
/**
* @Author 徐亮亮
 * @Description: User DTO
* @Date 2019/1/18 21:47
*/
@Data
public class UserDTO extends SysUser {
/**
* 角色ID
*/
private List<Integer> role;
private Integer deptId;
/**
* 新密码
*/
private String newpassword1;
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/controller/DeptController.java<|end_filename|>
package com.xll.upms.admin.controller;
import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.xll.upms.admin.model.dto.DeptTree;
import com.xll.upms.admin.model.entity.SysDept;
import com.xll.upms.admin.service.SysDeptService;
import com.xll.upms.common.constant.CommonConstant;
import com.xll.upms.common.web.BaseController;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.Date;
import java.util.List;
/**
* @Author 徐亮亮
 * @Description: Department management controller
* @Date 2019/1/18 21:34
*/
@RestController
@RequestMapping("/dept")
public class DeptController extends BaseController {
@Autowired
private SysDeptService sysDeptService;
/**
* 通过ID查询
*
* @param id ID
* @return SysDept
*/
@GetMapping("/{id}")
public SysDept get(@PathVariable Integer id) {
return sysDeptService.selectById(id);
}
/**
* 返回树形菜单集合
*
* @return 树形菜单
*/
@GetMapping(value = "/tree")
public List<DeptTree> getTree() {
SysDept condition = new SysDept();
condition.setDelFlag(CommonConstant.STATUS_NORMAL);
return sysDeptService.selectListTree(new EntityWrapper<>(condition));
}
/**
* 添加
*
* @param sysDept 实体
* @return success/false
*/
@PostMapping
public Boolean add(@RequestBody SysDept sysDept) {
return sysDeptService.insertDept(sysDept);
}
/**
* 删除
*
* @param id ID
* @return success/false
*/
@DeleteMapping("/{id}")
public Boolean delete(@PathVariable Integer id) {
return sysDeptService.deleteDeptById(id);
}
/**
* 编辑
*
* @param sysDept 实体
* @return success/false
*/
@PutMapping
public Boolean edit(@RequestBody SysDept sysDept) {
sysDept.setUpdateTime(new Date());
return sysDeptService.updateDeptById(sysDept);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/impl/SysDeptRelationServiceImpl.java<|end_filename|>
package com.xll.upms.admin.service.impl;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.xll.upms.admin.mapper.SysDeptRelationMapper;
import com.xll.upms.admin.model.entity.SysDeptRelation;
import com.xll.upms.admin.service.SysDeptRelationService;
import org.springframework.stereotype.Service;
/**
* @Author 徐亮亮
 * @Description: Service implementation class
* @Date 2019/1/18 21:51
*/
@Service
public class SysDeptRelationServiceImpl extends ServiceImpl<SysDeptRelationMapper, SysDeptRelation> implements SysDeptRelationService {
}
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/feign/fallback/MenuServiceFallbackImpl.java<|end_filename|>
package com.xll.upms.gateway.feign.fallback;
import com.xll.upms.common.vo.MenuVO;
import com.xll.upms.gateway.feign.MenuService;
import com.xiaoleilu.hutool.collection.CollUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import java.util.Set;
/**
* @Author 徐亮亮
 * @Description: Fallback class for the menu service business logic
* @Date 2019/1/18 21:14
*/
@Slf4j
@Service
public class MenuServiceFallbackImpl implements MenuService {
@Override
public Set<MenuVO> findMenuByRole(String role) {
log.error("调用{}异常{}","findMenuByRole",role);
return CollUtil.newHashSet();
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/model/entity/SysRoleMenu.java<|end_filename|>
package com.xll.upms.admin.model.entity;
import com.baomidou.mybatisplus.activerecord.Model;
import com.baomidou.mybatisplus.annotations.TableId;
import com.baomidou.mybatisplus.annotations.TableName;
import com.baomidou.mybatisplus.enums.IdType;
import lombok.Data;
import java.io.Serializable;
/**
* @Author 徐亮亮
 * @Description: Role-menu table
* @Date 2019/1/18 21:50
*/
@Data
@TableName("sys_role_menu")
public class SysRoleMenu extends Model<SysRoleMenu> {
private static final long serialVersionUID = 1L;
/**
* 角色ID
*/
@TableId(type = IdType.INPUT)
private Integer roleId;
/**
* 菜单ID
*/
@TableId(type = IdType.INPUT)
private Integer menuId;
@Override
protected Serializable pkVal() {
return this.roleId;
}
@Override
public String toString() {
return "SysRoleMenu{" +
", roleId=" + roleId +
", menuId=" + menuId +
"}";
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysDictService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.admin.model.entity.SysDict;
/**
* @Author 徐亮亮
 * @Description: Dictionary table service
* @Date 2019/1/18 21:56
*/
public interface SysDictService extends IService<SysDict> {
}
<|start_filename|>xll-common/src/main/java/com/xll/upms/common/bean/interceptor/DataScope.java<|end_filename|>
package com.xll.upms.common.bean.interceptor;
import lombok.Data;
import java.util.HashMap;
import java.util.List;
/**
* @Author 徐亮亮
 * @Description: Data-scope permission; based on the guns implementation, enhances query parameters
* @Date 2019/1/18 20:38
*/
@Data
public class DataScope extends HashMap {
/**
	 * Field name used to limit the data scope
*/
private String scopeName = "dept_id";
/**
	 * The concrete data scope (department IDs)
*/
private List<Integer> deptIds;
/**
	 * Whether to query only the current department
*/
private Boolean isOnly = false;
}
<|start_filename|>xll-visual/xll-monitor/src/main/java/com/xll/upms/monitor/config/MonitorPropertiesConfig.java<|end_filename|>
package com.xll.upms.monitor.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/**
* @Author 徐亮亮
 * @Description: Monitor properties configuration class
* @Date 2019/1/18 22:04
*/
@Data
@Configuration
@ConfigurationProperties(prefix = "notifier")
public class MonitorPropertiesConfig {
private MonitorMobilePropertiesConfig mobile;
private MonitorDingTalkPropertiesConfig dingTalk;
}
<|start_filename|>xll-visual/xll-monitor/src/main/java/com/xll/upms/monitor/config/MonitorDingTalkPropertiesConfig.java<|end_filename|>
package com.xll.upms.monitor.config;
import lombok.Data;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
/**
* @Author 徐亮亮
 * @Description: DingTalk monitor properties configuration class
* @Date 2019/1/18 22:03
*/
@Data
@ConditionalOnExpression("!'${webhook}'.isEmpty()")
public class MonitorDingTalkPropertiesConfig {
/**
* 是否开启钉钉通知
*/
private Boolean enabled;
}
<|start_filename|>xll-common/src/main/java/com/xll/upms/common/util/template/MobileMsgTemplate.java<|end_filename|>
package com.xll.upms.common.util.template;
import lombok.Data;
import java.io.Serializable;
/**
* @Author 徐亮亮
 * @Description: SMS message template
* @Date 2019/1/18 20:55
*/
@Data
public class MobileMsgTemplate implements Serializable {
/**
* 手机号
*/
private String mobile;
/**
* 组装后的模板内容JSON字符串
*/
private String context;
/**
* 短信通道
*/
private String channel;
/**
* 短信类型(验证码或者通知短信)
* 暂时不用,留着后面存数据库备用吧
*/
private String type;
/**
* 短信签名
*/
private String signName;
/**
* 短信模板
*/
private String template;
public MobileMsgTemplate(String mobile, String context, String channel, String signName, String template){
this.mobile = mobile;
this.context = context;
this.channel = channel;
this.signName = signName;
this.template = template;
}
}
<|start_filename|>xll-common/src/main/java/com/xll/upms/common/constant/enums/EnumSmsChannel.java<|end_filename|>
package com.xll.upms.common.constant.enums;
/**
* @Author 徐亮亮
 * @Description: SMS channel enum
* @Date 2019/1/18 20:47
*/
public enum EnumSmsChannel {
/**
* 阿里大鱼短信通道
*/
ALIYUN("ALIYUN_SMS", "阿里大鱼");
/**
* 通道名称
*/
private String name;
/**
* 通道描述
*/
private String description;
EnumSmsChannel(String name, String description) {
this.name = name;
this.description = description;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
}
<|start_filename|>xll-modules/xll-daemon-service/src/main/java/com/xll/upms/daemon/UPMSDaemonApplication.java<|end_filename|>
package com.xll.upms.daemon;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
/**
* @Author 徐亮亮
 * @Description: Distributed task scheduling module
* @Date 2019/1/18 21:22
*/
@EnableDiscoveryClient
@SpringBootApplication
public class UPMSDaemonApplication {
public static void main(String[] args) {
SpringApplication.run(UPMSDaemonApplication.class, args);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/common/config/MybatisPlusConfig.java<|end_filename|>
package com.xll.upms.admin.common.config;
import com.baomidou.mybatisplus.plugins.PaginationInterceptor;
import com.xll.upms.common.bean.interceptor.DataScopeInterceptor;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* @Author 徐亮亮
 * @Description: MyBatis mapper configuration class
* @Date 2019/1/18 21:30
*/
@Configuration
@MapperScan("com.xll.upms.admin.mapper")
public class MybatisPlusConfig {
/**
	 * Pagination plugin
*
* @return PaginationInterceptor
*/
@Bean
public PaginationInterceptor paginationInterceptor() {
return new PaginationInterceptor();
}
/**
	 * Data-scope permission plugin
*
* @return DataScopeInterceptor
*/
@Bean
public DataScopeInterceptor dataScopeInterceptor() {
return new DataScopeInterceptor();
}
}
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/component/handler/MetadataCanaryRuleHandler.java<|end_filename|>
package com.xll.upms.gateway.component.handler;
/**
* @author lengleng
* @date 2018/10/19
*/
import com.xll.upms.common.constant.SecurityConstants;
import com.xll.upms.gateway.util.RibbonVersionHolder;
import com.netflix.loadbalancer.AbstractServerPredicate;
import com.netflix.loadbalancer.PredicateKey;
import com.netflix.loadbalancer.ZoneAvoidanceRule;
import com.netflix.niws.loadbalancer.DiscoveryEnabledServer;
import com.xiaoleilu.hutool.util.StrUtil;
import lombok.extern.slf4j.Slf4j;
import java.util.Map;
/**
* @Author 徐亮亮
 * @Description: Routing predicate for microservices
 * 1. when the eureka metadata defines a version, match it against the requested version
 * 2. when no version metadata exists, return true and route directly
* @Date 2019/1/18 21:12
*/
@Slf4j
public class MetadataCanaryRuleHandler extends ZoneAvoidanceRule {
@Override
public AbstractServerPredicate getPredicate() {
return new AbstractServerPredicate() {
@Override
public boolean apply(PredicateKey predicateKey) {
String targetVersion = RibbonVersionHolder.getContext();
RibbonVersionHolder.clearContext();
if (StrUtil.isBlank(targetVersion)) {
log.debug("客户端未配置目标版本直接路由");
return true;
}
DiscoveryEnabledServer server = (DiscoveryEnabledServer) predicateKey.getServer();
final Map<String, String> metadata = server.getInstanceInfo().getMetadata();
if (StrUtil.isBlank(metadata.get(SecurityConstants.VERSION))) {
log.debug("当前微服务{} 未配置版本直接路由");
return true;
}
if (metadata.get(SecurityConstants.VERSION).equals(targetVersion)) {
return true;
} else {
log.debug("当前微服务{} 版本为{},目标版本{} 匹配失败", server.getInstanceInfo().getAppName()
, metadata.get(SecurityConstants.VERSION), targetVersion);
return false;
}
}
};
}
}
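// Illustrative note (assumption, not taken from this repository): the version matched by this
// predicate is usually published via eureka instance metadata, e.g. a property such as
//   eureka.instance.metadata-map.version=1.0
// (assuming SecurityConstants.VERSION resolves to "version"), while the requested version is
// expected to be placed into RibbonVersionHolder elsewhere in the gateway before load balancing;
// instances declaring a different version are rejected, and instances with no version metadata
// are still routed.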
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/service/LogSendService.java<|end_filename|>
package com.xll.upms.gateway.service;
import com.netflix.zuul.context.RequestContext;
/**
* @Author 徐亮亮
 * @Description: Log sending business logic
* @Date 2019/1/18 21:17
*/
public interface LogSendService {
/**
	 * Send a message to the message channel
*
* @param requestContext requestContext
*/
void send(RequestContext requestContext);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysDeptRelationService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.admin.model.entity.SysDeptRelation;
/**
* @Author 徐亮亮
 * @Description: Department relation service
* @Date 2019/1/18 21:55
*/
public interface SysDeptRelationService extends IService<SysDeptRelation> {
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysMenuMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysMenu;
import com.xll.upms.common.vo.MenuVO;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* @Author 徐亮亮
 * @Description: Menu permission table Mapper interface
* @Date 2019/1/18 21:40
*/
@Mapper
public interface SysMenuMapper extends BaseMapper<SysMenu> {
/**
	 * Query menus by role name
	 *
	 * @param role role name
	 * @return menu list
*/
List<MenuVO> findMenuByRoleName(@Param("role") String role);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysUserRoleMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysUserRole;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
/**
* @Author 徐亮亮
 * @Description: User-role table Mapper interface
* @Date 2019/1/18 21:43
*/
@Mapper
public interface SysUserRoleMapper extends BaseMapper<SysUserRole> {
/**
* @Author 徐亮亮
 * @Description: Delete the user's role relations by user ID
* @Date 2019/1/18 21:44
*/
Boolean deleteByUserId(@Param("userId") Integer userId);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysRoleService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.baomidou.mybatisplus.plugins.Page;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.admin.model.dto.RoleDTO;
import com.xll.upms.admin.model.entity.SysRole;
import com.xll.upms.common.util.Query;
import java.util.List;
/**
* @Author 徐亮亮
 * @Description: Role service
* @Date 2019/1/18 21:58
*/
public interface SysRoleService extends IService<SysRole> {
/**
* 添加角色
*
* @param roleDto 角色信息
* @return 成功、失败
*/
Boolean insertRole(RoleDTO roleDto);
/**
* 分页查角色列表
*
* @param objectQuery 查询条件
* @param objectEntityWrapper wapper
* @return page
*/
Page selectwithDeptPage(Query<Object> objectQuery, EntityWrapper<Object> objectEntityWrapper);
/**
* 更新角色
* @param roleDto 含有部门信息
* @return 成功、失败
*/
Boolean updateRoleById(RoleDTO roleDto);
/**
* 通过部门ID查询角色列表
* @param deptId 部门ID
* @return 角色列表
*/
List<SysRole> selectListByDeptId(Integer deptId);
}
<|start_filename|>xll-modules/xll-mc-service/src/main/java/com/xll/upms/mc/listener/DingTalkServiceChangeReceiveListener.java<|end_filename|>
package com.xll.upms.mc.listener;
import com.xll.upms.common.constant.MqQueueConstant;
import com.xll.upms.mc.handler.DingTalkMessageHandler;
import lombok.extern.slf4j.Slf4j;
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* @Author 徐亮亮
 * @Description: Listens for service status changes and sends the notification request
* @Date 2019/1/18 21:25
*/
@Slf4j
@Component
@RabbitListener(queues = MqQueueConstant.DINGTALK_SERVICE_STATUS_CHANGE)
public class DingTalkServiceChangeReceiveListener {
@Autowired
private DingTalkMessageHandler dingTalkMessageHandler;
@RabbitHandler
public void receive(String text) {
long startTime = System.currentTimeMillis();
log.info("消息中心接收到钉钉发送请求-> 内容:{} ", text);
dingTalkMessageHandler.process(text);
long useTime = System.currentTimeMillis() - startTime;
log.info("调用 钉钉网关处理完毕,耗时 {}毫秒", useTime);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysOauthClientDetailsMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysOauthClientDetails;
import org.apache.ibatis.annotations.Mapper;
/**
* @Author 徐亮亮
 * @Description: OAuth client details Mapper
* @Date 2019/1/18 21:40
*/
@Mapper
public interface SysOauthClientDetailsMapper extends BaseMapper<SysOauthClientDetails> {
}
<|start_filename|>xll-visual/xll-monitor/src/main/java/com/xll/upms/monitor/config/MonitorMobilePropertiesConfig.java<|end_filename|>
package com.xll.upms.monitor.config;
import lombok.Data;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import java.util.ArrayList;
import java.util.List;
/**
* @Author 徐亮亮
 * @Description: Mobile (SMS) monitor properties configuration class
* @Date 2019/1/18 22:04
*/
@Data
@ConditionalOnExpression("!'${mobiles}'.isEmpty()")
public class MonitorMobilePropertiesConfig {
private Boolean enabled;
private List<String> mobiles = new ArrayList<>();
}
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/service/impl/PermissionServiceImpl.java<|end_filename|>
package com.xll.upms.gateway.service.impl;
import com.xll.upms.common.vo.MenuVO;
import com.xll.upms.gateway.feign.MenuService;
import com.xll.upms.gateway.service.PermissionService;
import com.xiaoleilu.hutool.collection.CollUtil;
import com.xiaoleilu.hutool.util.StrUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.stereotype.Service;
import org.springframework.util.AntPathMatcher;
import javax.servlet.http.HttpServletRequest;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* @Author 徐亮亮
 * @Description: Permission business logic
* @Date 2019/1/18 21:16
*/
@Slf4j
@Service("permissionService")
public class PermissionServiceImpl implements PermissionService {
@Autowired
private MenuService menuService;
private AntPathMatcher antPathMatcher = new AntPathMatcher();
@Override
public boolean hasPermission(HttpServletRequest request, Authentication authentication) {
		//ele-admin OPTIONS CORS handling; now handled by the front-end dev proxy instead of this approach, which carries risk
// if (HttpMethod.OPTIONS.name().equalsIgnoreCase(request.getMethod())) {
// return true;
// }
Object principal = authentication.getPrincipal();
List<SimpleGrantedAuthority> authorityList = (List<SimpleGrantedAuthority>) authentication.getAuthorities();
AtomicBoolean hasPermission = new AtomicBoolean(false);
if (principal != null) {
if (CollUtil.isEmpty(authorityList)) {
log.warn("角色列表为空:{}", authentication.getPrincipal());
return false;
}
Set<MenuVO> urls = new HashSet<>();
authorityList.stream().filter(authority ->
!StrUtil.equals(authority.getAuthority(), "ROLE_USER"))
.forEach(authority -> {
Set<MenuVO> menuVOSet = menuService.findMenuByRole(authority.getAuthority());
CollUtil.addAll(urls, menuVOSet);
});
urls.stream().filter(menu -> StrUtil.isNotEmpty(menu.getUrl())
&& antPathMatcher.match(menu.getUrl(), request.getRequestURI())
&& request.getMethod().equalsIgnoreCase(menu.getMethod()))
.findFirst().ifPresent(menuVO -> hasPermission.set(true));
}
return hasPermission.get();
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysZuulRouteService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.common.entity.SysZuulRoute;
/**
* @Author 徐亮亮
 * @Description: Dynamic route configuration table service
* @Date 2019/1/18 21:59
*/
public interface SysZuulRouteService extends IService<SysZuulRoute> {
/**
	 * Apply the route configuration immediately
* @return
*/
Boolean applyZuulRoute();
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/controller/UserController.java<|end_filename|>
package com.xll.upms.admin.controller;
import com.baomidou.mybatisplus.plugins.Page;
import com.xll.upms.admin.model.dto.UserDTO;
import com.xll.upms.admin.model.dto.UserInfo;
import com.xll.upms.admin.model.entity.SysUser;
import com.xll.upms.admin.model.entity.SysUserRole;
import com.xll.upms.admin.service.SysUserService;
import com.xll.upms.common.bean.config.FdfsPropertiesConfig;
import com.xll.upms.common.constant.CommonConstant;
import com.xll.upms.common.util.Query;
import com.xll.upms.common.util.R;
import com.xll.upms.common.vo.UserVO;
import com.xll.upms.common.web.BaseController;
import com.luhuiguo.fastdfs.domain.StorePath;
import com.luhuiguo.fastdfs.service.FastFileStorageClient;
import com.xiaoleilu.hutool.io.FileUtil;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* @Author 徐亮亮
 * @Description: User controller
* @Date 2019/1/18 21:37
*/
@RestController
@RequestMapping("/user")
public class UserController extends BaseController {
private static final PasswordEncoder ENCODER = new BCryptPasswordEncoder();
@Autowired
private FastFileStorageClient fastFileStorageClient;
@Autowired
private SysUserService userService;
@Autowired
private FdfsPropertiesConfig fdfsPropertiesConfig;
/**
	 * Get the current user's information (roles, permissions)
	 * and asynchronously initialize the user's department information
	 *
	 * @param userVo current user information
	 * @return user name
*/
@GetMapping("/info")
public R<UserInfo> user(UserVO userVo) {
UserInfo userInfo = userService.findUserInfo(userVo);
return new R<>(userInfo);
}
/**
* 通过ID查询当前用户信息
*
* @param id ID
* @return 用户信息
*/
@GetMapping("/{id}")
public UserVO user(@PathVariable Integer id) {
return userService.selectUserVoById(id);
}
/**
* 删除用户信息
*
* @param id ID
* @return R
*/
@ApiOperation(value = "删除用户", notes = "根据ID删除用户")
@ApiImplicitParam(name = "id", value = "用户ID", required = true, dataType = "int", paramType = "path")
@DeleteMapping("/{id}")
public R<Boolean> userDel(@PathVariable Integer id) {
SysUser sysUser = userService.selectById(id);
return new R<>(userService.deleteUserById(sysUser));
}
/**
* 添加用户
*
* @param userDto 用户信息
* @return success/false
*/
@PostMapping
public R<Boolean> user(@RequestBody UserDTO userDto) {
SysUser sysUser = new SysUser();
BeanUtils.copyProperties(userDto, sysUser);
sysUser.setDelFlag(CommonConstant.STATUS_NORMAL);
sysUser.setPassword(ENCODER.encode(userDto.getNewpassword1()));
userService.insert(sysUser);
userDto.getRole().forEach(roleId -> {
SysUserRole userRole = new SysUserRole();
userRole.setUserId(sysUser.getUserId());
userRole.setRoleId(roleId);
userRole.insert();
});
return new R<>(Boolean.TRUE);
}
/**
* 更新用户信息
*
* @param userDto 用户信息
* @return R
*/
@PutMapping
public R<Boolean> userUpdate(@RequestBody UserDTO userDto) {
return new R<>(userService.updateUser(userDto));
}
/**
* 通过用户名查询用户及其角色信息
*
* @param username 用户名
* @return UseVo 对象
*/
@GetMapping("/findUserByUsername/{username}")
public UserVO findUserByUsername(@PathVariable String username) {
return userService.findUserByUsername(username);
}
/**
* 通过手机号查询用户及其角色信息
*
* @param mobile 手机号
* @return UseVo 对象
*/
@GetMapping("/findUserByMobile/{mobile}")
public UserVO findUserByMobile(@PathVariable String mobile) {
return userService.findUserByMobile(mobile);
}
/**
* 通过OpenId查询
*
* @param openId openid
* @return 对象
*/
@GetMapping("/findUserByOpenId/{openId}")
public UserVO findUserByOpenId(@PathVariable String openId) {
return userService.findUserByOpenId(openId);
}
/**
* 分页查询用户
*
* @param params 参数集
* @param userVO 用户信息
* @return 用户集合
*/
@RequestMapping("/userPage")
public Page userPage(@RequestParam Map<String, Object> params, UserVO userVO) {
return userService.selectWithRolePage(new Query(params), userVO);
}
/**
* Upload a user avatar
* (problematic in multi-node deployments; a dedicated file server is recommended)
*
* @param file resource
* @return filename map
*/
@PostMapping("/upload")
public Map<String, String> upload(@RequestParam("file") MultipartFile file) {
String fileExt = FileUtil.extName(file.getOriginalFilename());
Map<String, String> resultMap = new HashMap<>(1);
try {
StorePath storePath = fastFileStorageClient.uploadFile(file.getBytes(), fileExt);
resultMap.put("filename", fdfsPropertiesConfig.getFileHost() + storePath.getFullPath());
} catch (IOException e) {
logger.error("文件上传异常", e);
throw new RuntimeException(e);
}
return resultMap;
}
/**
* Update personal info
*
* @param userDto userDto
* @param userVo logged-in user info
* @return success/false
*/
@PutMapping("/editInfo")
public R<Boolean> editInfo(@RequestBody UserDTO userDto, UserVO userVo) {
return userService.updateUserInfo(userDto, userVo.getUsername());
}
}
<|start_filename|>xll-modules/xll-mc-service/src/main/java/com/xll/upms/mc/handler/DingTalkMessageHandler.java<|end_filename|>
package com.xll.upms.mc.handler;
import com.alibaba.fastjson.JSONObject;
import com.xll.upms.common.util.template.DingTalkMsgTemplate;
import com.xll.upms.mc.config.DingTalkPropertiesConfig;
import com.xiaoleilu.hutool.http.HttpUtil;
import com.xiaoleilu.hutool.util.StrUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* @Author 徐亮亮
* @Description: Logic for sending DingTalk messages
* @Date 2019/1/18 21:24
*/
@Slf4j
@Component
public class DingTalkMessageHandler {
@Autowired
private DingTalkPropertiesConfig dingTalkPropertiesConfig;
/**
* Business processing
*
* @param text message
*/
public boolean process(String text) {
String webhook = dingTalkPropertiesConfig.getWebhook();
if (StrUtil.isBlank(webhook)) {
log.error("钉钉配置错误,webhook为空");
return false;
}
DingTalkMsgTemplate dingTalkMsgTemplate = new DingTalkMsgTemplate();
dingTalkMsgTemplate.setMsgtype("text");
DingTalkMsgTemplate.TextBean textBean = new DingTalkMsgTemplate.TextBean();
textBean.setContent(text);
dingTalkMsgTemplate.setText(textBean);
String result = HttpUtil.post(webhook, JSONObject.toJSONString(dingTalkMsgTemplate));
log.info("钉钉提醒成功,报文响应:{}", result);
return true;
}
}
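/*
 * Hypothetical usage sketch, not part of the original repository: it reuses the same
 * DingTalkMsgTemplate setters as the handler above to show the JSON body that gets POSTed
 * to the DingTalk webhook. The class name and the sample content string are assumptions.
 */
class DingTalkPayloadDemo {
    public static void main(String[] args) {
        // Build the same "text" message the handler builds
        DingTalkMsgTemplate template = new DingTalkMsgTemplate();
        template.setMsgtype("text");
        DingTalkMsgTemplate.TextBean textBean = new DingTalkMsgTemplate.TextBean();
        textBean.setContent("build finished");
        template.setText(textBean);
        // Expected shape given the setters above: {"msgtype":"text","text":{"content":"build finished"}}
        System.out.println(JSONObject.toJSONString(template));
    }
}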
<|start_filename|>xll-modules/xll-mc-service/src/main/java/com/xll/upms/mc/handler/AbstractMessageHandler.java<|end_filename|>
package com.xll.upms.mc.handler;
import com.xll.upms.common.util.template.MobileMsgTemplate;
/**
* @Author 徐亮亮
* @Description: Abstract message handler
* @Date 2019/1/18 21:23
*/
public abstract class AbstractMessageHandler implements SmsMessageHandler {
/**
* Execution entry point
*
* @param mobileMsgTemplate message
*/
@Override
public void execute(MobileMsgTemplate mobileMsgTemplate) {
check(mobileMsgTemplate);
if (!process(mobileMsgTemplate)) {
fail(mobileMsgTemplate);
}
}
/**
* Data validation
*
* @param mobileMsgTemplate message
*/
@Override
public abstract void check(MobileMsgTemplate mobileMsgTemplate);
/**
* Business processing
*
* @param mobileMsgTemplate message
* @return boolean
*/
@Override
public abstract boolean process(MobileMsgTemplate mobileMsgTemplate);
/**
* Failure handling
*
* @param mobileMsgTemplate message
*/
@Override
public abstract void fail(MobileMsgTemplate mobileMsgTemplate);
}
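/*
 * Hypothetical subclass sketch, not part of the original repository: it shows how the template
 * method above is meant to be extended - execute() runs check(), then process(), and invokes
 * fail() when process() returns false. The class name and console output are assumptions.
 */
class ConsoleMessageHandler extends AbstractMessageHandler {
    @Override
    public void check(MobileMsgTemplate mobileMsgTemplate) {
        // Validate the incoming message before any send is attempted
        if (mobileMsgTemplate == null) {
            throw new IllegalArgumentException("mobileMsgTemplate must not be null");
        }
    }

    @Override
    public boolean process(MobileMsgTemplate mobileMsgTemplate) {
        // Stand-in for a real SMS gateway call; returning false would trigger fail()
        System.out.println("sending message: " + mobileMsgTemplate);
        return true;
    }

    @Override
    public void fail(MobileMsgTemplate mobileMsgTemplate) {
        // Compensation/alerting hook for messages that could not be sent
        System.err.println("failed to send message: " + mobileMsgTemplate);
    }
}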
<|start_filename|>xll-modules/xll-mc-service/src/main/java/com/xll/upms/mc/config/DingTalkPropertiesConfig.java<|end_filename|>
package com.xll.upms.mc.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/**
* @Author 徐亮亮
* @Description: DingTalk service configuration
* @Date 2019/1/18 21:22
*/
@Data
@Configuration
@ConfigurationProperties(prefix = "sms.dingtalk")
public class DingTalkPropertiesConfig {
/**
* webhook
*/
private String webhook;
}
<|start_filename|>xll-common/src/main/java/com/xll/upms/common/vo/ErrorPojo.java<|end_filename|>
package com.xll.upms.common.vo;
import com.alibaba.fastjson.annotation.JSONField;
import lombok.Data;
import java.io.Serializable;
/**
* @Author 徐亮亮
* @Description: Spring Boot error object view entity
* @Date 2019/1/18 20:59
*/
@Data
public class ErrorPojo implements Serializable {
@JSONField(name = "timestamp")
private long timestamp;
@JSONField(name = "status")
private int status;
@JSONField(name = "error")
private String error;
@JSONField(name = "exception")
private String exception;
@JSONField(name = "message")
private String message;
@JSONField(name = "path")
private String path;
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/controller/MenuController.java<|end_filename|>
package com.xll.upms.admin.controller;
import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.xll.upms.admin.common.util.TreeUtil;
import com.xll.upms.admin.model.dto.MenuTree;
import com.xll.upms.admin.model.entity.SysMenu;
import com.xll.upms.admin.service.SysMenuService;
import com.xll.upms.common.constant.CommonConstant;
import com.xll.upms.common.util.R;
import com.xll.upms.common.vo.MenuVO;
import com.xll.upms.common.vo.UserVO;
import com.xll.upms.common.web.BaseController;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.*;
import java.util.stream.Collectors;
/**
* @Author 徐亮亮
* @Description: Menu front-end controller
* @Date 2019/1/18 21:35
*/
@RestController
@RequestMapping("/menu")
public class MenuController extends BaseController {
@Autowired
private SysMenuService sysMenuService;
/**
* Query a user's menus by role name
*
* @param role role name
* @return menu list
*/
@GetMapping("/findMenuByRole/{role}")
public List<MenuVO> findMenuByRole(@PathVariable String role) {
return sysMenuService.findMenuByRoleName(role);
}
/**
* Return the current user's menu tree
*
* @param userVO current user info
* @return the current user's menu tree
*/
@GetMapping(value = "/userMenu")
public List<MenuTree> userMenu(UserVO userVO) {
// Get the menus matching the conditions
Set<MenuVO> all = new HashSet<>();
userVO.getRoleList().forEach(role -> all.addAll(sysMenuService.findMenuByRoleName(role.getRoleCode())));
List<MenuTree> menuTreeList = all.stream().filter(vo -> CommonConstant.MENU
.equals(vo.getType()))
.map(MenuTree::new)
.sorted(Comparator.comparingInt(MenuTree::getSort))
.collect(Collectors.toList());
return TreeUtil.bulid(menuTreeList, -1);
}
/**
* Return the full menu tree
*
* @return menu tree
*/
@GetMapping(value = "/allTree")
public List<MenuTree> getTree() {
SysMenu condition = new SysMenu();
condition.setDelFlag(CommonConstant.STATUS_NORMAL);
return TreeUtil.bulidTree(sysMenuService.selectList(new EntityWrapper<>(condition)), -1);
}
/**
* Return the menu ID list for a role
*
* @param roleName role name
* @return list of menu IDs
*/
@GetMapping("/roleTree/{roleName}")
public List<Integer> roleTree(@PathVariable String roleName) {
List<MenuVO> menus = sysMenuService.findMenuByRoleName(roleName);
List<Integer> menuList = new ArrayList<>();
for (MenuVO menuVo : menus) {
menuList.add(menuVo.getMenuId());
}
return menuList;
}
/**
* Query menu details by ID
*
* @param id menu ID
* @return menu details
*/
@GetMapping("/{id}")
public SysMenu menu(@PathVariable Integer id) {
return sysMenuService.selectById(id);
}
/**
* Add a menu
*
* @param sysMenu menu info
* @return success/false
*/
@PostMapping
public R<Boolean> menu(@RequestBody SysMenu sysMenu) {
return new R<>(sysMenuService.insert(sysMenu));
}
/**
* Delete a menu
*
* @param id menu ID
* @return success/false
* TODO cascade-delete child nodes
*/
@DeleteMapping("/{id}")
public R<Boolean> menuDel(@PathVariable Integer id) {
return new R<>(sysMenuService.deleteMenu(id));
}
@PutMapping
public R<Boolean> menuUpdate(@RequestBody SysMenu sysMenu) {
return new R<>(sysMenuService.updateMenuById(sysMenu));
}
}
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/component/listener/GroovyLoadInitListener.java<|end_filename|>
package com.xll.upms.gateway.component.listener;
import com.netflix.zuul.FilterFileManager;
import com.netflix.zuul.FilterLoader;
import com.netflix.zuul.groovy.GroovyCompiler;
import com.netflix.zuul.groovy.GroovyFileFilter;
import com.netflix.zuul.monitoring.MonitoringHelper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.embedded.EmbeddedServletContainerInitializedEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
/**
* @Author 徐亮亮
* @Description: Dynamic filter initialization configuration
* @Date 2019/1/18 21:13
*/
@Slf4j
@Component
@ConditionalOnProperty("zuul.groovy.path")
public class GroovyLoadInitListener {
@Value("${zuul.groovy.path}")
private String groovyPath;
@EventListener(value = {EmbeddedServletContainerInitializedEvent.class})
public void init() {
MonitoringHelper.initMocks();
FilterLoader.getInstance().setCompiler(new GroovyCompiler());
FilterFileManager.setFilenameFilter(new GroovyFileFilter());
try {
FilterFileManager.init(10, groovyPath);
} catch (Exception e) {
log.error("初始化网关Groovy 文件失败 {}", e);
}
log.warn("初始化网关Groovy 文件成功");
}
}
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/component/filter/AccessFilter.java<|end_filename|>
package com.xll.upms.gateway.component.filter;
import com.xll.upms.common.constant.SecurityConstants;
import com.xll.upms.gateway.util.RibbonVersionHolder;
import com.netflix.zuul.ZuulFilter;
import com.netflix.zuul.context.RequestContext;
import com.xiaoleilu.hutool.collection.CollectionUtil;
import com.xiaoleilu.hutool.util.StrUtil;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.netflix.zuul.filters.support.FilterConstants;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
import static org.springframework.cloud.netflix.zuul.filters.support.FilterConstants.FORM_BODY_WRAPPER_FILTER_ORDER;
/**
* @Author 徐亮亮
* @Description: Runs before RateLimitPreFilter, otherwise a NullPointerException occurs
* @Date 2019/1/18 21:08
*/
@Component
public class AccessFilter extends ZuulFilter {
@Value("${zuul.ribbon.metadata.enabled:false}")
private boolean canary;
@Override
public String filterType() {
return FilterConstants.PRE_TYPE;
}
@Override
public int filterOrder() {
return FORM_BODY_WRAPPER_FILTER_ORDER - 1;
}
@Override
public boolean shouldFilter() {
return true;
}
@Override
public Object run() {
RequestContext requestContext = RequestContext.getCurrentContext();
String version = requestContext.getRequest().getHeader(SecurityConstants.VERSION);
if (canary && StrUtil.isNotBlank(version)) {
RibbonVersionHolder.setContext(version);
}
requestContext.set("startTime", System.currentTimeMillis());
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
if (authentication != null) {
requestContext.addZuulRequestHeader(SecurityConstants.USER_HEADER, authentication.getName());
requestContext.addZuulRequestHeader(SecurityConstants.ROLE_HEADER, CollectionUtil.join(authentication.getAuthorities(), ","));
}
return null;
}
}
<|start_filename|>xll-modules/xll-sso-client-demo/src/main/java/com/xll/upms/sso/UPMSSsoClientDemoApplication.java<|end_filename|>
package com.xll.upms.sso;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.security.oauth2.client.EnableOAuth2Sso;
/**
* @Author 徐亮亮
* @Description: Single sign-on client
* @Date 2019/1/18 21:27
*/
@EnableOAuth2Sso
@SpringBootApplication
public class UPMSSsoClientDemoApplication {
public static void main(String[] args) {
SpringApplication.run(UPMSSsoClientDemoApplication.class, args);
}
}
<|start_filename|>xll-common/src/main/java/com/xll/upms/common/util/exception/CheckedException.java<|end_filename|>
package com.xll.upms.common.util.exception;
/**
* @Author 徐亮亮
* @Description: Checked exception
* @Date 2019/1/18 20:52
*/
public class CheckedException extends RuntimeException {
private static final long serialVersionUID = 1L;
public CheckedException() {
}
public CheckedException(String message) {
super(message);
}
public CheckedException(Throwable cause) {
super(cause);
}
public CheckedException(String message, Throwable cause) {
super(message, cause);
}
public CheckedException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysDictMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysDict;
import org.apache.ibatis.annotations.Mapper;
/**
* @Author 徐亮亮
* @Description: Dictionary table Mapper interface
* @Date 2019/1/18 21:40
*/
@Mapper
public interface SysDictMapper extends BaseMapper<SysDict> {
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysRoleMenuService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.admin.model.entity.SysRoleMenu;
/**
* @Author 徐亮亮
* @Description: Role-menu table service
* @Date 2019/1/18 21:57
*/
public interface SysRoleMenuService extends IService<SysRoleMenu> {
/**
* Update a role's menus
*
* @param role role code
* @param roleId role ID
* @param menuIds menu ID list
* @return Boolean
*/
Boolean insertRoleMenus(String role, Integer roleId, String menuIds);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysDeptRelationMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysDeptRelation;
import org.apache.ibatis.annotations.Mapper;
/**
* @Author 徐亮亮
* @Description: Department relation Mapper
* @Date 2019/1/18 21:39
*/
@Mapper
public interface SysDeptRelationMapper extends BaseMapper<SysDeptRelation> {
/**
* Delete department relation records
*
* @param id department ID
*/
void deleteAllDeptRealtion(Integer id);
/**
* Update department relation records
*
* @param deptRelation department relation
*/
void updateDeptRealtion(SysDeptRelation deptRelation);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/impl/SysLogServiceImpl.java<|end_filename|>
package com.xll.upms.admin.service.impl;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.xll.upms.admin.mapper.SysLogMapper;
import com.xll.upms.admin.service.SysLogService;
import com.xll.upms.common.constant.CommonConstant;
import com.xll.upms.common.entity.SysLog;
import com.xll.upms.common.util.Assert;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
/**
* @Author 徐亮亮
* @Description: Log table service implementation
* @Date 2019/1/18 21:52
*/
@Service
@Transactional
public class SysLogServiceImpl extends ServiceImpl<SysLogMapper, SysLog> implements SysLogService {
@Override
public Boolean updateByLogId(Long id) {
Assert.isNull(id, "日志ID为空");
SysLog sysLog = new SysLog();
sysLog.setId(id);
sysLog.setDelFlag(CommonConstant.STATUS_DEL);
sysLog.setUpdateTime(new Date());
return updateById(sysLog);
}
}
<|start_filename|>xll-modules/xll-mc-service/src/main/java/com/xll/upms/mc/handler/SmsMessageHandler.java<|end_filename|>
package com.xll.upms.mc.handler;
import com.xll.upms.common.util.template.MobileMsgTemplate;
/**
* @Author 徐亮亮
* @Description: Message handler
* @Date 2019/1/18 21:24
*/
public interface SmsMessageHandler {
/**
* Execution entry point
*
* @param mobileMsgTemplate message
*/
void execute(MobileMsgTemplate mobileMsgTemplate);
/**
* Data validation
*
* @param mobileMsgTemplate message
*/
void check(MobileMsgTemplate mobileMsgTemplate);
/**
* Business processing
*
* @param mobileMsgTemplate message
* @return boolean
*/
boolean process(MobileMsgTemplate mobileMsgTemplate);
/**
* Failure handling
*
* @param mobileMsgTemplate message
*/
void fail(MobileMsgTemplate mobileMsgTemplate);
}
<|start_filename|>xll-modules/xll-sso-client-demo/src/main/java/com/xll/upms/sso/controller/DemoController.java<|end_filename|>
package com.xll.upms.sso.controller;
import org.springframework.security.core.Authentication;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* @Author 徐亮亮
* @Description: Test controller
* @Date 2019/1/18 21:26
*/
@RestController
public class DemoController {
@GetMapping("/")
public Authentication user(Authentication authentication) {
return authentication;
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/model/dto/RouteConfig.java<|end_filename|>
package com.xll.upms.admin.model.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.Map;
/**
* @Author 徐亮亮
* @Description: Route configuration
* @Date 2019/1/18 21:46
*/
@Data
public class RouteConfig implements Serializable{
@com.alibaba.fastjson.annotation.JSONField(name = "path")
private String path;
@com.alibaba.fastjson.annotation.JSONField(name = "component")
private String component;
@com.alibaba.fastjson.annotation.JSONField(name = "name")
private String name;
@com.alibaba.fastjson.annotation.JSONField(name = "components")
private String components;
@com.alibaba.fastjson.annotation.JSONField(name = "redirect")
private String redirect;
@com.alibaba.fastjson.annotation.JSONField(name = "props")
private String props;
@com.alibaba.fastjson.annotation.JSONField(name = "alias")
private String alias;
@com.alibaba.fastjson.annotation.JSONField(name = "children")
private String children;
@com.alibaba.fastjson.annotation.JSONField(name = "beforeEnter")
private String beforeEnter;
@com.alibaba.fastjson.annotation.JSONField(name = "meta")
private Map<String,String> meta;
@com.alibaba.fastjson.annotation.JSONField(name = "caseSensitive")
private Boolean caseSensitive;
@com.alibaba.fastjson.annotation.JSONField(name = "pathToRegexpOptions")
private String pathToRegexpOptions;
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/model/dto/UserInfo.java<|end_filename|>
package com.xll.upms.admin.model.dto;
import com.xll.upms.admin.model.entity.SysUser;
import lombok.Data;
import java.io.Serializable;
/**
* @Author 徐亮亮
* @Description: Basic user info
* @Date 2019/1/18 21:47
*/
@Data
public class UserInfo implements Serializable {
/**
* Basic user info
*/
private SysUser sysUser;
/**
* Permission identifiers
*/
private String[] permissions;
/**
* Roles
*/
private String[] roles;
}
<|start_filename|>xll-auth/src/main/java/com/xll/upms/auth/feign/fallback/UserServiceFallbackImpl.java<|end_filename|>
package com.xll.upms.auth.feign.fallback;
import com.xll.upms.auth.feign.UserService;
import com.xll.upms.common.vo.UserVO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
/**
* @author lengleng
* @date 2017/10/31
* @Author 徐亮亮
* @Description: User service fallback (returns a quick response to the caller when the request fails)
* @Date 2019/1/18 20:20
*/
@Service
public class UserServiceFallbackImpl implements UserService {
private Logger logger = LoggerFactory.getLogger(this.getClass());
@Override
public UserVO findUserByUsername(String username) {
logger.error("调用{}异常:{}", "findUserByUsername", username);
return null;
}
/**
* Query user and role info by mobile number
*
* @param mobile mobile number
* @return UserVo
*/
@Override
public UserVO findUserByMobile(String mobile) {
logger.error("调用{}异常:{}", "通过手机号查询用户", mobile);
return null;
}
/**
* Query user info by OpenId
*
* @param openId openId
* @return UserVo
*/
@Override
public UserVO findUserByOpenId(String openId) {
logger.error("调用{}异常:{}", "通过OpenId查询用户", openId);
return null;
}
}
<|start_filename|>xll-visual/xll-zipkin-db/src/main/java/com/xll/upms/zipkin/UPMSZipkinDbApplication.java<|end_filename|>
package com.xll.upms.zipkin;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import zipkin.server.EnableZipkinServer;
/**
* @Author 徐亮亮
* @Description: Zipkin MySQL storage implementation
* @Date 2019/1/18 22:06
*/
@EnableDiscoveryClient
@SpringBootApplication
@EnableZipkinServer
public class UPMSZipkinDbApplication {
public static void main(String[] args) {
SpringApplication.run(UPMSZipkinDbApplication.class, args);
}
}
<|start_filename|>xll-eureka/src/main/java/com/xll/upms/eureka/UPMSEurekaApplication.java<|end_filename|>
package com.xll.upms.eureka;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.netflix.eureka.server.EnableEurekaServer;
/**
* @Author 徐亮亮
* @Description: Service registration and discovery
* @Date 2019/1/18 21:04
*/
@EnableEurekaServer
@SpringBootApplication
public class UPMSEurekaApplication {
public static void main(String[] args) {
SpringApplication.run(UPMSEurekaApplication.class, args);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/model/dto/DeptTree.java<|end_filename|>
package com.xll.upms.admin.model.dto;
import lombok.Data;
/**
* @Author 徐亮亮
* @Description: Department tree
* @Date 2019/1/18 21:45
*/
@Data
public class DeptTree extends TreeNode {
private String name;
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/controller/OauthClientDetailsController.java<|end_filename|>
package com.xll.upms.admin.controller;
import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.baomidou.mybatisplus.plugins.Page;
import com.xll.upms.admin.model.entity.SysOauthClientDetails;
import com.xll.upms.admin.service.SysOauthClientDetailsService;
import com.xll.upms.common.util.Query;
import com.xll.upms.common.util.R;
import com.xll.upms.common.web.BaseController;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
/**
* @Author 徐亮亮
* @Description: Client authentication details front-end controller
* @Date 2019/1/18 21:35
*/
@RestController
@RequestMapping("/client")
public class OauthClientDetailsController extends BaseController {
@Autowired
private SysOauthClientDetailsService sysOauthClientDetailsService;
/**
* Query by ID
*
* @param id ID
* @return SysOauthClientDetails
*/
@GetMapping("/{id}")
public SysOauthClientDetails get(@PathVariable Integer id) {
return sysOauthClientDetailsService.selectById(id);
}
/**
* Page query
*
* @param params paging parameters
* @return page object
*/
@RequestMapping("/page")
public Page page(@RequestParam Map<String, Object> params) {
return sysOauthClientDetailsService.selectPage(new Query<>(params), new EntityWrapper<>());
}
/**
* Add
*
* @param sysOauthClientDetails entity
* @return success/false
*/
@PostMapping
public R<Boolean> add(@RequestBody SysOauthClientDetails sysOauthClientDetails) {
return new R<>(sysOauthClientDetailsService.insert(sysOauthClientDetails));
}
/**
* Delete
*
* @param id ID
* @return success/false
*/
@DeleteMapping("/{id}")
public R<Boolean> delete(@PathVariable String id) {
SysOauthClientDetails sysOauthClientDetails = new SysOauthClientDetails();
sysOauthClientDetails.setClientId(id);
return new R<>(sysOauthClientDetailsService.deleteById(sysOauthClientDetails));
}
/**
* Edit
*
* @param sysOauthClientDetails entity
* @return success/false
*/
@PutMapping
public R<Boolean> edit(@RequestBody SysOauthClientDetails sysOauthClientDetails) {
return new R<>(sysOauthClientDetailsService.updateById(sysOauthClientDetails));
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/common/listener/LogReceiveListener.java<|end_filename|>
package com.xll.upms.admin.common.listener;
import com.xll.upms.admin.service.SysLogService;
import com.xll.upms.common.constant.MqQueueConstant;
import com.xll.upms.common.entity.SysLog;
import com.xll.upms.common.vo.LogVO;
import org.slf4j.MDC;
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
/**
* @Author 徐亮亮
* @Description: Log receive listener
* @Date 2019/1/18 21:31
*/
@Component
//@Service
@RabbitListener(queues = MqQueueConstant.LOG_QUEUE)
public class LogReceiveListener {
private static final String KEY_USER = "user";
@Autowired
private SysLogService sysLogService;
@RabbitHandler
public void receive(LogVO logVo) {
SysLog sysLog = logVo.getSysLog();
MDC.put(KEY_USER, logVo.getUsername());
sysLog.setCreateBy(logVo.getUsername());
sysLogService.insert(sysLog);
MDC.remove(KEY_USER);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/impl/SysDictServiceImpl.java<|end_filename|>
package com.xll.upms.admin.service.impl;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.xll.upms.admin.mapper.SysDictMapper;
import com.xll.upms.admin.model.entity.SysDict;
import com.xll.upms.admin.service.SysDictService;
import org.springframework.stereotype.Service;
/**
* @Author 徐亮亮
* @Description: Dictionary table service implementation
* @Date 2019/1/18 21:52
*/
@Service
public class SysDictServiceImpl extends ServiceImpl<SysDictMapper, SysDict> implements SysDictService {
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysRoleMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysRole;
import com.xll.upms.common.util.Query;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
import java.util.Map;
/**
* @Author 徐亮亮
* @Description: Role Mapper
* @Date 2019/1/18 21:42
*/
@Mapper
public interface SysRoleMapper extends BaseMapper<SysRole> {
/**
* Query role list with department info
* @param query query object
* @param condition conditions
* @return List
*/
List<Object> selectRolePage(Query<Object> query, Map<String, Object> condition);
/**
* Query role list by department ID
*
* @param deptId department ID
* @return role list
*/
List<SysRole> selectListByDeptId(Integer deptId);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/UPMSAdminApplication.java<|end_filename|>
package com.xll.upms.admin;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.scheduling.annotation.EnableAsync;
/**
* @Author 徐亮亮
* @Description: UPMS user authorization management service
* @Date 2019/1/18 21:59
*/
@EnableAsync
@SpringBootApplication
@EnableDiscoveryClient
@ComponentScan(basePackages = {"com.xll.upms.admin", "com.xll.upms.common.bean"})
//@ComponentScan(basePackages = {"com.xll.upms"})
public class UPMSAdminApplication {
public static void main(String[] args) {
SpringApplication.run(UPMSAdminApplication.class, args);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysOauthClientDetailsService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.admin.model.entity.SysOauthClientDetails;
/**
* @Author 徐亮亮
* @Description: Service interface
* @Date 2019/1/18 21:57
*/
public interface SysOauthClientDetailsService extends IService<SysOauthClientDetails> {
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/impl/SysRoleMenuServiceImpl.java<|end_filename|>
package com.xll.upms.admin.service.impl;
import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.xll.upms.admin.mapper.SysRoleMenuMapper;
import com.xll.upms.admin.model.entity.SysRoleMenu;
import com.xll.upms.admin.service.SysRoleMenuService;
import com.xiaoleilu.hutool.util.StrUtil;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* @Author 徐亮亮
* @Description: Role-menu table service implementation
* @Date 2019/1/18 21:53
*/
@Service
public class SysRoleMenuServiceImpl extends ServiceImpl<SysRoleMenuMapper, SysRoleMenu> implements SysRoleMenuService {
@Override
@CacheEvict(value = "menu_details", key = "#role + '_menu'")
public Boolean insertRoleMenus(String role, Integer roleId, String menuIds) {
SysRoleMenu condition = new SysRoleMenu();
condition.setRoleId(roleId);
this.delete(new EntityWrapper<>(condition));
if (StrUtil.isBlank(menuIds)) {
return Boolean.TRUE;
}
List<SysRoleMenu> roleMenuList = new ArrayList<>();
List<String> menuIdList = Arrays.asList(menuIds.split(","));
for (String menuId : menuIdList) {
SysRoleMenu roleMenu = new SysRoleMenu();
roleMenu.setRoleId(roleId);
roleMenu.setMenuId(Integer.valueOf(menuId));
roleMenuList.add(roleMenu);
}
return this.insertBatch(roleMenuList);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysUserService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.plugins.Page;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.admin.model.dto.UserDTO;
import com.xll.upms.admin.model.dto.UserInfo;
import com.xll.upms.admin.model.entity.SysUser;
import com.xll.upms.common.util.Query;
import com.xll.upms.common.util.R;
import com.xll.upms.common.vo.UserVO;
/**
* @Author 徐亮亮
* @Description: User service
* @Date 2019/1/18 21:58
*/
public interface SysUserService extends IService<SysUser> {
/**
* Query user and role info by username
*
* @param username username
* @return userVo
*/
UserVO findUserByUsername(String username);
/**
* Page query of user info (with role info)
*
* @param query query conditions
* @param userVO user info
* @return page of users
*/
Page selectWithRolePage(Query query, UserVO userVO);
/**
* Query user info
*
* @param userVo user info
* @return userInfo
*/
UserInfo findUserInfo(UserVO userVo);
/**
* Save the captcha code
* @param randomStr random string
* @param imageCode captcha code
*/
void saveImageCode(String randomStr, String imageCode);
/**
* Delete a user
* @param sysUser user
* @return boolean
*/
Boolean deleteUserById(SysUser sysUser);
/**
* Update the current user's basic info
* @param userDto user info
* @param username username
* @return Boolean
*/
R<Boolean> updateUserInfo(UserDTO userDto, String username);
/**
* Update the specified user's info
* @param userDto user info
* @return Boolean
*/
Boolean updateUser(UserDTO userDto);
/**
* Query user info by mobile number
* @param mobile mobile number
* @return user info
*/
UserVO findUserByMobile(String mobile);
/**
* Send an SMS verification code
* @param mobile mobile number
* @return R
*/
R<Boolean> sendSmsCode(String mobile);
/**
* Query a user by openId
* @param openId openId
* @return user info
*/
UserVO findUserByOpenId(String openId);
/**
* Query user info by ID
* @param id user ID
* @return user info
*/
UserVO selectUserVoById(Integer id);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysRoleMenuMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysRoleMenu;
import org.apache.ibatis.annotations.Mapper;
/**
* @Author 徐亮亮
* @Description: Role-menu table Mapper interface
* @Date 2019/1/18 21:43
*/
@Mapper
public interface SysRoleMenuMapper extends BaseMapper<SysRoleMenu> {
}
<|start_filename|>xll-common/src/main/java/com/xll/upms/common/bean/resolver/TokenArgumentResolver.java<|end_filename|>
package com.xll.upms.common.bean.resolver;
import com.xll.upms.common.constant.SecurityConstants;
import com.xll.upms.common.vo.SysRole;
import com.xll.upms.common.vo.UserVO;
import com.xiaoleilu.hutool.util.StrUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.MethodParameter;
import org.springframework.web.bind.support.WebDataBinderFactory;
import org.springframework.web.context.request.NativeWebRequest;
import org.springframework.web.method.support.HandlerMethodArgumentResolver;
import org.springframework.web.method.support.ModelAndViewContainer;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* @Author 徐亮亮
* @Description: Resolves token headers into a UserVO
* @Date 2019/1/18 20:40
*/
@Slf4j
@Configuration
public class TokenArgumentResolver implements HandlerMethodArgumentResolver {
/**
* 1. Parameter filtering
*
* @param methodParameter parameter set
* @return whether the parameter type is supported
*/
@Override
public boolean supportsParameter(MethodParameter methodParameter) {
return methodParameter.getParameterType().equals(UserVO.class);
}
/**
* @param methodParameter parameter set
* @param modelAndViewContainer model and view
* @param nativeWebRequest web request context
* @param webDataBinderFactory parameter binding
* @return wrapped object
* @throws Exception exception
*/
@Override
public Object resolveArgument(MethodParameter methodParameter,
ModelAndViewContainer modelAndViewContainer,
NativeWebRequest nativeWebRequest,
WebDataBinderFactory webDataBinderFactory) {
HttpServletRequest request = nativeWebRequest.getNativeRequest(HttpServletRequest.class);
String username = request.getHeader(SecurityConstants.USER_HEADER);
String roles = request.getHeader(SecurityConstants.ROLE_HEADER);
if (StrUtil.isBlank(username) || StrUtil.isBlank(roles)) {
log.warn("resolveArgument error username or role is empty");
return null;
} else {
log.info("resolveArgument username :{} roles:{}", username, roles);
}
UserVO userVO = new UserVO();
userVO.setUsername(username);
List<SysRole> sysRoleList = new ArrayList<>();
Arrays.stream(roles.split(",")).forEach(role -> {
SysRole sysRole = new SysRole();
sysRole.setRoleCode(role);
sysRoleList.add(sysRole);
});
userVO.setRoleList(sysRoleList);
return userVO;
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysMenuService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.admin.model.entity.SysMenu;
import com.xll.upms.common.vo.MenuVO;
import java.util.List;
/**
* @Author 徐亮亮
* @Description: Menu permission table service
* @Date 2019/1/18 21:57
*/
public interface SysMenuService extends IService<SysMenu> {
/**
* Query URL permissions by role name
*
* @param role role name
* @return menu list
*/
List<MenuVO> findMenuByRoleName(String role);
/**
* Cascade-delete a menu
*
* @param id menu ID
* @return success/failure
*/
Boolean deleteMenu(Integer id);
/**
* Update menu info
*
* @param sysMenu menu info
* @return success/failure
*/
Boolean updateMenuById(SysMenu sysMenu);
}
<|start_filename|>xll-auth/src/main/java/com/xll/upms/auth/service/UserDetailServiceImpl.java<|end_filename|>
package com.xll.upms.auth.service;
import com.xll.upms.auth.feign.UserService;
import com.xll.upms.auth.util.UserDetailsImpl;
import com.xll.upms.common.vo.UserVO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
/**
* @Author 徐亮亮
* @Description: User details business logic
* @Date 2019/1/18 20:10
*/
@Service("userDetailService")
public class UserDetailServiceImpl implements UserDetailsService {
@Autowired
private UserService userService;
@Override
public UserDetailsImpl loadUserByUsername(String username) throws UsernameNotFoundException {
UserVO userVo = userService.findUserByUsername(username);
if (userVo == null) {
throw new UsernameNotFoundException("用户名不存在或者密码错误");
}
return new UserDetailsImpl(userVo);
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysUserMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.admin.model.entity.SysUser;
import com.xll.upms.common.bean.interceptor.DataScope;
import com.xll.upms.common.util.Query;
import com.xll.upms.common.vo.UserVO;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* @Author 徐亮亮
* @Description: User table Mapper interface
* @Date 2019/1/18 21:43
*/
@Mapper
public interface SysUserMapper extends BaseMapper<SysUser> {
/**
* Query user info by username (with role info)
*
* @param username username
* @return userVo
*/
UserVO selectUserVoByUsername(String username);
/**
* Page query of user info (with roles)
*
* @param query query conditions
* @param username username
* @param dataScope data permission scope
* @return list
*/
List selectUserVoPageDataScope(Query query, @Param("username") Object username, DataScope dataScope);
/**
* Query user info by mobile number (with role info)
*
* @param mobile mobile number
* @return userVo
*/
UserVO selectUserVoByMobile(String mobile);
/**
* Query user info by openId
*
* @param openId openid
* @return userVo
*/
UserVO selectUserVoByOpenId(String openId);
/**
* Query user info by ID
*
* @param id user ID
* @return userVo
*/
UserVO selectUserVoById(Integer id);
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/mapper/SysZuulRouteMapper.java<|end_filename|>
package com.xll.upms.admin.mapper;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.xll.upms.common.entity.SysZuulRoute;
import org.apache.ibatis.annotations.Mapper;
/**
* @Author 徐亮亮
* @Description: Dynamic route configuration table Mapper interface
* @Date 2019/1/18 21:44
*/
@Mapper
public interface SysZuulRouteMapper extends BaseMapper<SysZuulRoute> {
}
<|start_filename|>xll-common/src/main/java/com/xll/upms/common/vo/LogVO.java<|end_filename|>
package com.xll.upms.common.vo;
import com.xll.upms.common.entity.SysLog;
import lombok.Data;
import java.io.Serializable;
/**
* @Author 徐亮亮
* @Description: Log view entity
* @Date 2019/1/18 21:01
*/
@Data
public class LogVO implements Serializable {
private static final long serialVersionUID = 1L;
private SysLog sysLog;
private String username;
}
<|start_filename|>xll-gateway/src/main/java/com/xll/upms/gateway/component/config/RibbonMetaFilterAutoConfiguration.java<|end_filename|>
package com.xll.upms.gateway.component.config;
import com.xll.upms.gateway.component.handler.MetadataCanaryRuleHandler;
import com.netflix.loadbalancer.ZoneAvoidanceRule;
import com.netflix.niws.loadbalancer.DiscoveryEnabledNIWSServerList;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.cloud.netflix.ribbon.RibbonClientConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
/**
* @Author 徐亮亮
* @Description: Canary (gray release) routing initialization
* @Date 2019/1/18 21:06
*/
@Configuration
@ConditionalOnClass(DiscoveryEnabledNIWSServerList.class)
@AutoConfigureBefore(RibbonClientConfiguration.class)
@ConditionalOnProperty(value = "zuul.ribbon.metadata.enabled")
public class RibbonMetaFilterAutoConfiguration {
@Bean
@ConditionalOnMissingBean
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
public ZoneAvoidanceRule metadataAwareRule() {
return new MetadataCanaryRuleHandler();
}
}
<|start_filename|>xll-auth/src/main/java/com/xll/upms/auth/feign/UserService.java<|end_filename|>
package com.xll.upms.auth.feign;
import com.xll.upms.auth.feign.fallback.UserServiceFallbackImpl;
import com.xll.upms.common.vo.UserVO;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
/**
* @Author 徐亮亮
* @Description: Invokes the upms service interface (Feign client)
* @Date 2019/1/18 20:12
*/
@FeignClient(name = "pig-upms-service", fallback = UserServiceFallbackImpl.class)
public interface UserService {
/**
* Query user and role info by username
*
* @param username username
* @return UserVo
*/
@GetMapping("/user/findUserByUsername/{username}")
UserVO findUserByUsername(@PathVariable("username") String username);
/**
* Query user and role info by mobile number
*
* @param mobile mobile number
* @return UserVo
*/
@GetMapping("/user/findUserByMobile/{mobile}")
UserVO findUserByMobile(@PathVariable("mobile") String mobile);
/**
* Query user info by OpenId
* @param openId openId
* @return UserVo
*/
@GetMapping("/user/findUserByOpenId/{openId}")
UserVO findUserByOpenId(@PathVariable("openId") String openId);
}
<|start_filename|>xll-common/src/main/java/com/xll/upms/common/bean/config/WebMvcConfig.java<|end_filename|>
package com.xll.upms.common.bean.config;
import com.xll.upms.common.bean.resolver.TokenArgumentResolver;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.method.support.HandlerMethodArgumentResolver;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import java.util.List;
/**
* @Author 徐亮亮
* @Description: MVC configuration class
* @Date 2019/1/18 20:37
*/
@Configuration
public class WebMvcConfig extends WebMvcConfigurerAdapter {
@Override
public void addArgumentResolvers(List<HandlerMethodArgumentResolver> argumentResolvers) {
argumentResolvers.add(new TokenArgumentResolver());
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/model/dto/RoleDTO.java<|end_filename|>
package com.xll.upms.admin.model.dto;
import com.xll.upms.admin.model.entity.SysRole;
import lombok.Data;
/**
* @Author 徐亮亮
* @Description: Role DTO
* @Date 2019/1/18 21:46
*/
@Data
public class RoleDTO extends SysRole {
/**
* Role's department ID
*/
private Integer roleDeptId;
/**
* Department name
*/
private String deptName;
}
<|start_filename|>xll-auth/src/main/java/com/xll/upms/auth/component/mobile/MobileAuthenticationProvider.java<|end_filename|>
package com.xll.upms.auth.component.mobile;
import com.xll.upms.auth.feign.UserService;
import com.xll.upms.auth.util.UserDetailsImpl;
import com.xll.upms.common.vo.UserVO;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
/**
* @Author 徐亮亮
* @Description: Mobile number login verification logic
* @Date 2019/1/18 20:29
*/
public class MobileAuthenticationProvider implements AuthenticationProvider {
private UserService userService;
@Override
public Authentication authenticate(Authentication authentication) throws AuthenticationException {
MobileAuthenticationToken mobileAuthenticationToken = (MobileAuthenticationToken) authentication;
UserVO userVo = userService.findUserByMobile((String) mobileAuthenticationToken.getPrincipal());
if (userVo == null) {
throw new UsernameNotFoundException("手机号不存在:" + mobileAuthenticationToken.getPrincipal());
}
UserDetailsImpl userDetails = buildUserDetails(userVo);
MobileAuthenticationToken authenticationToken = new MobileAuthenticationToken(userDetails, userDetails.getAuthorities());
authenticationToken.setDetails(mobileAuthenticationToken.getDetails());
return authenticationToken;
}
private UserDetailsImpl buildUserDetails(UserVO userVo) {
return new UserDetailsImpl(userVo);
}
@Override
public boolean supports(Class<?> authentication) {
return MobileAuthenticationToken.class.isAssignableFrom(authentication);
}
public UserService getUserService() {
return userService;
}
public void setUserService(UserService userService) {
this.userService = userService;
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/impl/SysZuulRouteServiceImpl.java<|end_filename|>
package com.xll.upms.admin.service.impl;
import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.xll.upms.admin.mapper.SysZuulRouteMapper;
import com.xll.upms.admin.service.SysZuulRouteService;
import com.xll.upms.common.constant.CommonConstant;
import com.xll.upms.common.constant.MqQueueConstant;
import com.xll.upms.common.entity.SysZuulRoute;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* @Author 徐亮亮
* @Description: Dynamic route configuration table service implementation
* @Date 2019/1/18 21:55
*/
@Service
public class SysZuulRouteServiceImpl extends ServiceImpl<SysZuulRouteMapper, SysZuulRoute> implements SysZuulRouteService {
@Autowired
private RedisTemplate redisTemplate;
@Autowired
private RabbitTemplate rabbitTemplate;
/**
* Apply the route configuration immediately
*
* @return Boolean
*/
@Override
public Boolean applyZuulRoute() {
EntityWrapper wrapper = new EntityWrapper();
wrapper.eq(CommonConstant.DEL_FLAG, CommonConstant.STATUS_NORMAL);
List<SysZuulRoute> routeList = selectList(wrapper);
redisTemplate.opsForValue().set(CommonConstant.ROUTE_KEY, routeList);
rabbitTemplate.convertAndSend(MqQueueConstant.ROUTE_CONFIG_CHANGE, 1);
return Boolean.TRUE;
}
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/SysRoleDeptService.java<|end_filename|>
package com.xll.upms.admin.service;
import com.baomidou.mybatisplus.service.IService;
import com.xll.upms.admin.model.entity.SysRoleDept;
/**
* @Author 徐亮亮
* @Description: Role-department relation service
* @Date 2019/1/18 21:57
*/
public interface SysRoleDeptService extends IService<SysRoleDept> {
}
<|start_filename|>xll-modules/xll-upms-service/src/main/java/com/xll/upms/admin/service/impl/SysUserRoleServiceImpl.java<|end_filename|>
package com.xll.upms.admin.service.impl;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.xll.upms.admin.mapper.SysUserRoleMapper;
import com.xll.upms.admin.model.entity.SysUserRole;
import com.xll.upms.admin.service.SysUserRoleService;
import org.springframework.stereotype.Service;
/**
* @Author 徐亮亮
* @Description: User-role table service implementation
* @Date 2019/1/18 21:54
*/
@Service
public class SysUserRoleServiceImpl extends ServiceImpl<SysUserRoleMapper, SysUserRole> implements SysUserRoleService {
/**
* Delete the user's role relations by user ID
*
* @param userId user ID
* @return boolean
* @author 寻欢·李
* @date 2017-12-07 16:31:38
*/
@Override
public Boolean deleteByUserId(Integer userId) {
return baseMapper.deleteByUserId(userId);
}
}
| gitHub-good/xll-upms |
<|start_filename|>scripts/centos.dockerfile<|end_filename|>
FROM centos:7
RUN yum update -y \
&& yum install -y epel-release centos-release-scl \
&& yum-config-manager --enable rhel-server-rhscl-7-rpms \
&& yum update -y
RUN yum install -y \
devtoolset-6 \
gcc-c++ \
xz \
ccache \
git \
wget
RUN mkdir /root/node \
&& cd /root/node \
&& curl https://nodejs.org/dist/v12.14.0/node-v12.14.0-linux-x64.tar.xz | tar xJ --strip-components=1 -- \
&& ln -s /root/node/bin/node /usr/bin/node
ENV PATH "$PATH:/root/node/bin"
RUN npm install -g yarn
<|start_filename|>example/src/hello.js<|end_filename|>
console.log("Hey there!");
console.log("here is a banana");
console.log("frogger");
console.log("built with nbin", process.versions.nbin);
| cdr/nbin |