repo_id stringlengths 6 101 | size int64 367 5.14M | file_path stringlengths 2 269 | content stringlengths 367 5.14M |
|---|---|---|---|
2881099/dotnetGen_postgresql | 2,606 | Common/Common.csproj | <?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="14.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.50727</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{F0054101-9AC9-4E0E-9E78-44EA89FC5C19}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Common</RootNamespace>
<AssemblyName>Common</AssemblyName>
<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
<FileUpgradeFlags>
</FileUpgradeFlags>
<UpgradeBackupLocation>
</UpgradeBackupLocation>
<OldToolsVersion>2.0</OldToolsVersion>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="BaseSocket.cs" />
<Compile Include="Deflate.cs" />
<Compile Include="Misc.cs" />
<Compile Include="Model\BuildInfo.cs" />
<Compile Include="Model\ClientInfo.cs" />
<Compile Include="Model\ColumnInfo.cs" />
<Compile Include="Model\DatabaseInfo.cs" />
<Compile Include="Model\DataSort.cs" />
<Compile Include="Model\ForeignKeyInfo.cs" />
<Compile Include="Model\TableInfo.cs" />
<Compile Include="NpgsqlDbType.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> |
2881099/dotnetGen_sqlserver | 1,053 | Server/log4net.config | <?xml version="1.0" encoding="utf-8" ?>
<configuration>
<!-- Register a section handler for the log4net section -->
<configSections>
<section name="log4net" type="System.Configuration.IgnoreSectionHandler" />
</configSections>
<!-- This section contains the log4net configuration settings -->
<log4net>
<!-- remotor -->
<appender name="remotor__rolling_file_appender" type="log4net.Appender.RollingFileAppender">
<file value="d:\Ҷ\logs\mc\rolling.txt" />
<appendToFile value="true" />
<maxSizeRollBackups value="20" />
<maximumFileSize value="1048576" />
<rollingStyle value="Size" />
<staticLogFileName value="true" />
<lockingModel type="log4net.Appender.FileAppender+MinimalLock" />
<layout type="log4net.Layout.PatternLayout">
<conversionPattern value="%date %-5level %logger (%property{log4net:HostName}) - %message%newline" />
</layout>
</appender>
<logger name="remotor">
<level value="DEBUG" />
<appender-ref ref="remotor__rolling_file_appender" />
</logger>
</log4net>
</configuration> |
2881099/dotnetGen_mysql | 1,413 | GenMy/FastExcel/FastExcel.Worksheets.cs | using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Linq;
namespace FastExcel {
public partial class FastExcel {
private Worksheet[] _worksheets;
/// <summary>
/// List of worksheets, loaded on first access of property
/// </summary>
public Worksheet[] Worksheets {
get {
if (_worksheets != null) {
return _worksheets;
} else {
_worksheets = GetWorksheetProperties();
return _worksheets;
}
}
}
private Worksheet[] GetWorksheetProperties() {
CheckFiles();
PrepareArchive(false);
var worksheets = new List<Worksheet>();
using (Stream stream = this.Archive.GetEntry("xl/workbook.xml").Open()) {
XDocument document = XDocument.Load(stream);
if (document == null) {
throw new Exception("Unable to load workbook.xml");
}
List<XElement> sheetsElements = document.Descendants().Where(d => d.Name.LocalName == "sheet").ToList();
foreach (var sheetElement in sheetsElements) {
var worksheet = new Worksheet(this);
worksheet.Index = sheetsElements.IndexOf(sheetElement) + 1;
worksheet.Name = (from attribute in sheetElement.Attributes()
where attribute.Name == "name"
select attribute.Value).FirstOrDefault();
worksheets.Add(worksheet);
}
}
return worksheets.ToArray();
}
}
}
|
2881099/dotnetGen_mysql | 1,325 | GenMy/FastExcel/FastExcel.Delete.cs | using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Linq;
namespace FastExcel {
public partial class FastExcel {
/// <summary>
/// Deletes the selected sheet Note:delete happens on Dispose
/// </summary>
/// <param name="sheetNumber">sheet number, starts at 1</param>
public void Delete(int sheetNumber) {
this.Delete(sheetNumber, null);
}
/// <summary>
/// Deletes the selected sheet Note:delete happens on Dispose
/// </summary>
/// <param name="sheetName">Worksheet name</param>
public void Delete(string sheetName) {
this.Update(null, sheetName);
}
private void Delete(int? sheetNumber = null, string sheetName = null) {
CheckFiles();
PrepareArchive(false);
// Get worksheet details
Worksheet worksheet = new Worksheet();
worksheet.GetWorksheetProperties(this, sheetNumber, sheetName);
// Delete the file
if (!string.IsNullOrEmpty(worksheet.FileName)) {
ZipArchiveEntry entry = this.Archive.GetEntry(worksheet.FileName);
if (entry != null) {
entry.Delete();
}
if (this.DeleteWorksheets == null) {
this.DeleteWorksheets = new List<int>();
}
this.DeleteWorksheets.Add(worksheet.Index);
}
}
}
}
|
2881099/dotnetGen_mysql | 1,022 | GenMy/FastExcel/FastExcel.Update.cs | using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Linq;
namespace FastExcel {
public partial class FastExcel {
/// <summary>
/// Update the worksheet
/// </summary>
/// <param name="data">The worksheet</param>
/// <param name="sheetNumber">eg 1,2,4</param>
public void Update(Worksheet data, int sheetNumber) {
this.Update(data, sheetNumber, null);
}
/// <summary>
/// Update the worksheet
/// </summary>
/// <param name="data">The worksheet</param>
/// <param name="sheetName">eg. Sheet1, Sheet2</param>
public void Update(Worksheet data, string sheetName) {
this.Update(data, null, sheetName);
}
private void Update(Worksheet data, int? sheetNumber = null, string sheetName = null) {
CheckFiles();
PrepareArchive();
Worksheet currentData = this.Read(sheetNumber, sheetName);
currentData.Merge(data);
this.Write(currentData);
}
}
}
|
27182812/ChatGLM-LLaMA-chinese-insturct | 2,205 | src/transformers/models/deta/__init__.py | # Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available
_import_structure = {
"configuration_deta": ["DETA_PRETRAINED_CONFIG_ARCHIVE_MAP", "DetaConfig"],
}
try:
if not is_vision_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["image_processing_deta"] = ["DetaImageProcessor"]
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_deta"] = [
"DETA_PRETRAINED_MODEL_ARCHIVE_LIST",
"DetaForObjectDetection",
"DetaModel",
"DetaPreTrainedModel",
]
if TYPE_CHECKING:
from .configuration_deta import DETA_PRETRAINED_CONFIG_ARCHIVE_MAP, DetaConfig
try:
if not is_vision_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .image_processing_deta import DetaImageProcessor
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .modeling_deta import (
DETA_PRETRAINED_MODEL_ARCHIVE_LIST,
DetaForObjectDetection,
DetaModel,
DetaPreTrainedModel,
)
else:
import sys
sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
|
2881099/dotnetGen_postgresql | 1,261 | GenPg/Deflate.cs | using System;
using System.IO;
using System.IO.Compression;
using System.Text;
public static class Deflate {
public static string cs_head = string.Empty;
public static byte[] Decompress(Stream stream) {
try {
stream.Position = 0;
using (MemoryStream ms = new MemoryStream()) {
using (DeflateStream def = new DeflateStream(stream, CompressionMode.Decompress)) {
byte[] data = new byte[1024];
int size = 0;
while ((size = def.Read(data, 0, data.Length)) > 0) {
ms.Write(data, 0, size);
}
}
return ms.ToArray();
}
} catch { return (stream as MemoryStream).ToArray(); };
}
public static byte[] Decompress(byte[] bt) {
return Decompress(new MemoryStream(bt));
}
public static byte[] Compress(string text) {
if (text.Trim().StartsWith("using ")) {
text = Deflate.cs_head + text;
}
return Compress(Encoding.UTF8.GetBytes(text));
}
public static byte[] Compress(byte[] bt) {
return Compress(bt, 0, bt.Length);
}
public static byte[] Compress(byte[] bt, int startIndex, int length) {
using (MemoryStream ms = new MemoryStream()) {
using (DeflateStream def = new DeflateStream(ms, CompressionMode.Compress)) {
def.Write(bt, startIndex, length);
}
return ms.ToArray();
}
}
}
|
2881099/dotnetGen_postgresql | 996 | GenPg/GenPg.csproj | <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.1</TargetFramework>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
<IsPackable>true</IsPackable>
<PackAsTool>true</PackAsTool>
<Authors>2881099</Authors>
<Company>2881099</Company>
<Product>dotnetGen</Product>
<Description>用于快速创建和更新 .NETCore 2.1 + PostgreSQL 项目,非常合适敏捷开发; dotnet tool install -g GenPg</Description>
<PackageProjectUrl>https://github.com/2881099/dotnetgen_postgresql</PackageProjectUrl>
<RepositoryUrl>https://github.com/2881099/dotnetgen_postgresql</RepositoryUrl>
<Version>1.1.14</Version>
<PackageTags>生成器,postgresql,core,pgsql</PackageTags>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="11.0.2" />
<PackageReference Include="Npgsql" Version="4.0.3" />
<PackageReference Include="System.IO.Compression.ZipFile" Version="4.3.0" />
</ItemGroup>
</Project>
|
2881099/dotnetGen_mysql | 5,412 | GenMy/FastExcel/SharedStrings.cs | using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Linq;
namespace FastExcel
{
/// <summary>
/// Read and update xl/sharedStrings.xml file
/// </summary>
public class SharedStrings
{
//A dictionary is a lot faster than a list
private Dictionary<string, int> StringDictionary { get; set; }
private Dictionary<int, string> StringArray { get; set; }
private bool SharedStringsExists { get; set; }
private ZipArchive ZipArchive { get; set; }
public bool PendingChanges { get; private set; }
public bool ReadWriteMode { get; set; }
internal SharedStrings(ZipArchive archive)
{
this.ZipArchive = archive;
this.SharedStringsExists = true;
if (!this.ZipArchive.Entries.Where(entry => entry.FullName == "xl/sharedStrings.xml").Any())
{
this.StringDictionary = new Dictionary<string, int>();
this.SharedStringsExists = false;
return;
}
using (Stream stream = this.ZipArchive.GetEntry("xl/sharedStrings.xml").Open())
{
if (stream == null)
{
this.StringDictionary = new Dictionary<string, int>();
this.SharedStringsExists = false;
return;
}
XDocument document = XDocument.Load(stream);
if (document == null)
{
this.StringDictionary = new Dictionary<string, int>();
this.SharedStringsExists = false;
return;
}
int i = 0;
this.StringDictionary = document.Descendants().Where(d => d.Name.LocalName == "t").Select(e => e.Value).ToDictionary(k=> k,v => i++);
}
}
internal int AddString(string stringValue)
{
if (this.StringDictionary.ContainsKey(stringValue))
{
return this.StringDictionary[stringValue];
}
else
{
this.PendingChanges = true;
this.StringDictionary.Add(stringValue, this.StringDictionary.Count);
// Clear String Array used for retrieval
if (this.ReadWriteMode && this.StringArray != null)
{
this.StringArray.Add(this.StringDictionary.Count - 1, stringValue);
}
else
{
this.StringArray = null;
}
return this.StringDictionary.Count - 1;
}
}
internal void Write()
{
// Only update if changes were made
if (!this.PendingChanges)
{
return;
}
StreamWriter streamWriter = null;
try
{
if (this.SharedStringsExists)
{
streamWriter = new StreamWriter(this.ZipArchive.GetEntry("xl/sharedStrings.xml").Open());
}
else
{
streamWriter = new StreamWriter(this.ZipArchive.CreateEntry("xl/sharedStrings.xml").Open());
}
// TODO instead of saving the headers then writing them back get position where the headers finish then write from there
/* Note: the count attribute value is wrong, it is the number of times strings are used thoughout the workbook it is different to the unique count
* but because this library is about speed and Excel does not seem to care I am not going to fix it because I would need to read the whole workbook
*/
streamWriter.Write(string.Format("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>" +
"<sst uniqueCount=\"{0}\" count=\"{0}\" xmlns=\"http://schemas.openxmlformats.org/spreadsheetml/2006/main\">", this.StringDictionary.Count));
// Add Rows
foreach (var stringValue in this.StringDictionary)
{
streamWriter.Write(string.Format("<si><t>{0}</t></si>", stringValue.Key));
}
//Add Footers
streamWriter.Write("</sst>");
streamWriter.Flush();
}
finally
{
streamWriter.Dispose();
this.PendingChanges = false;
}
}
internal string GetString(string position)
{
int pos = 0;
if (int.TryParse(position, out pos))
{
return GetString(pos + 1);
}
else
{
// TODO: should I throw an error? this is a corrupted excel document
return string.Empty;
}
}
internal string GetString(int position)
{
if (this.StringArray == null)
{
this.StringArray = this.StringDictionary.ToDictionary(kv => kv.Value, kv => kv.Key);
}
return this.StringArray[position - 1];
}
}
}
|
2881099/dotnetGen_sqlserver | 2,986 | Server/Logger.cs | using System;
[assembly: log4net.Config.XmlConfigurator(ConfigFile = "log4net.config", Watch = true)]
namespace Server {
/// <summary>
/// ־
/// </summary>
[Serializable]
public class Logger {
protected readonly string _Name;
private log4net.ILog _Log;
protected log4net.ILog Log {
get {
if (_Log == null) _Log = log4net.LogManager.GetLogger(_Name);
return _Log;
}
}
protected Logger() { }
public Logger(string name) { this._Name = name; }
/// <summary>
/// ȫ־
/// </summary>
public static readonly Logger remotor = new Logger("remotor");
public void Debug(object message, Exception exception) {
Log.Debug(message, exception);
}
public void Debug(object message) {
Log.Debug(message);
}
public void DebugFormat(IFormatProvider provider, string format, params object[] args) {
Log.DebugFormat(provider, format, args);
}
public void DebugFormat(string format, params object[] args) {
Log.DebugFormat(format, args);
}
public void Error(object message, Exception exception) {
Log.Error(message, exception);
}
public void Error(object message) {
Log.Error(message);
}
public void ErrorFormat(IFormatProvider provider, string format, params object[] args) {
Log.ErrorFormat(provider, format, args);
}
public void ErrorFormat(string format, params object[] args) {
Log.ErrorFormat(format, args);
}
public void Fatal(object message, Exception exception) {
Log.Fatal("" + message, exception);
}
public void Fatal(object message) {
Log.Fatal("" + message);
}
public void FatalFormat(IFormatProvider provider, string format, params object[] args) {
Log.FatalFormat(provider, format, args);
}
public void FatalFormat(string format, params object[] args) {
Log.FatalFormat(format, args);
}
public void Info(object message, Exception exception) {
Log.Info(message, exception);
}
public void Info(object message) {
Log.Info(message);
}
public void InfoFormat(IFormatProvider provider, string format, params object[] args) {
Log.InfoFormat(provider, format, args);
}
public void InfoFormat(string format, params object[] args) {
Log.InfoFormat(format, args);
}
public bool IsDebugEnabled {
get { return Log.IsDebugEnabled; }
}
public bool IsErrorEnabled {
get { return Log.IsErrorEnabled; }
}
public bool IsFatalEnabled {
get { return Log.IsFatalEnabled; }
}
public bool IsInfoEnabled {
get { return Log.IsInfoEnabled; }
}
public bool IsWarnEnabled {
get { return Log.IsWarnEnabled; }
}
public void Warn(object message, Exception exception) {
Log.Warn(message, exception);
}
public void Warn(object message) {
Log.Warn(message);
}
public void WarnFormat(IFormatProvider provider, string format, params object[] args) {
Log.WarnFormat(provider, format, args);
}
public void WarnFormat(string format, params object[] args) {
Log.WarnFormat(format, args);
}
}
}
|
2881099/dotnetGen_sqlserver | 13,469 | Server/CodeBuild(DB).cs | using System;
using System.Collections.Generic;
using System.Data;
using System.Text;
using Model;
namespace Server {
internal partial class CodeBuild : IDisposable {
private ClientInfo _client;
private AcceptSocket _socket;
private List<TableInfo> _tables;
private Dictionary<string, Dictionary<string, string>> _column_coments = new Dictionary<string, Dictionary<string, string>>();
public CodeBuild(ClientInfo client, AcceptSocket socket) {
_client = client;
_socket = socket;
}
private object[][] GetDataSet(string commandText) {
SocketMessager messager = new SocketMessager("ExecuteDataSet", commandText);
_socket.Write(messager, delegate (object sender, ServerSocketReceiveEventArgs e) {
messager = e.Messager;
});
object[][] ret = messager.Arg as object[][]; //.netcore
if (ret == null) {
DataSet ds = messager.Arg as DataSet; //.net
if (ds != null) {
List<object[]> tmp = new List<object[]>();
foreach (DataRow row in ds.Tables[0].Rows)
tmp.Add(row.ItemArray);
ret = tmp.ToArray();
}
}
return ret;
}
private int ExecuteNonQuery(string commandText) {
SocketMessager messager = new SocketMessager("ExecuteNonQuery", commandText);
_socket.Write(messager, delegate(object sender, ServerSocketReceiveEventArgs e) {
messager = e.Messager;
});
int val;
int.TryParse(string.Concat(messager.Arg), out val);
return val;
}
public List<DatabaseInfo> GetDatabases() {
Logger.remotor.Info("GetDatabases: " + _client.Server + "," + _client.Username + "," + _client.Password);
List<DatabaseInfo> loc1 = null;
object[][] ds = this.GetDataSet(@"select name from sys.databases where name not in ('master','tempdb','model','msdb')");
if (ds == null) return loc1;
loc1 = new List<DatabaseInfo>();
foreach (object[] row in ds) {
loc1.Add(new DatabaseInfo(string.Concat(row[0])));
}
return loc1;
}
public List<TableInfo> GetTablesByDatabase(string database) {
_client.Database = database;
Logger.remotor.Info("GetTablesByDatabase: " + _client.Server + "," + _client.Username + "," + _client.Password + "," + _client.Database);
List<TableInfo> loc1 = _tables = null;
Dictionary<int, TableInfo> loc2 = new Dictionary<int, TableInfo>();
Dictionary<int, Dictionary<string, ColumnInfo>> loc3 = new Dictionary<int, Dictionary<string, ColumnInfo>>();
object[][] ds = this.GetDataSet(@"
select
a.Object_id
,b.name 'Owner'
,a.name 'Name'
,'T' type
from sys.tables a
inner join sys.schemas b on b.schema_id = a.schema_id
where not(b.name = 'dbo' and a.name = 'sysdiagrams')
union all
select
a.Object_id
,b.name 'Owner'
,a.name 'Name'
,'V' type
from sys.views a
inner join sys.schemas b on b.schema_id = a.schema_id
union all
select
a.Object_id
,b.name 'Owner'
,a.name 'Name'
,'P' type
from sys.procedures a
inner join sys.schemas b on b.schema_id = a.schema_id
where a.type = 'P' and charindex('$NPSP', a.name) = 0 and charindex('diagram', a.name) = 0
order by type desc, b.name, a.name
");
if (ds == null) return loc1;
List<int> loc6 = new List<int>();
List<int> loc66 = new List<int>();
foreach (object[] row in ds) {
int object_id = int.Parse(string.Concat(row[0]));
string owner = string.Concat(row[1]);
string table = string.Concat(row[2]);
string type = string.Concat(row[3]);
loc2.Add(object_id, new TableInfo(object_id, owner, table, type));
loc3.Add(object_id, new Dictionary<string, ColumnInfo>());
switch (type) {
case "V":
case "T":
loc6.Add(object_id);
break;
case "P":
loc66.Add(object_id);
break;
}
}
if (loc6.Count == 0) return loc1;
string loc8 = string.Join(",", loc6.ConvertAll<string>(delegate(int item) { return string.Concat(item); }).ToArray());
string loc88 = string.Join(",", loc66.ConvertAll<string>(delegate(int item) { return string.Concat(item); }).ToArray());
string tsql_place = @"
select
isnull(e.name,'') + '.' + isnull(d.name,'')
,a.Object_id
,a.name 'Column'
,b.name 'Type'
,case
when b.name in ('Text', 'NText', 'Image') then -1
when b.name in ('NChar', 'NVarchar') then a.max_length / 2
else a.max_length end 'Length'
,b.name + case
when b.name in ('Char', 'VarChar', 'NChar', 'NVarChar', 'Binary', 'VarBinary') then '(' +
case when a.max_length = -1 then 'MAX'
when b.name in ('NChar', 'NVarchar') then cast(a.max_length / 2 as varchar)
else cast(a.max_length as varchar) end + ')'
when b.name in ('Numeric', 'Decimal') then '(' + cast(a.precision as varchar) + ',' + cast(a.scale as varchar) + ')'
else '' end as 'SqlType'
,c.value
{0} a
inner join sys.types b on b.user_type_id = a.user_type_id
left join sys.extended_properties AS c ON c.major_id = a.object_id AND c.minor_id = a.column_id
left join sys.tables d on d.object_id = a.object_id
left join sys.schemas e on e.schema_id = d.schema_id
where a.object_id in ({1})
";
string tsql = string.Format(tsql_place, @"
,a.is_nullable 'IsNullable'
,a.is_identity 'IsIdentity'
from sys.columns", loc8);
if (loc88.Length > 0) {
tsql += "union all" +
string.Format(tsql_place.Replace(
"left join sys.extended_properties AS c ON c.major_id = a.object_id AND c.minor_id = a.column_id",
"left join sys.extended_properties AS c ON c.major_id = a.object_id AND c.minor_id = a.parameter_id"), @"
,cast(0 as bit) 'IsNullable'
,a.is_output 'IsIdentity'
from sys.parameters", loc88);
}
ds = this.GetDataSet(tsql);
if (ds == null) return loc1;
foreach (object[] row in ds) {
string table_id = string.Concat(row[0]);
int object_id = int.Parse(string.Concat(row[1]));
string column = string.Concat(row[2]);
string type = string.Concat(row[3]);
int max_length = int.Parse(string.Concat(row[4]));
string sqlType = string.Concat(row[5]);
string comment = string.Concat(row[6]);
if (string.IsNullOrEmpty(comment)) comment = column;
bool is_nullable = bool.Parse(string.Concat(row[7]));
bool is_identity = bool.Parse(string.Concat(row[8]));
if (max_length == 0) max_length = -1;
loc3[object_id].Add(column, new ColumnInfo(
column, CodeBuild.GetDBType(type), max_length, sqlType,
DataSort.NONE, is_nullable, is_identity, false, false));
if (!_column_coments.ContainsKey(table_id)) _column_coments.Add(table_id, new Dictionary<string, string>());
if (!_column_coments[table_id].ContainsKey(column)) _column_coments[table_id].Add(column, comment);
else _column_coments[table_id][column] = comment;
}
ds = this.GetDataSet(string.Format(@"
select
a.object_id 'Object_id'
,c.name 'Column'
,b.index_id 'Index_id'
,b.is_unique 'IsUnique'
,b.is_primary_key 'IsPrimaryKey'
,cast(case when b.type_desc = 'CLUSTERED' then 1 else 0 end as bit) 'IsClustered'
,case when a.is_descending_key = 1 then 2 when a.is_descending_key = 0 then 1 else 0 end 'IsDesc'
from sys.index_columns a
inner join sys.indexes b on b.object_id = a.object_id and b.index_id = a.index_id
left join sys.columns c on c.object_id = a.object_id and c.column_id = a.column_id
where a.object_id in ({0})
", loc8));
if (ds == null) return loc1;
Dictionary<int, Dictionary<int, List<ColumnInfo>>> indexColumns = new Dictionary<int, Dictionary<int, List<ColumnInfo>>>();
Dictionary<int, Dictionary<int, List<ColumnInfo>>> uniqueColumns = new Dictionary<int, Dictionary<int, List<ColumnInfo>>>();
foreach (object[] row in ds) {
int object_id = int.Parse(string.Concat(row[0]));
string column = string.Concat(row[1]);
int index_id = int.Parse(string.Concat(row[2]));
bool is_unique = bool.Parse(string.Concat(row[3]));
bool is_primary_key = bool.Parse(string.Concat(row[4]));
bool is_clustered = bool.Parse(string.Concat(row[5]));
int is_desc = int.Parse(string.Concat(row[6]));
if (loc3.ContainsKey(object_id) == false || loc3[object_id].ContainsKey(column) == false) continue;
ColumnInfo loc9 = loc3[object_id][column];
if (loc9.IsClustered == false && is_clustered) loc9.IsClustered = is_clustered;
if (loc9.IsPrimaryKey == false && is_primary_key) loc9.IsPrimaryKey = is_primary_key;
if (loc9.Orderby == DataSort.NONE) loc9.Orderby = (DataSort)is_desc;
Dictionary<int, List<ColumnInfo>> loc10 = null;
List<ColumnInfo> loc11 = null;
if (!indexColumns.TryGetValue(object_id, out loc10)) {
indexColumns.Add(object_id, loc10 = new Dictionary<int, List<ColumnInfo>>());
}
if (!loc10.TryGetValue(index_id, out loc11)) {
loc10.Add(index_id, loc11 = new List<ColumnInfo>());
}
loc11.Add(loc9);
if (is_unique) {
if (!uniqueColumns.TryGetValue(object_id, out loc10)) {
uniqueColumns.Add(object_id, loc10 = new Dictionary<int, List<ColumnInfo>>());
}
if (!loc10.TryGetValue(index_id, out loc11)) {
loc10.Add(index_id, loc11 = new List<ColumnInfo>());
}
loc11.Add(loc9);
}
}
foreach (int object_id in indexColumns.Keys) {
foreach (List<ColumnInfo> columns in indexColumns[object_id].Values) {
loc2[object_id].Indexes.Add(columns);
}
}
foreach (int object_id in uniqueColumns.Keys) {
foreach (List<ColumnInfo> columns in uniqueColumns[object_id].Values) {
columns.Sort(delegate(ColumnInfo c1, ColumnInfo c2) {
return c1.Name.CompareTo(c2.Name);
});
loc2[object_id].Uniques.Add(columns);
}
}
ds = this.GetDataSet(string.Format(@"
select
b.object_id 'Object_id'
,c.name 'Column'
,a.constraint_object_id 'FKId'
,referenced_object_id
,cast(1 as bit) 'IsForeignKey'
,d.name 'Referenced_Column'
,null 'Referenced_Sln'
,null 'Referenced_Table'
from sys.foreign_key_columns a
inner join sys.tables b on b.object_id = a.parent_object_id
inner join sys.columns c on c.object_id = a.parent_object_id and c.column_id = a.parent_column_id
inner join sys.columns d on d.object_id = a.referenced_object_id and d.column_id = a.referenced_column_id
where b.object_id in ({0})
", loc8));
if (ds == null) return loc1;
Dictionary<int, Dictionary<int, ForeignKeyInfo>> fkColumns = new Dictionary<int, Dictionary<int, ForeignKeyInfo>>();
foreach (object[] row in ds) {
int object_id, fk_id, referenced_object_id ;
int.TryParse(string.Concat(row[0]), out object_id);
string column = string.Concat(row[1]);
int.TryParse(string.Concat(row[2]), out fk_id);
int.TryParse(string.Concat(row[3]), out referenced_object_id);
bool is_foreign_key = bool.Parse(string.Concat(row[4]));
string referenced_column = string.Concat(row[5]);
string referenced_db = string.Concat(row[6]);
string referenced_table = string.Concat(row[7]);
ColumnInfo loc9 = loc3[object_id][column];
TableInfo loc10 = null;
ColumnInfo loc11 = null;
bool isThisSln = referenced_object_id != 0;
if (isThisSln) {
loc10 = loc2[referenced_object_id];
loc11 = loc3[referenced_object_id][referenced_column];
} else {
}
Dictionary<int, ForeignKeyInfo> loc12 = null;
ForeignKeyInfo loc13 = null;
if (!fkColumns.TryGetValue(object_id, out loc12)) {
fkColumns.Add(object_id, loc12 = new Dictionary<int, ForeignKeyInfo>());
}
if (!loc12.TryGetValue(fk_id, out loc13)) {
if (isThisSln) {
loc13 = new ForeignKeyInfo(loc2[object_id], loc10);
} else {
loc13 = new ForeignKeyInfo(referenced_db, referenced_table, is_foreign_key);
}
loc12.Add(fk_id, loc13);
}
loc13.Columns.Add(loc9);
if (isThisSln) {
loc13.ReferencedColumns.Add(loc11);
} else {
loc13.ReferencedColumnNames.Add(referenced_column);
}
}
foreach (int object_id in fkColumns.Keys) {
foreach (ForeignKeyInfo fk in fkColumns[object_id].Values) {
loc2[object_id].ForeignKeys.Add(fk);
}
}
foreach (int loc4 in loc3.Keys) {
foreach (ColumnInfo loc5 in loc3[loc4].Values) {
loc2[loc4].Columns.Add(loc5);
if (loc5.IsIdentity) {
loc2[loc4].Identitys.Add(loc5);
}
if (loc5.IsClustered) {
loc2[loc4].Clustereds.Add(loc5);
}
if (loc5.IsPrimaryKey) {
loc2[loc4].PrimaryKeys.Add(loc5);
}
}
}
loc1 = _tables = new List<TableInfo>();
foreach (TableInfo loc4 in loc2.Values) {
if (loc4.PrimaryKeys.Count == 0 && loc4.Uniques.Count > 0) {
foreach (ColumnInfo loc5 in loc4.Uniques[0]) {
loc5.IsPrimaryKey = true;
loc4.PrimaryKeys.Add(loc5);
}
}
this.Sort(loc4);
loc1.Add(loc4);
}
loc2.Clear();
loc3.Clear();
return loc1;
}
protected virtual void Sort(TableInfo table) {
table.PrimaryKeys.Sort(delegate (ColumnInfo c1, ColumnInfo c2) {
return c1.Name.CompareTo(c2.Name);
});
table.Columns.Sort(delegate(ColumnInfo c1, ColumnInfo c2) {
int compare = c2.IsPrimaryKey.CompareTo(c1.IsPrimaryKey);
if (compare == 0) {
bool b1 = table.ForeignKeys.Find(delegate(ForeignKeyInfo fk) {
return fk.Columns.Find(delegate(ColumnInfo c3) {
return c3.Name == c1.Name;
}) != null;
}) != null;
bool b2 = table.ForeignKeys.Find(delegate(ForeignKeyInfo fk) {
return fk.Columns.Find(delegate(ColumnInfo c3) {
return c3.Name == c2.Name;
}) != null;
}) != null;
compare = b2.CompareTo(b1);
}
if (compare == 0) compare = c1.Name.CompareTo(c2.Name);
return compare;
});
}
#region IDisposable Ա
public void Dispose() {
if (_tables != null) {
_tables.Clear();
}
}
#endregion
}
}
|
2881099/dotnetGen_postgresql | 25,692 | GenPg/ConsoleApp.cs | using Model;
using Newtonsoft.Json.Linq;
using Npgsql;
using System;
using System.Collections.Generic;
using System.Data;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
namespace GenPg {
public class ConsoleApp {
ClientInfo _client;
ClientSocket _socket;
public string ConnectionString {
get {
string connStr = "Host={0};Port={1};Username={2};Password={3};Database={4};";
return string.Format(connStr, this._client.Server, this._client.Port, this._client.Username, this._client.Password, this._client.Database);
}
}
public string Server;
public int Port;
public string Username;
public string Password;
public string Database;
public string SolutionName;
public bool IsMakeSolution;
public bool IsMakeWebAdmin;
public bool IsDownloadRes;
public string OutputPath;
/// <summary>
/// Command-line entry point: parses arguments, connects to the remote
/// code-generation server, requests the generated files, writes them under
/// <see cref="OutputPath"/> and optionally scaffolds and builds the solution.
/// Signals <paramref name="wait"/> when processing (or help output) finishes.
/// </summary>
/// <param name="args">Raw command-line arguments; args[0] is "host[:port]" or a help switch.</param>
/// <param name="wait">Event the caller blocks on; set exactly once before every return path.</param>
public ConsoleApp(string[] args, ManualResetEvent wait) {
this.OutputPath = Directory.GetCurrentDirectory();
string args0 = args[0].Trim().ToLower();
// Help screen: colored banner plus usage text, then signal and bail out.
// NOTE(review): the first comparison uses the raw args[0] ("?") while the
// others use the trimmed/lowered args0 — presumably intentional, but confirm.
if (args[0] == "?" || args0 == "--help" || args0 == "-help") {
// Save the console colors so they can be restored after the banner.
var bgcolor = Console.BackgroundColor;
var fgcolor = Console.ForegroundColor;
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.White;
Console.Write("##");
Console.Write("######################################");
Console.Write("##");
Console.BackgroundColor = bgcolor;
Console.ForegroundColor = fgcolor;
Console.WriteLine("");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.White;
Console.Write("##");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.DarkRed;
Console.Write("                                      ");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.White;
Console.Write("##");
Console.BackgroundColor = bgcolor;
Console.ForegroundColor = fgcolor;
Console.WriteLine("");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.White;
Console.Write("##");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.DarkRed;
Console.Write("   .NETCore 2.1 + PostgreSQL 生成器   ");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.White;
Console.Write("##");
Console.BackgroundColor = bgcolor;
Console.ForegroundColor = fgcolor;
Console.WriteLine("");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.White;
Console.Write("##");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.DarkRed;
Console.Write("                                      ");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.White;
Console.Write("##");
Console.BackgroundColor = bgcolor;
Console.ForegroundColor = fgcolor;
Console.WriteLine("");
Console.BackgroundColor = ConsoleColor.DarkCyan;
Console.ForegroundColor = ConsoleColor.White;
Console.Write("##");
Console.Write("######################################");
Console.Write("##");
Console.BackgroundColor = bgcolor;
Console.ForegroundColor = ConsoleColor.DarkCyan;
Console.Write(@"
用于快速创建和更新 .NETCore 2.1 + PostgreSQL 项目,非常合适敏捷开发;
Github: https://github.com/2881099/dotnetgen_postgresql
");
Console.ForegroundColor = ConsoleColor.DarkCyan;
Console.Write("Example:");
Console.ForegroundColor = fgcolor;
Console.WriteLine(@"
> GenPg 127.0.0.1[:5432] -U postgres -P 123456 -D dyschool -N dyschool -S -A -R
   -U PostgreSQL账号
   -P PostgreSQL密码
   -D 需要生成的数据库
   -N 字符串,生成代码的解决方案名和命名空间
   -S 生成解决方案,在项目第一次生成时使用
   -A 生成后台管理
   -R 下载资源
   -O 输出路径(默认:当前目录)");
wait.Set();
return;
}
// args[0] is "host" or "host:port"; default to PostgreSQL's 5432.
string[] ss = args[0].Split(new char[] { ':' }, 2);
this.Server = ss[0];
if (int.TryParse(ss.Length == 2 ? ss[1] : "5432", out this.Port) == false) this.Port = 5432;
// Parse the remaining switches; value switches consume the next argument.
for (int a = 1; a < args.Length; a++) {
switch (args[a]) {
case "-U":
if (a + 1 >= args.Length) Console.WriteLine("-U 参数错误");
else this.Username = args[a + 1];
a++;
break;
case "-P":
if (a + 1 >= args.Length) Console.WriteLine("-P 参数错误");
else this.Password = args[a + 1];
a++;
break;
case "-D":
if (a + 1 >= args.Length) Console.WriteLine("-D 参数错误");
else this.Database = args[a + 1];
a++;
break;
case "-N":
if (a + 1 >= args.Length) Console.WriteLine("-N 参数错误");
else this.SolutionName = args[a + 1];
a++;
break;
case "-O":
if (a + 1 >= args.Length) Console.WriteLine("-O 参数错误");
else this.OutputPath = args[a + 1];
a++;
break;
case "-S":
this.IsMakeSolution = true;
break;
case "-A":
this.IsMakeWebAdmin = true;
break;
case "-R":
this.IsDownloadRes = true;
break;
}
}
this._client = new ClientInfo(this.Server, this.Port, this.Username, this.Password);
// Resolve the generation server's address from a remote text file.
// NOTE(review): neither the WebResponse nor the StreamReader is disposed here;
// consider wrapping in using blocks.
StreamReader sr = new StreamReader(System.Net.WebRequest.Create("https://files.cnblogs.com/files/kellynic/GenPg_server.css").GetResponse().GetResponseStream(), Encoding.UTF8);
string server = sr.ReadToEnd()?.Trim();
//server = "127.0.0.1:38888";
Uri uri = new Uri("tcp://" + server + "/");
this._socket = new ClientSocket();
this._socket.Error += Socket_OnError;
this._socket.Receive += Socket_OnReceive;
this._socket.Connect(uri.Host, uri.Port);
// Give the socket one second to come up before checking Running.
Thread.CurrentThread.Join(TimeSpan.FromSeconds(1));
if (this._socket.Running == false) {
wait.Set();
return;
}
WriteLine("正在生成,稍候 …", ConsoleColor.DarkGreen);
SocketMessager messager = new SocketMessager("GetDatabases", this._client);
this._socket.Write(messager, delegate (object sender2, ClientSocketReceiveEventArgs e2) {
List<DatabaseInfo> dbs = e2.Messager.Arg as List<DatabaseInfo>;
});
this._client.Database = this.Database;
// Fetch the table list for the target database from the server.
List<TableInfo> tables = null;
messager = new SocketMessager("GetTablesByDatabase", this._client.Database);
this._socket.Write(messager, delegate (object sender2, ClientSocketReceiveEventArgs e2) {
tables = e2.Messager.Arg as List<TableInfo>;
});
if (tables == null) {
Console.WriteLine("[" + DateTime.Now.ToString("MM-dd HH:mm:ss") + "] 无法读取表");
this._socket.Close();
this._socket.Dispose();
wait.Set();
return;
}
tables.ForEach(a => a.IsOutput = true);
// Ask the server to build; the third argument is a "1"/"0" bitmap string of
// which tables to output. Allow up to 5 minutes for the build round-trip.
List<BuildInfo> bs = null;
messager = new SocketMessager("Build", new object[] {
SolutionName,
IsMakeSolution,
string.Join("", tables.ConvertAll<string>(delegate(TableInfo table){
return string.Concat(table.IsOutput ? 1 : 0);
}).ToArray()),
IsMakeWebAdmin,
IsDownloadRes
});
this._socket.Write(messager, delegate (object sender2, ClientSocketReceiveEventArgs e2) {
bs = e2.Messager.Arg as List<BuildInfo>;
if (e2.Messager.Arg is Exception) throw e2.Messager.Arg as Exception;
}, TimeSpan.FromSeconds(60 * 5));
if (bs != null) {
// Write each generated file; binary archives are raw, text files are
// deflate-compressed and may contain a {connectionString} placeholder.
foreach (BuildInfo b in bs) {
string path = Path.Combine(OutputPath, b.Path);
Directory.CreateDirectory(Path.GetDirectoryName(path));
string fileName = Path.GetFileName(b.Path);
string ext = Path.GetExtension(b.Path);
Encoding encode = Encoding.UTF8;
if (fileName.EndsWith(".rar") || fileName.EndsWith(".zip") || fileName.EndsWith(".dll")) {
using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write)) {
fs.Write(b.Data, 0, b.Data.Length);
fs.Close();
}
continue;
}
byte[] data = Deflate.Decompress(b.Data);
string content = Encoding.UTF8.GetString(data);
if (string.Compare(fileName, "web.config") == 0) {
// web.config is XML, so the connection string must be HTML-encoded.
string place = System.Web.HttpUtility.HtmlEncode(this.ConnectionString);
content = content.Replace("{connectionString}", place);
}
if (fileName.EndsWith(".json")) {
content = content.Replace("{connectionString}", this.ConnectionString);
}
if (string.Compare(ext, ".refresh") == 0) {
encode = Encoding.Unicode;
}
using (StreamWriter sw = new StreamWriter(path, false, encode)) {
sw.Write(content);
sw.Close();
}
}
var appsettingsPath = Path.Combine(OutputPath, "appsettings.json");
var appsettingsPathWebHost = Path.Combine(OutputPath, @"src\WebHost\appsettings.json");
var htmZipPath = Path.Combine(OutputPath, "htm.zip");
// Extract htm.zip (static web resources) into the output directory.
if (this.IsDownloadRes && File.Exists(htmZipPath)) {
try {
System.IO.Compression.ZipFile.ExtractToDirectory(htmZipPath, OutputPath, Encoding.UTF8, true);
} catch (Exception ex) {
var color = Console.ForegroundColor;
Console.ForegroundColor = ConsoleColor.Red;
Console.WriteLine($"解压 htm.zip 失败:{ex.Message}");
Console.ForegroundColor = color;
}
}
if (this.IsMakeSolution) {
// Full scaffold: ensure gulp-cli, build the modules and WebHost, then run it.
WriteLine("代码已生成完毕!使用 -S 生成完整项目,正在建立脚手架,大约需要10秒 …", ConsoleColor.DarkGreen);
var shellret = ShellRun(OutputPath, "gulp -v");
if (!string.IsNullOrEmpty(shellret.err)) {
WriteLine("");
WriteLine(@"正在安装gulp-cli …", ConsoleColor.DarkGreen);
shellret = ShellRun(OutputPath, "npm install --global gulp-cli");
if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
}
//WriteLine("");
//WriteLine("正在还原项目 …", ConsoleColor.DarkGreen);
//shellret = ShellRun(OutputPath, "dotnet1 restore");
//if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
//if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
//if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
WriteLine("");
WriteLine(@"正在编译Module\Test …", ConsoleColor.DarkGreen);
shellret =ShellRun(Path.Combine(OutputPath, @"src\Module\Test"), "dotnet build");
if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
WriteLine("");
WriteLine(@"正在编译Module\Admin …", ConsoleColor.DarkGreen);
shellret = ShellRun(Path.Combine(OutputPath, @"src\Module\Admin"), "dotnet build");
if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
WriteLine("");
WriteLine("正在安装npm包 …", ConsoleColor.DarkGreen);
shellret = ShellRun(Path.Combine(OutputPath, @"src\WebHost"), "npm install");
if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
WriteLine("");
WriteLine("正在编译WebHost …", ConsoleColor.DarkGreen);
shellret = ShellRun(Path.Combine(OutputPath, @"src\WebHost"), "dotnet build");
if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
WriteLine("");
WriteLine($"脚手架建立完成。", ConsoleColor.DarkGreen);
//WriteLine("");
//Write($"项目运行依赖 ", ConsoleColor.DarkYellow);
//Write($"redis-server", ConsoleColor.Green);
//Write($",安装地址:", ConsoleColor.DarkYellow);
//Write("https://files.cnblogs.com/files/kellynic/Redis-x64-2.8.2402.zip", ConsoleColor.Blue);
//WriteLine($",或前往官方下载", ConsoleColor.DarkYellow);
WriteLine($"{Path.Combine(OutputPath, @"src\WebHost")} 目执行 dotnet run", ConsoleColor.DarkYellow);
WriteLine("");
//Console.WriteLine(ShellRun(Path.Combine(OutputPath, @"src\WebHost"), "dotnet run"));
// Launch the generated WebHost in Development mode and block until it exits.
var pro = new System.Diagnostics.Process();
pro.StartInfo = new System.Diagnostics.ProcessStartInfo("dotnet", "run --urls=http://0.0.0.0:5000") {
WorkingDirectory = Path.Combine(OutputPath, @"src\WebHost"),
EnvironmentVariables = { ["ASPNETCORE_ENVIRONMENT"] = "Development" }
};
pro.Start();
pro.WaitForExit();
}
// When -S/-A/-R are all off and src\WebHost\appsettings.json does not exist,
// patch the appsettings.json in the current directory instead ("update only" mode).
if (this.IsDownloadRes == false && this.IsMakeSolution == false && this.IsMakeWebAdmin == false && File.Exists(appsettingsPathWebHost) == false) {
var appsettings = Newtonsoft.Json.JsonConvert.DeserializeObject(File.Exists(appsettingsPath) ? File.ReadAllText(appsettingsPath) : "{}") as JToken;
var oldtxt = appsettings.ToString();
// Fill in any missing settings without overwriting existing values.
if (appsettings["ConnectionStrings"] == null) appsettings["ConnectionStrings"] = new JObject();
if (appsettings["ConnectionStrings"][$"{this.SolutionName}_npgsql"] == null) appsettings["ConnectionStrings"][$"{this.SolutionName}_npgsql"] = this.ConnectionString + ";Pooling=true;Maximum Pool Size=100";
if (appsettings["ConnectionStrings"]["redis1"] == null) appsettings["ConnectionStrings"]["redis1"] = $"127.0.0.1:6379,password=,defaultDatabase=13,poolsize=10,ssl=false,writeBuffer=20480,prefix={this.SolutionName}";
if (appsettings["ConnectionStrings"]["redis2"] == null) appsettings["ConnectionStrings"]["redis2"] = $"127.0.0.1:6379,password=,defaultDatabase=13,poolsize=10,ssl=false,writeBuffer=20480,prefix={this.SolutionName}";
if (appsettings[$"{this.SolutionName}_BLL_ITEM_CACHE"] == null) appsettings[$"{this.SolutionName}_BLL_ITEM_CACHE"] = JToken.FromObject(new {
Timeout = 180
});
if (appsettings["Logging"] == null) appsettings["Logging"] = new JObject();
if (appsettings["Logging"]["IncludeScopes"] == null) appsettings["Logging"]["IncludeScopes"] = false;
if (appsettings["Logging"]["LogLevel"] == null) appsettings["Logging"]["LogLevel"] = new JObject();
if (appsettings["Logging"]["LogLevel"]["Default"] == null) appsettings["Logging"]["LogLevel"]["Default"] = "Debug";
if (appsettings["Logging"]["LogLevel"]["System"] == null) appsettings["Logging"]["LogLevel"]["System"] = "Information";
if (appsettings["Logging"]["LogLevel"]["Microsoft"] == null) appsettings["Logging"]["LogLevel"]["Microsoft"] = "Information";
var newtxt = appsettings.ToString();
if (newtxt != oldtxt) File.WriteAllText(appsettingsPath, newtxt, Encoding.UTF8);
// Add NuGet references (dng.Pgsql, CSRedisCore) to the .csproj in the current
// directory when they are not already present.
string csprojPath = Directory.GetFiles(OutputPath, "*.csproj").FirstOrDefault();
if (!string.IsNullOrEmpty(csprojPath) && File.Exists(csprojPath)) {
if (Regex.IsMatch(File.ReadAllText(csprojPath), @"dng\.Pgsql""\s+Version=""", RegexOptions.IgnoreCase) == false) {
System.Diagnostics.Process pro = new System.Diagnostics.Process();
pro.StartInfo = new System.Diagnostics.ProcessStartInfo("dotnet", "add package dng.Pgsql") {
WorkingDirectory = OutputPath
};
pro.Start();
pro.WaitForExit();
}
if (Regex.IsMatch(File.ReadAllText(csprojPath), @"CSRedisCore""\s+Version=""", RegexOptions.IgnoreCase) == false) {
System.Diagnostics.Process pro = new System.Diagnostics.Process();
pro.StartInfo = new System.Diagnostics.ProcessStartInfo("dotnet", "add package CSRedisCore") {
WorkingDirectory = OutputPath
};
pro.Start();
pro.WaitForExit();
}
}
// Inject initialization code into Startup.cs via regex rewrites.
string startupPath = Path.Combine(OutputPath, "Startup.cs");
if (!string.IsNullOrEmpty(startupPath) && File.Exists(startupPath)) {
// Only web projects need Caching.CSRedis.
// NOTE(review): csprojPath may be null here (the guard above only skips the
// csproj block, not this one) — File.ReadAllText(null) would throw; confirm.
if (Regex.IsMatch(File.ReadAllText(csprojPath), @"Caching.CSRedis""\s+Version=""", RegexOptions.IgnoreCase) == false) {
System.Diagnostics.Process pro = new System.Diagnostics.Process();
pro.StartInfo = new System.Diagnostics.ProcessStartInfo("dotnet", "add package Caching.CSRedis") {
WorkingDirectory = OutputPath
};
pro.Start();
pro.WaitForExit();
}
bool isChanged = false;
var startupCode = File.ReadAllText(startupPath);
// Ensure the using directives the injected code depends on.
if (Regex.IsMatch(startupCode, @"using\s+Microsoft\.Extensions\.Caching\.Distributed;") == false) {
isChanged = true;
startupCode = "using Microsoft.Extensions.Caching.Distributed;\r\n" + startupCode;
}
if (Regex.IsMatch(startupCode, @"using\s+Microsoft\.Extensions\.Logging;") == false) {
isChanged = true;
startupCode = "using Microsoft.Extensions.Logging;\r\n" + startupCode;
}
if (Regex.IsMatch(startupCode, @"using\s+Microsoft\.Extensions\.Configuration;") == false) {
isChanged = true;
startupCode = "using Microsoft.Extensions.Configuration;\r\n" + startupCode;
}
var servicesName = "services";
// Inject RedisHelper initialization at the top of ConfigureServices.
if (startupCode.IndexOf("RedisHelper.Initialization") == -1) {
startupCode = Regex.Replace(startupCode, @"[\t ]+public\s+void\s+ConfigureServices\s*\(\s*IServiceCollection\s+(\w+)[^\{]+\{", m => {
isChanged = true;
var connStr1 = @"Configuration[""ConnectionStrings:redis2""]";
var connStr2 = @"Configuration[""ConnectionStrings:redis1""]";
if (File.Exists(appsettingsPath) == false) {
connStr1 = $"127.0.0.1:6379,password=,defaultDatabase=13,poolsize=50,ssl=false,writeBuffer=20480,prefix={this.SolutionName}";
connStr2 = $"127.0.0.1:6379,password=,defaultDatabase=13,poolsize=50,ssl=false,writeBuffer=20480,prefix={this.SolutionName}";
}
return m.Groups[0].Value + $@"
//单redis节点模式,如需开启集群负载,请将注释去掉并做相应配置
RedisHelper.Initialization(
csredis: new CSRedis.CSRedisClient(//null,
//{connStr1},
{connStr2}));
{servicesName = m.Groups[1].Value}.AddSingleton<IDistributedCache>(new Microsoft.Extensions.Caching.Redis.CSRedisCache(RedisHelper.Instance));
";
}, RegexOptions.Multiline);
}
// Add a Configuration property when the Startup class does not declare one.
if (Regex.IsMatch(startupCode, @"\s+IConfiguration(Root)?\s+Configuration(;|\s+\{)") == false) {
startupCode = Regex.Replace(startupCode, @"[\t ]+public\s+void\s+ConfigureServices\s*\(\s*IServiceCollection\s+(\w+)[^\{]+\{", m => {
isChanged = true;
return $@"
public IConfiguration Configuration {{ get; set; }}
{m.Groups[0].Value}
Configuration = {servicesName = m.Groups[1].Value}.BuildServiceProvider().GetService<IConfiguration>();";
}, RegexOptions.Multiline);
}
// Inject PSqlHelper initialization into Configure, adding ILoggerFactory /
// IApplicationBuilder parameters if the signature lacks them.
if (startupCode.IndexOf(this.SolutionName + ".BLL.PSqlHelper.Initialization") == -1) {
startupCode = Regex.Replace(startupCode, @"([\t ]+public\s+void\s+Configure\s*\()([^\{]+)\{", m => {
isChanged = true;
var str1 = m.Groups[1].Value;
var str2 = m.Groups[2].Value;
var loggerFactory = Regex.Match(str2, @"\bILoggerFactory\s+(\w+)");
if (loggerFactory.Success == false) str2 = "ILoggerFactory loggerFactory, " + str2;
loggerFactory = Regex.Match(str2, @"\bILoggerFactory\s+(\w+)");
var appName = Regex.Match(str2, @"\bIApplicationBuilder\s+(\w+)");
if (appName.Success == false) str2 = "IApplicationBuilder app, " + str2;
appName = Regex.Match(str2, @"\bIApplicationBuilder\s+(\w+)");
var connStr = $@"Configuration[""ConnectionStrings:{this.SolutionName}_npgsql""]";
if (File.Exists(appsettingsPath) == false) {
connStr = $"{this.ConnectionString};Pooling=true;Maximum Pool Size=100";
}
return str1 + str2 + $@"{{
{this.SolutionName}.BLL.PSqlHelper.Initialization({appName.Groups[1].Value}.ApplicationServices.GetService<IDistributedCache>(), Configuration.GetSection(""{this.SolutionName}_BLL_ITEM_CACHE""),
{connStr}, /* 此参数可以配置【从数据库】 */ null, {loggerFactory.Groups[1].Value}.CreateLogger(""{this.SolutionName}_DAL_psqlhelper""));
";
}, RegexOptions.Multiline);
}
if (isChanged) File.WriteAllText(startupPath, startupCode);
}
}
// Drop a convenience .bat so the user can re-run the generator with the same
// arguments, unless a legacy "GenPg只更新db.bat" already exists.
if (File.Exists(Path.Combine(OutputPath, "GenPg只更新db.bat")) == false) {
var batPath = Path.Combine(OutputPath, $"GenPg_{this.SolutionName}_{this.Server}_{this.Database}.bat");
if (File.Exists(batPath) == false) File.WriteAllText(batPath, $@"
GenPg {this.Server}:{this.Port} -U {this.Username} -P {this.Password} -D {this.Database} -N {this.SolutionName}");
}
}
this._socket.Close();
this._socket.Dispose();
GC.Collect();
ConsoleColor fc = Console.ForegroundColor;
Console.ForegroundColor = ConsoleColor.Green;
Console.WriteLine("[" + DateTime.Now.ToString("MM-dd HH:mm:ss") + "] The code files be maked in \"" + OutputPath + "\", please check.");
Console.ForegroundColor = fc;
wait.Set();
}
/// <summary>Logs a socket error message to the console with a timestamp.</summary>
private void Socket_OnError(object sender, ClientSocketErrorEventArgs e) {
    var stamp = DateTime.Now.ToString("MM-dd HH:mm:ss");
    Console.WriteLine($"[{stamp}] {e.Exception.Message}");
}
/// <summary>
/// Handles server requests arriving over the socket: executes the requested
/// SQL locally and writes the result back with the same messager Id.
/// Errors are reported through <see cref="Socket_OnError"/> and a null/zero
/// result is still sent back so the server is not left waiting.
/// </summary>
private void Socket_OnReceive(object sender, ClientSocketReceiveEventArgs e) {
    switch (e.Messager.Action) {
        case "ExecuteDataSet": {
            var commandText = e.Messager.Arg.ToString();
            object[][] rows = null;
            try {
                rows = ConsoleApp.ExecuteDataSet(this.ConnectionString, commandText);
            } catch (Exception ex) {
                this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
            }
            var reply = new SocketMessager(e.Messager.Action, rows);
            reply.Id = e.Messager.Id;
            this._socket.Write(reply);
            break;
        }
        case "ExecuteNonQuery": {
            var commandText = e.Messager.Arg.ToString();
            var affected = 0;
            try {
                affected = ConsoleApp.ExecuteNonQuery(this.ConnectionString, commandText);
            } catch (Exception ex) {
                this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
            }
            var reply = new SocketMessager(e.Messager.Action, affected);
            reply.Id = e.Messager.Id;
            this._socket.Write(reply);
            break;
        }
        default:
            Console.WriteLine($"[{DateTime.Now.ToString("MM-dd HH:mm:ss")}] 您当前使用的版本未能实现功能!");
            break;
    }
}
/// <summary>
/// Opens a connection, executes <paramref name="cmdText"/> as a non-query
/// command and returns the number of affected rows.
/// </summary>
/// <param name="connectionString">Npgsql connection string.</param>
/// <param name="cmdText">SQL to execute (supplied by the generation server, not user input).</param>
/// <returns>Rows affected, as reported by <c>ExecuteNonQuery</c>.</returns>
/// <exception cref="NpgsqlException">Propagated to the caller on any database error.</exception>
public static int ExecuteNonQuery(string connectionString, string cmdText) {
    // Fix: the original never disposed the NpgsqlCommand and duplicated the
    // connection cleanup in a catch block; nested using blocks release both
    // resources on every path, so exceptions can simply propagate.
    using (NpgsqlConnection conn = new NpgsqlConnection(connectionString))
    using (NpgsqlCommand cmd = new NpgsqlCommand(cmdText, conn)) {
        conn.Open();
        return cmd.ExecuteNonQuery();
    }
}
/// <summary>
/// Opens a connection, executes <paramref name="cmdText"/> and materializes
/// every result row as an <c>object[]</c> of column values.
/// </summary>
/// <param name="connectionString">Npgsql connection string.</param>
/// <param name="cmdText">SQL query to execute.</param>
/// <returns>All rows; an empty array when the query returns no rows.</returns>
/// <exception cref="NpgsqlException">Propagated to the caller on any database error.</exception>
public static object[][] ExecuteDataSet(string connectionString, string cmdText) {
    List<object[]> rows = new List<object[]>();
    // Fix: the original never disposed the NpgsqlCommand and had duplicated
    // Close()/Parameters.Clear() calls on the success and failure paths;
    // nested using blocks guarantee cleanup on every path.
    using (NpgsqlConnection conn = new NpgsqlConnection(connectionString))
    using (NpgsqlCommand cmd = new NpgsqlCommand(cmdText, conn)) {
        conn.Open();
        using (var dr = cmd.ExecuteReader()) {
            while (dr.Read()) {
                object[] vals = new object[dr.FieldCount];
                dr.GetValues(vals);
                rows.Add(vals);
            }
        }
    }
    return rows.ToArray();
}
/// <summary>
/// Runs the given commands through a cmd.exe shell in <paramref name="cddir"/>
/// and captures the output, stripping the echoed first command and the
/// trailing "exit" echo.
/// </summary>
/// <param name="cddir">Working directory for the shell.</param>
/// <param name="bat">Commands to feed to the shell; empty/null means no-op.</param>
/// <returns>
/// (info, warn, err): when stdout is non-empty, stderr is classified as a
/// warning; when stdout is empty, stderr is classified as an error.
/// </returns>
public static (string info, string warn, string err) ShellRun(string cddir, params string[] bat) {
    if (bat == null || bat.Any() == false) return ("", "", "");
    // Fix: dispose the Process; previously only Close() was called.
    using (var proc = new System.Diagnostics.Process()) {
        proc.StartInfo = new System.Diagnostics.ProcessStartInfo {
            CreateNoWindow = true,
            FileName = "cmd.exe",
            UseShellExecute = false,
            RedirectStandardError = true,
            RedirectStandardInput = true,
            RedirectStandardOutput = true,
            WorkingDirectory = cddir
        };
        proc.Start();
        foreach (var cmd in bat)
            proc.StandardInput.WriteLine(cmd);
        proc.StandardInput.WriteLine("exit");
        // Fix: read stderr concurrently. Reading stdout to the end and THEN
        // stderr can deadlock when the child fills the stderr pipe buffer.
        var errTask = proc.StandardError.ReadToEndAsync();
        var outStr = proc.StandardOutput.ReadToEnd();
        var errStr = errTask.Result;
        // Fix: wait for the child to exit before returning (the original did not).
        proc.WaitForExit();
        // Drop everything up to and including the echoed first command …
        var idx = outStr.IndexOf($">{bat[0]}");
        if (idx != -1) {
            idx = outStr.IndexOf("\n", idx);
            if (idx != -1) outStr = outStr.Substring(idx + 1);
        }
        // … and everything from the echoed "exit" line onwards.
        idx = outStr.LastIndexOf(">exit");
        if (idx != -1) {
            idx = outStr.LastIndexOf("\n", idx);
            if (idx != -1) outStr = outStr.Remove(idx);
        }
        outStr = outStr.Trim();
        if (outStr == "") outStr = null;
        if (errStr == "") errStr = null;
        return (outStr, string.IsNullOrEmpty(outStr) ? null : errStr, string.IsNullOrEmpty(outStr) ? errStr : null);
    }
}
/// <summary>Writes <paramref name="text"/> followed by a CRLF in the given colors.</summary>
public static void WriteLine(string text, ConsoleColor? foregroundColor = null, ConsoleColor? backgroundColor = null) {
    Write(text + "\r\n", foregroundColor, backgroundColor);
}
/// <summary>
/// Writes <paramref name="text"/> to the console, temporarily switching to the
/// requested colors and restoring the previous ones afterwards. A null color
/// means "leave that channel untouched".
/// </summary>
public static void Write(string text, ConsoleColor? foregroundColor = null, ConsoleColor? backgroundColor = null) {
    ConsoleColor savedBackground = Console.BackgroundColor;
    ConsoleColor savedForeground = Console.ForegroundColor;
    if (backgroundColor.HasValue) Console.BackgroundColor = backgroundColor.Value;
    if (foregroundColor.HasValue) Console.ForegroundColor = foregroundColor.Value;
    Console.Write(text);
    // Restore only the channels we actually changed.
    if (backgroundColor.HasValue) Console.BackgroundColor = savedBackground;
    if (foregroundColor.HasValue) Console.ForegroundColor = savedForeground;
}
}
}
|
2881099/dotnetGen_postgresql | 13,092 | GenPg/NpgsqlDbType.cs | using System;
using System.Collections.Generic;
using System.Text;
//
// 摘要:
// Represents a PostgreSQL data type that can be written or read to the database.
// Used in places such as Npgsql.NpgsqlParameter.NpgsqlDbType to unambiguously specify
// how to encode or decode values.
//
// 备注:
// See http://www.postgresql.org/docs/current/static/datatype.html
public enum NpgsqlDbType {
// Note that it's important to never change the numeric values of this enum, since user applications
// compile them in.
#region Numeric Types
/// <summary>
/// Corresponds to the PostgreSQL 8-byte "bigint" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-numeric.html</remarks>
Bigint = 1,
/// <summary>
/// Corresponds to the PostgreSQL 8-byte floating-point "double" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-numeric.html</remarks>
Double = 8,
/// <summary>
/// Corresponds to the PostgreSQL 4-byte "integer" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-numeric.html</remarks>
Integer = 9,
/// <summary>
/// Corresponds to the PostgreSQL arbitrary-precision "numeric" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-numeric.html</remarks>
Numeric = 13,
/// <summary>
/// Corresponds to the PostgreSQL floating-point "real" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-numeric.html</remarks>
Real = 17,
/// <summary>
/// Corresponds to the PostgreSQL 2-byte "smallint" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-numeric.html</remarks>
Smallint = 18,
#endregion
#region Boolean Type
/// <summary>
/// Corresponds to the PostgreSQL "boolean" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-boolean.html</remarks>
Boolean = 2,
#endregion
#region Enumerated Types
/// <summary>
/// Corresponds to the PostgreSQL "enum" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-enum.html</remarks>
Enum = 47,
#endregion
#region Geometric types
/// <summary>
/// Corresponds to the PostgreSQL geometric "box" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-geometric.html</remarks>
Box = 3,
/// <summary>
/// Corresponds to the PostgreSQL geometric "circle" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-geometric.html</remarks>
Circle = 5,
/// <summary>
/// Corresponds to the PostgreSQL geometric "line" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-geometric.html</remarks>
Line = 10,
/// <summary>
/// Corresponds to the PostgreSQL geometric "lseg" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-geometric.html</remarks>
LSeg = 11,
/// <summary>
/// Corresponds to the PostgreSQL geometric "path" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-geometric.html</remarks>
Path = 14,
/// <summary>
/// Corresponds to the PostgreSQL geometric "point" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-geometric.html</remarks>
Point = 15,
/// <summary>
/// Corresponds to the PostgreSQL geometric "polygon" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-geometric.html</remarks>
Polygon = 16,
#endregion
#region Monetary Types
/// <summary>
/// Corresponds to the PostgreSQL "money" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-money.html</remarks>
Money = 12,
#endregion
#region Character Types
/// <summary>
/// Corresponds to the PostgreSQL "char(n)"type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-character.html</remarks>
Char = 6,
/// <summary>
/// Corresponds to the PostgreSQL "text" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-character.html</remarks>
Text = 19,
/// <summary>
/// Corresponds to the PostgreSQL "varchar" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-character.html</remarks>
Varchar = 22,
/// <summary>
/// Corresponds to the PostgreSQL internal "name" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-character.html</remarks>
Name = 32,
/// <summary>
/// Corresponds to the PostgreSQL "citext" type for the citext module.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/citext.html</remarks>
Citext = 51,
/// <summary>
/// Corresponds to the PostgreSQL "char" type.
/// </summary>
/// <remarks>
/// This is an internal field and should normally not be used for regular applications.
///
/// See http://www.postgresql.org/docs/current/static/datatype-text.html
/// </remarks>
InternalChar = 38,
#endregion
#region Binary Data Types
/// <summary>
/// Corresponds to the PostgreSQL "bytea" type, holding a raw byte string.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-binary.html</remarks>
Bytea = 4,
#endregion
#region Date/Time Types
/// <summary>
/// Corresponds to the PostgreSQL "date" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-datetime.html</remarks>
Date = 7,
/// <summary>
/// Corresponds to the PostgreSQL "time" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-datetime.html</remarks>
Time = 20,
/// <summary>
/// Corresponds to the PostgreSQL "timestamp" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-datetime.html</remarks>
Timestamp = 21,
/// <summary>
/// Corresponds to the PostgreSQL "timestamp with time zone" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-datetime.html</remarks>
TimestampTZ = 26,
/// <summary>
/// Corresponds to the PostgreSQL "interval" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-datetime.html</remarks>
Interval = 30,
/// <summary>
/// Corresponds to the PostgreSQL "time with time zone" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-datetime.html</remarks>
TimeTZ = 31,
/// <summary>
/// Corresponds to the obsolete PostgreSQL "abstime" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-datetime.html</remarks>
[Obsolete("The PostgreSQL abstime time is obsolete.")]
Abstime = 33,
#endregion
#region Network Address Types
/// <summary>
/// Corresponds to the PostgreSQL "inet" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-net-types.html</remarks>
Inet = 24,
/// <summary>
/// Corresponds to the PostgreSQL "cidr" type, a field storing an IPv4 or IPv6 network.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-net-types.html</remarks>
Cidr = 44,
/// <summary>
/// Corresponds to the PostgreSQL "macaddr" type, a field storing a 6-byte physical address.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-net-types.html</remarks>
MacAddr = 34,
#endregion
#region Bit String Types
/// <summary>
/// Corresponds to the PostgreSQL "bit" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-bit.html</remarks>
Bit = 25,
/// <summary>
/// Corresponds to the PostgreSQL "varbit" type, a field storing a variable-length string of bits.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-boolean.html</remarks>
Varbit = 39,
#endregion
#region Text Search Types
/// <summary>
/// Corresponds to the PostgreSQL "tsvector" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-textsearch.html</remarks>
TsVector = 45,
/// <summary>
/// Corresponds to the PostgreSQL "tsquery" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-textsearch.html</remarks>
TsQuery = 46,
#endregion
#region UUID Type
/// <summary>
/// Corresponds to the PostgreSQL "uuid" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-uuid.html</remarks>
Uuid = 27,
#endregion
#region XML Type
/// <summary>
/// Corresponds to the PostgreSQL "xml" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-xml.html</remarks>
Xml = 28,
#endregion
#region JSON Types
/// <summary>
/// Corresponds to the PostgreSQL "json" type, a field storing JSON in text format.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-json.html</remarks>
/// <seealso cref="Jsonb"/>
Json = 35,
/// <summary>
/// Corresponds to the PostgreSQL "jsonb" type, a field storing JSON in an optimized binary
/// format.
/// </summary>
/// <remarks>
/// Supported since PostgreSQL 9.4.
/// See http://www.postgresql.org/docs/current/static/datatype-json.html
/// </remarks>
Jsonb = 36,
#endregion
#region HSTORE Type
/// <summary>
/// Corresponds to the PostgreSQL "hstore" type, a dictionary of string key-value pairs.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/hstore.html</remarks>
Hstore = 37,
#endregion
#region Arrays
/// <summary>
/// Corresponds to the PostgreSQL "array" type, a variable-length multidimensional array of
/// another type. This value must be combined with another value from <see cref="NpgsqlDbType"/>
/// via a bit OR (e.g. NpgsqlDbType.Array | NpgsqlDbType.Integer)
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/arrays.html</remarks>
Array = int.MinValue,
#endregion
#region Composite Types
/// <summary>
/// Corresponds to the PostgreSQL "composite" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/rowtypes.html</remarks>
Composite = 48,
#endregion
#region Range Types
/// <summary>
/// Corresponds to the PostgreSQL "range" type, a continuous range of values of another
/// type. This value must be combined with another value from <see cref="NpgsqlDbType"/>
/// via a bit OR (e.g. NpgsqlDbType.Range | NpgsqlDbType.Integer)
/// </summary>
/// <remarks>
/// Supported since PostgreSQL 9.2.
/// See http://www.postgresql.org/docs/9.2/static/rangetypes.html
/// </remarks>
Range = 0x40000000,
#endregion
#region Internal Types
/// <summary>
/// Corresponds to the PostgreSQL "refcursor" type.
/// </summary>
Refcursor = 23,
/// <summary>
/// Corresponds to the PostgreSQL internal "oidvector" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-oid.html</remarks>
Oidvector = 29,
/// <summary>
/// Corresponds to the PostgreSQL internal "int2vector" type.
/// </summary>
Int2Vector = 52,
/// <summary>
/// Corresponds to the PostgreSQL "oid" type.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-oid.html</remarks>
Oid = 41,
/// <summary>
/// Corresponds to the PostgreSQL "xid" type, an internal transaction identifier.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-oid.html</remarks>
Xid = 42,
/// <summary>
/// Corresponds to the PostgreSQL "cid" type, an internal command identifier.
/// </summary>
/// <remarks>See http://www.postgresql.org/docs/current/static/datatype-oid.html</remarks>
Cid = 43,
/// <summary>
/// Corresponds to the PostgreSQL "regtype" type, a numeric (OID) ID of a type in the pg_type table.
/// </summary>
Regtype = 49,
/// <summary>
/// Corresponds to the PostgreSQL "tid" type, a tuple id identifying the physical location of a row within its table.
/// </summary>
Tid = 53,
#endregion
#region Special
/// <summary>
/// A special value that can be used to send parameter values to the database without
/// specifying their type, allowing the database to cast them to another value based on context.
/// The value will be converted to a string and sent as text.
/// </summary>
/// <remarks>
/// This value shouldn't ordinarily be used, and makes sense only when sending a data type
/// unsupported by Npgsql.
/// </remarks>
Unknown = 40,
#endregion
#region Postgis
/// <summary>
/// The geometry type for postgresql spatial extension postgis.
/// </summary>
Geometry = 50,
#endregion
IntegerRange, BigintRange, NumericRange, TimestampRange, TimestampTZRange, DateRange
} |
2881099/dotnetGen_mysql | 13,345 | GenMy/WinFormClass/Socket/ServerSocket.cs | using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
/// <summary>
/// TCP server that accepts client connections and exchanges SocketMessager
/// packets with them. Accept, receive and write work is dispatched onto
/// WorkQueue instances so user handlers never block the listener thread.
/// </summary>
public class ServerSocket : IDisposable {
private TcpListener _tcpListener;
private Thread _tcpListenerThread;
// Connected clients keyed by the id handed to each AcceptSocket.
private Dictionary<int, AcceptSocket> _clients = new Dictionary<int, AcceptSocket>();
private object _clients_lock = new object();
// Next client id; incremented under _clients_lock in OnAccepted(AcceptSocket).
private int _id = 1;
private int _port;
private bool _running;
// Signalled by the listener thread after shutdown cleanup; Stop() blocks on it.
private ManualResetEvent _stopWait;
public event ServerSocketAcceptedEventHandler Accepted;
public event ServerSocketClosedEventHandler Closed;
public event ServerSocketReceiveEventHandler Receive;
public event ServerSocketErrorEventHandler Error;
private WorkQueue _acceptWQ;
// Shared with AcceptSocket instances, which enqueue incoming messages here.
internal WorkQueue _receiveWQ;
// Carries replies that a pending synchronous Write(...) is waiting on.
internal WorkQueue _receiveSyncWQ;
private WorkQueue _writeWQ;
/// <summary>
/// Creates a server that will listen on the given TCP port once Start() is called.
/// </summary>
/// <param name="port">TCP port to listen on (all local addresses).</param>
public ServerSocket(int port) {
this._port = port;
}
/// <summary>
/// Starts listening and runs the accept loop on a background thread.
/// Calling Start() while already running is a no-op.
/// </summary>
public void Start() {
if (this._running == false) {
this._running = true;
try {
this._tcpListener = new TcpListener(IPAddress.Any, this._port);
this._tcpListener.Start();
this._acceptWQ = new WorkQueue();
this._receiveWQ = new WorkQueue();
this._receiveSyncWQ = new WorkQueue();
this._writeWQ = new WorkQueue();
} catch (Exception ex) {
this._running = false;
this.OnError(ex);
return;
}
this._tcpListenerThread = new Thread(delegate() {
while (this._running) {
try {
// Blocking wait for the next connection. When Stop() closes the
// listener, this faults and the loop re-checks _running.
TcpClient tcpClient = this._tcpListener.AcceptTcpClientAsync().Result;
this._acceptWQ.Enqueue(delegate() {
try {
// NOTE(review): uses this._id before OnAccepted increments it —
// only consistent if _acceptWQ runs items serially; confirm WorkQueue.
AcceptSocket acceptSocket = new AcceptSocket(this, tcpClient, this._id);
this.OnAccepted(acceptSocket);
} catch (Exception ex) {
this.OnError(ex);
}
});
} catch (Exception ex) {
this.OnError(ex);
}
}
// Shutdown path: snapshot client ids (lock-free first, retried under the
// lock if the dictionary was mutated mid-copy), then close each client.
int[] keys = new int[this._clients.Count];
try {
this._clients.Keys.CopyTo(keys, 0);
} catch {
lock (this._clients_lock) {
keys = new int[this._clients.Count];
this._clients.Keys.CopyTo(keys, 0);
}
}
foreach (int key in keys) {
AcceptSocket client = null;
if (this._clients.TryGetValue(key, out client)) {
client.Close();
}
}
if (this._acceptWQ != null) {
this._acceptWQ.Dispose();
}
if (this._receiveWQ != null) {
this._receiveWQ.Dispose();
}
if (this._receiveSyncWQ != null) {
this._receiveSyncWQ.Dispose();
}
if (this._writeWQ != null) {
this._writeWQ.Dispose();
}
this._clients.Clear();
// Release the Stop() caller waiting for cleanup to complete.
this._stopWait.Set();
});
this._tcpListenerThread.Start();
}
}
/// <summary>
/// Stops the listener and blocks until the accept thread has closed every
/// client and disposed the work queues.
/// </summary>
public void Stop() {
if (this._tcpListener != null) {
this._tcpListener.Stop();
}
if (this._running == true) {
this._stopWait = new ManualResetEvent(false);
this._stopWait.Reset();
this._running = false;
this._stopWait.WaitOne();
}
}
/// <summary>
/// Notifies the client it was rejected (1s grace for the reply), then closes it.
/// </summary>
internal void AccessDenied(AcceptSocket client) {
client.Write(SocketMessager.SYS_ACCESS_DENIED, delegate(object sender2, ServerSocketReceiveEventArgs e2) {
}, TimeSpan.FromSeconds(1));
client.Close();
}
/// <summary>
/// Broadcasts a message to every connected client via the write queue.
/// </summary>
public void Write(SocketMessager messager) {
// Same snapshot-with-fallback pattern as the shutdown path above.
int[] keys = new int[this._clients.Count];
try {
this._clients.Keys.CopyTo(keys, 0);
} catch {
lock (this._clients_lock) {
keys = new int[this._clients.Count];
this._clients.Keys.CopyTo(keys, 0);
}
}
foreach (int key in keys) {
AcceptSocket client = null;
if (this._clients.TryGetValue(key, out client)) {
this._writeWQ.Enqueue(delegate() {
client.Write(messager);
});
}
}
}
/// <summary>Returns the client with the given id, or null when not connected.</summary>
public AcceptSocket GetAcceptSocket(int id) {
AcceptSocket socket = null;
this._clients.TryGetValue(id, out socket);
return socket;
}
// Deregisters a closed client.
// NOTE(review): mutates _clients without taking _clients_lock — confirm intended.
internal void CloseClient(AcceptSocket client) {
this._clients.Remove(client.Id);
}
/// <summary>
/// Handshake: sends a hello message and waits up to 2s for a matching reply;
/// only then is the client considered accepted, otherwise it is rejected.
/// </summary>
protected virtual void OnAccepted(ServerSocketAcceptedEventArgs e) {
SocketMessager helloMessager = new SocketMessager(SocketMessager.SYS_HELLO_WELCOME.Action);
e.AcceptSocket.Write(helloMessager, delegate(object sender2, ServerSocketReceiveEventArgs e2) {
if (e2.Messager.Id == helloMessager.Id &&
string.Compare(e2.Messager.Action, helloMessager.Action) == 0) {
e.AcceptSocket._accepted = true;
}
}, TimeSpan.FromSeconds(2));
if (e.AcceptSocket._accepted) {
if (this.Accepted != null) {
try {
this.Accepted(this, e);
} catch (Exception ex) {
this.OnError(ex);
}
}
} else {
e.AcceptSocket.AccessDenied();
}
}
// Registers the new client under the next id, then runs the handshake above.
private void OnAccepted(AcceptSocket client) {
lock (_clients_lock) {
_clients.Add(this._id++, client);
}
ServerSocketAcceptedEventArgs e = new ServerSocketAcceptedEventArgs(this._clients.Count, client);
this.OnAccepted(e);
}
protected virtual void OnClosed(ServerSocketClosedEventArgs e) {
if (this.Closed != null) {
this.Closed(this, e);
}
}
internal void OnClosed(AcceptSocket client) {
ServerSocketClosedEventArgs e = new ServerSocketClosedEventArgs(this._clients.Count, client.Id);
this.OnClosed(e);
}
protected virtual void OnReceive(ServerSocketReceiveEventArgs e) {
if (this.Receive != null) {
this.Receive(this, e);
}
}
// Bridge so AcceptSocket can raise the server-level Receive event.
internal void OnReceive2(ServerSocketReceiveEventArgs e) {
this.OnReceive(e);
}
protected virtual void OnError(ServerSocketErrorEventArgs e) {
if (this.Error != null) {
this.Error(this, e);
}
}
protected void OnError(Exception ex) {
ServerSocketErrorEventArgs e = new ServerSocketErrorEventArgs(-1, ex, null);
this.OnError(e);
}
// Bridge so AcceptSocket can raise the server-level Error event.
internal void OnError2(ServerSocketErrorEventArgs e) {
this.OnError(e);
}
#region IDisposable 成员
// Dispose == Stop: closes the listener and waits for full cleanup.
public void Dispose() {
this.Stop();
}
#endregion
}
/// <summary>
/// Server-side endpoint for one connected client. A dedicated thread polls the
/// network stream, dispatches incoming SocketMessagers onto the server's work
/// queues, and sends keep-alive probes while the connection is idle.
/// </summary>
public class AcceptSocket : BaseSocket, IDisposable {
private ServerSocket _server;
private TcpClient _tcpClient;
private Thread _thread;
private bool _running;
private int _id;
// Running count of received messages (used as the event sequence number).
private int _receives;
private int _errors;
private object _errors_lock = new object();
// Serializes writes to the shared NetworkStream.
private object _write_lock = new object();
// Pending reply handlers keyed by message id, registered by Write(..., handler, timeout).
private Dictionary<int, SyncReceive> _receiveHandlers = new Dictionary<int, SyncReceive>();
private object _receiveHandlers_lock = new object();
// Last time data was read or written; drives the idle keep-alive below.
private DateTime _lastActive;
// Set by ServerSocket.OnAccepted once the hello handshake succeeds.
internal bool _accepted;
/// <summary>
/// Wraps an accepted TcpClient and immediately starts the read loop thread.
/// </summary>
public AcceptSocket(ServerSocket server, TcpClient tcpClient, int id) {
this._running = true;
this._id = id;
this._server = server;
this._tcpClient = tcpClient;
this._lastActive = DateTime.Now;
this._thread = new Thread(delegate() {
while (this._running) {
try {
NetworkStream ns = this._tcpClient.GetStream();
ns.ReadTimeout = 1000 * 20;
if (ns.DataAvailable) {
SocketMessager messager = base.Read(ns);
// SYS_TEST_LINK is the keep-alive probe; it only refreshes _lastActive.
if (string.Compare(messager.Action, SocketMessager.SYS_TEST_LINK.Action) != 0) {
ServerSocketReceiveEventArgs e = new ServerSocketReceiveEventArgs(this._receives++, messager, this);
SyncReceive receive = null;
if (this._receiveHandlers.TryGetValue(messager.Id, out receive)) {
// A Write(...) call is blocked waiting for this reply: run its
// handler on the sync queue and release the waiter afterwards.
this._server._receiveSyncWQ.Enqueue(delegate() {
try {
receive.ReceiveHandler(this, e);
} catch (Exception ex) {
this.OnError(ex);
} finally {
receive.Wait.Set();
}
});
} else {
// Unsolicited message: surface through the server's Receive event.
this._server._receiveWQ.Enqueue(delegate() {
this.OnReceive(e);
});
}
}
this._lastActive = DateTime.Now;
} else if (_accepted) {
// Idle for more than 5s: send a keep-alive probe.
TimeSpan ts = DateTime.Now - _lastActive;
if (ts.TotalSeconds > 5) {
this.Write(SocketMessager.SYS_TEST_LINK);
}
}
// Yield briefly instead of busy-spinning while no data is pending.
if (!ns.DataAvailable) Thread.CurrentThread.Join(1);
} catch (Exception ex) {
this._running = false;
this.OnError(ex);
}
}
this.Close();
this.OnClosed();
});
this._thread.Start();
}
/// <summary>
/// Stops the read loop, disposes the connection, deregisters from the server
/// and releases every caller still blocked in a synchronous Write.
/// </summary>
public void Close() {
this._running = false;
if (this._tcpClient != null) {
this._tcpClient.Dispose();
this._tcpClient = null;
}
this._server.CloseClient(this);
// Snapshot pending handler ids (lock-free first, retry under the lock).
int[] keys = new int[this._receiveHandlers.Count];
try {
this._receiveHandlers.Keys.CopyTo(keys, 0);
} catch {
lock (this._receiveHandlers_lock) {
keys = new int[this._receiveHandlers.Count];
this._receiveHandlers.Keys.CopyTo(keys, 0);
}
}
foreach (int key in keys) {
SyncReceive receiveHandler = null;
if (this._receiveHandlers.TryGetValue(key, out receiveHandler)) {
receiveHandler.Wait.Set();
}
}
lock (this._receiveHandlers_lock) {
this._receiveHandlers.Clear();
}
}
/// <summary>Fire-and-forget send (no reply expected).</summary>
public void Write(SocketMessager messager) {
this.Write(messager, null, TimeSpan.Zero);
}
/// <summary>Send and wait up to 20s for the matching reply.</summary>
public void Write(SocketMessager messager, ServerSocketReceiveEventHandler receiveHandler) {
this.Write(messager, receiveHandler, TimeSpan.FromSeconds(20));
}
/// <summary>
/// Sends a message; when <paramref name="receiveHandler"/> is non-null, blocks
/// the caller until the reply with the same message id arrives or the timeout
/// elapses. The handler itself runs on the server's sync receive queue.
/// </summary>
public void Write(SocketMessager messager, ServerSocketReceiveEventHandler receiveHandler, TimeSpan timeout) {
SyncReceive syncReceive = null;
try {
if (receiveHandler != null) {
// Register (or replace) the pending handler for this message id
// before writing, so the reply cannot race past us.
syncReceive = new SyncReceive(receiveHandler);
lock (this._receiveHandlers_lock) {
if (!this._receiveHandlers.ContainsKey(messager.Id)) {
this._receiveHandlers.Add(messager.Id, syncReceive);
} else {
this._receiveHandlers[messager.Id] = syncReceive;
}
}
}
lock (_write_lock) {
NetworkStream ns = this._tcpClient.GetStream();
base.Write(ns, messager);
}
this._lastActive = DateTime.Now;
if (syncReceive != null) {
syncReceive.Wait.Reset();
syncReceive.Wait.WaitOne(timeout);
syncReceive.Wait.Set();
lock (this._receiveHandlers_lock) {
this._receiveHandlers.Remove(messager.Id);
}
}
} catch (Exception ex) {
this._running = false;
this.OnError(ex);
if (syncReceive != null) {
syncReceive.Wait.Set();
lock (this._receiveHandlers_lock) {
this._receiveHandlers.Remove(messager.Id);
}
}
}
}
/// <summary>
/// 拒绝访问,并关闭连接
/// </summary>
public void AccessDenied() {
this._server.AccessDenied(this);
}
protected virtual void OnClosed() {
try {
this._server.OnClosed(this);
} catch (Exception ex) {
this.OnError(ex);
}
}
protected virtual void OnReceive(ServerSocketReceiveEventArgs e) {
try {
this._server.OnReceive2(e);
} catch (Exception ex) {
this.OnError(ex);
}
}
// Counts errors for this connection and forwards them to the server's Error event.
protected virtual void OnError(Exception ex) {
int errors = 0;
lock (this._errors_lock) {
errors = ++this._errors;
}
ServerSocketErrorEventArgs e = new ServerSocketErrorEventArgs(errors, ex, this);
this._server.OnError2(e);
}
/// <summary>Id assigned by the server at accept time.</summary>
public int Id {
get { return _id; }
}
// Pairs a pending reply handler with the event its waiter blocks on.
class SyncReceive : IDisposable {
private ServerSocketReceiveEventHandler _receiveHandler;
private ManualResetEvent _wait;
public SyncReceive(ServerSocketReceiveEventHandler onReceive) {
this._receiveHandler = onReceive;
this._wait = new ManualResetEvent(false);
}
public ManualResetEvent Wait {
get { return _wait; }
}
public ServerSocketReceiveEventHandler ReceiveHandler {
get { return _receiveHandler; }
}
#region IDisposable 成员
public void Dispose() {
this._wait.Set();
}
#endregion
}
#region IDisposable 成员
void IDisposable.Dispose() {
this.Close();
}
#endregion
}
// Delegate signatures for the ServerSocket events (Closed/Accepted/Error/Receive).
public delegate void ServerSocketClosedEventHandler(object sender, ServerSocketClosedEventArgs e);
public delegate void ServerSocketAcceptedEventHandler(object sender, ServerSocketAcceptedEventArgs e);
public delegate void ServerSocketErrorEventHandler(object sender, ServerSocketErrorEventArgs e);
public delegate void ServerSocketReceiveEventHandler(object sender, ServerSocketReceiveEventArgs e);
/// <summary>
/// Event data for ServerSocket.Closed: reports the remaining client count and
/// the id of the connection that was closed.
/// </summary>
public class ServerSocketClosedEventArgs : EventArgs {
	private readonly int _clientCount;
	private readonly int _closedSocketId;

	/// <param name="accepts">Client count at the time the event was raised.</param>
	/// <param name="acceptSocketId">Id of the closed client connection.</param>
	public ServerSocketClosedEventArgs(int accepts, int acceptSocketId) {
		_clientCount = accepts;
		_closedSocketId = acceptSocketId;
	}

	/// <summary>Client count at the time the event was raised.</summary>
	public int Accepts {
		get { return _clientCount; }
	}

	/// <summary>Id of the closed client connection.</summary>
	public int AcceptSocketId {
		get { return _closedSocketId; }
	}
}
/// <summary>
/// Event data for ServerSocket.Accepted: carries the client count and the
/// newly accepted client socket.
/// </summary>
public class ServerSocketAcceptedEventArgs : EventArgs {
	private readonly int _clientCount;
	private readonly AcceptSocket _client;

	/// <param name="accepts">Client count at the time the event was raised.</param>
	/// <param name="acceptSocket">The newly accepted client socket.</param>
	public ServerSocketAcceptedEventArgs(int accepts, AcceptSocket acceptSocket) {
		_clientCount = accepts;
		_client = acceptSocket;
	}

	/// <summary>Client count at the time the event was raised.</summary>
	public int Accepts {
		get { return _clientCount; }
	}

	/// <summary>The newly accepted client socket.</summary>
	public AcceptSocket AcceptSocket {
		get { return _client; }
	}
}
/// <summary>
/// Event data for ServerSocket.Error: an error counter, the exception that
/// occurred, and the client it occurred on (null for server-level errors).
/// </summary>
public class ServerSocketErrorEventArgs : EventArgs {
	private readonly int _errorCount;
	private readonly Exception _error;
	private readonly AcceptSocket _client;

	/// <param name="errors">Error sequence number (-1 for server-level errors).</param>
	/// <param name="exception">The exception that was raised.</param>
	/// <param name="acceptSocket">Client the error belongs to, or null.</param>
	public ServerSocketErrorEventArgs(int errors, Exception exception, AcceptSocket acceptSocket) {
		_errorCount = errors;
		_error = exception;
		_client = acceptSocket;
	}

	/// <summary>Error sequence number (-1 for server-level errors).</summary>
	public int Errors {
		get { return _errorCount; }
	}

	/// <summary>The exception that was raised.</summary>
	public Exception Exception {
		get { return _error; }
	}

	/// <summary>Client the error belongs to, or null for server-level errors.</summary>
	public AcceptSocket AcceptSocket {
		get { return _client; }
	}
}
/// <summary>
/// Event data for ServerSocket.Receive: the receive sequence number, the
/// message that arrived, and the client it arrived from.
/// </summary>
public class ServerSocketReceiveEventArgs : EventArgs {
	private readonly int _sequence;
	private readonly SocketMessager _message;
	private readonly AcceptSocket _client;

	/// <param name="receives">Receive sequence number for this connection.</param>
	/// <param name="messager">The received message.</param>
	/// <param name="acceptSocket">Client the message arrived from.</param>
	public ServerSocketReceiveEventArgs(int receives, SocketMessager messager, AcceptSocket acceptSocket) {
		_sequence = receives;
		_message = messager;
		_client = acceptSocket;
	}

	/// <summary>Receive sequence number for this connection.</summary>
	public int Receives {
		get { return _sequence; }
	}

	/// <summary>The received message.</summary>
	public SocketMessager Messager {
		get { return _message; }
	}

	/// <summary>Client the message arrived from.</summary>
	public AcceptSocket AcceptSocket {
		get { return _client; }
	}
}
2881099/dotnetGen_postgresql | 1,329 | ServerWinForm/Form1.Designer.cs | namespace ServerWinForm
{
/// <summary>
/// Designer-generated half of Form1: component container, Dispose and layout.
/// (Generated code — do not edit by hand.)
/// </summary>
partial class Form1
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows 窗体设计器生成的代码
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.SuspendLayout();
//
// Form1
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(533, 330);
this.Margin = new System.Windows.Forms.Padding(2, 2, 2, 2);
this.Name = "Form1";
this.Text = "Form1";
this.Load += new System.EventHandler(this.Form1_Load);
this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.Form1_FormClosing);
this.ResumeLayout(false);
}
#endregion
}
}
|
2881099/dotnetGen_sqlserver | 12,738 | Server/ServerSocket.cs | using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
/// <summary>
/// TCP server that accepts client connections and exchanges SocketMessager
/// packets with them. Each accepted connection is handled on its own thread.
/// </summary>
public class ServerSocket : IDisposable {
private TcpListener _tcpListener;
private Thread _tcpListenerThread;
// Connected clients keyed by the id handed to each AcceptSocket.
private Dictionary<int, AcceptSocket> _clients = new Dictionary<int, AcceptSocket>();
private object _clients_lock = new object();
// Next client id; incremented under _clients_lock in OnAccepted(AcceptSocket).
private int _id = 1;
private int _port;
private bool _running;
// Signalled by the listener thread after shutdown cleanup; Stop() blocks on it.
private ManualResetEvent _stopWait;
public event ServerSocketAcceptedEventHandler Accepted;
public event ServerSocketClosedEventHandler Closed;
public event ServerSocketReceiveEventHandler Receive;
public event ServerSocketErrorEventHandler Error;
/// <summary>
/// Creates a server that will listen on the given TCP port once Start() is called.
/// </summary>
/// <param name="port">TCP port to listen on (all local addresses).</param>
public ServerSocket(int port) {
this._port = port;
}
/// <summary>
/// Starts listening and runs the accept loop on a background thread.
/// Calling Start() while already running is a no-op.
/// </summary>
public void Start() {
if (this._running == false) {
this._running = true;
try {
this._tcpListener = new TcpListener(IPAddress.Any, this._port);
this._tcpListener.Start();
} catch (Exception ex) {
this._running = false;
this.OnError(ex);
return;
}
this._tcpListenerThread = new Thread(delegate() {
while (this._running) {
try {
// Blocking accept; Stop() closes the listener, which faults this
// call and lets the loop observe _running == false.
TcpClient tcpClient = this._tcpListener.AcceptTcpClient();
new Thread(delegate() {
try {
AcceptSocket acceptSocket = new AcceptSocket(this, tcpClient, this._id);
this.OnAccepted(acceptSocket);
} catch (Exception ex) {
this.OnError(ex);
}
}).Start();
} catch (Exception ex) {
this.OnError(ex);
}
}
// Shutdown: snapshot the client ids (lock-free first, retried under
// the lock if the dictionary was mutated mid-copy), close every client.
int[] keys = new int[this._clients.Count];
try {
this._clients.Keys.CopyTo(keys, 0);
} catch {
lock (this._clients_lock) {
keys = new int[this._clients.Count];
this._clients.Keys.CopyTo(keys, 0);
}
}
foreach (int key in keys) {
AcceptSocket client = null;
if (this._clients.TryGetValue(key, out client)) {
client.Close();
}
}
this._clients.Clear();
// Release the Stop() caller waiting for cleanup to complete.
this._stopWait.Set();
});
this._tcpListenerThread.Start();
}
}
/// <summary>
/// Stops the listener and blocks until the accept thread has closed every client.
/// </summary>
public void Stop() {
if (this._tcpListener != null) {
this._tcpListener.Stop();
}
if (this._running == true) {
this._stopWait = new ManualResetEvent(false);
this._stopWait.Reset();
this._running = false;
this._stopWait.WaitOne();
}
}
/// <summary>
/// Notifies the client it was rejected (3s grace for the reply), then closes it.
/// </summary>
internal void AccessDenied(AcceptSocket client) {
client.Write(SocketMessager.SYS_ACCESS_DENIED, delegate(object sender2, ServerSocketReceiveEventArgs e2) {
}, TimeSpan.FromSeconds(3));
client.Close();
}
/// <summary>
/// Broadcasts a message to every connected client (synchronously, in turn).
/// </summary>
public void Write(SocketMessager messager) {
int[] keys = new int[this._clients.Count];
try {
this._clients.Keys.CopyTo(keys, 0);
} catch {
lock (this._clients_lock) {
keys = new int[this._clients.Count];
this._clients.Keys.CopyTo(keys, 0);
}
}
foreach (int key in keys) {
AcceptSocket client = null;
if (this._clients.TryGetValue(key, out client)) {
client.Write(messager);
}
}
}
/// <summary>Returns the client with the given id, or null when not connected.</summary>
public AcceptSocket GetAcceptSocket(int id) {
AcceptSocket socket = null;
this._clients.TryGetValue(id, out socket);
return socket;
}
// Deregisters a closed client.
// NOTE(review): mutates _clients without taking _clients_lock — confirm intended.
internal void CloseClient(AcceptSocket client) {
this._clients.Remove(client.Id);
}
/// <summary>
/// Handshake: sends the hello message and waits up to 5s for a matching reply;
/// only then is the client considered accepted, otherwise it is rejected.
/// NOTE(review): matches against the shared SYS_HELLO_WELCOME.Id rather than a
/// per-connection message id — confirm this cannot cross-match concurrent accepts.
/// </summary>
protected virtual void OnAccepted(ServerSocketAcceptedEventArgs e) {
e.AcceptSocket.Write(SocketMessager.SYS_HELLO_WELCOME, delegate(object sender2, ServerSocketReceiveEventArgs e2) {
if (e2.Messager.Id == SocketMessager.SYS_HELLO_WELCOME.Id &&
string.Compare(e2.Messager.Action, SocketMessager.SYS_HELLO_WELCOME.Action) == 0) {
e.AcceptSocket._accepted = true;
}
}, TimeSpan.FromSeconds(5));
if (e.AcceptSocket._accepted) {
if (this.Accepted != null) {
try {
this.Accepted(this, e);
} catch (Exception ex) {
this.OnError(ex);
}
}
} else {
e.AcceptSocket.AccessDenied();
}
}
// Registers the new client under the next id, then runs the handshake above.
private void OnAccepted(AcceptSocket client) {
lock (_clients_lock) {
_clients.Add(this._id++, client);
}
ServerSocketAcceptedEventArgs e = new ServerSocketAcceptedEventArgs(this._clients.Count, client);
this.OnAccepted(e);
}
protected virtual void OnClosed(ServerSocketClosedEventArgs e) {
if (this.Closed != null) {
this.Closed(this, e);
}
}
internal void OnClosed(AcceptSocket client) {
ServerSocketClosedEventArgs e = new ServerSocketClosedEventArgs(this._clients.Count, client.Id);
this.OnClosed(e);
}
protected virtual void OnReceive(ServerSocketReceiveEventArgs e) {
if (this.Receive != null) {
this.Receive(this, e);
}
}
// Bridge so AcceptSocket can raise the server-level Receive event.
internal void OnReceive2(ServerSocketReceiveEventArgs e) {
this.OnReceive(e);
}
protected virtual void OnError(ServerSocketErrorEventArgs e) {
if (this.Error != null) {
this.Error(this, e);
}
}
protected void OnError(Exception ex) {
ServerSocketErrorEventArgs e = new ServerSocketErrorEventArgs(-1, ex, null);
// BUGFIX: previously invoked this.Error(this, e) directly, which threw
// NullReferenceException whenever no subscriber was attached. Route through
// the virtual OnError(e), which null-checks the delegate (and matches OnError2).
this.OnError(e);
}
// Bridge so AcceptSocket can raise the server-level Error event.
internal void OnError2(ServerSocketErrorEventArgs e) {
this.OnError(e);
}
#region IDisposable 成员
// Dispose == Stop: closes the listener and waits for full cleanup.
public void Dispose() {
this.Stop();
}
#endregion
}
/// <summary>
/// Server-side endpoint for one connected client. A dedicated thread polls the
/// network stream, dispatches incoming SocketMessagers on worker threads, and
/// sends keep-alive probes while the connection is idle.
/// </summary>
public class AcceptSocket : BaseSocket, IDisposable {
private ServerSocket _server;
private TcpClient _tcpClient;
private Thread _thread;
private bool _running;
private int _id;
// Running count of received messages (used as the event sequence number).
private int _receives;
private int _errors;
private object _errors_lock = new object();
// Serializes writes to the shared NetworkStream.
private object _write_lock = new object();
// Pending reply handlers keyed by message id, registered by Write(..., handler, timeout).
private Dictionary<int, SyncReceive> _receiveHandlers = new Dictionary<int, SyncReceive>();
private object _receiveHandlers_lock = new object();
// Last time data was read or written; drives the idle keep-alive below.
private DateTime _lastActive;
// Set by ServerSocket.OnAccepted once the hello handshake succeeds.
internal bool _accepted;
/// <summary>
/// Wraps an accepted TcpClient and immediately starts the read loop thread.
/// </summary>
public AcceptSocket(ServerSocket server, TcpClient tcpClient, int id) {
this._running = true;
this._id = id;
this._server = server;
this._tcpClient = tcpClient;
this._lastActive = DateTime.Now;
this._thread = new Thread(delegate() {
while (this._running) {
try {
NetworkStream ns = this._tcpClient.GetStream();
ns.ReadTimeout = 1000 * 20;
if (ns.DataAvailable) {
SocketMessager messager = base.Read(ns);
// Debug hook: logs every received message when wired up.
Server.Protocol.debugAppendLog?.Invoke(messager.ToString());
// SYS_TEST_LINK is the keep-alive probe; it only refreshes _lastActive.
if (string.Compare(messager.Action, SocketMessager.SYS_TEST_LINK.Action) != 0) {
ServerSocketReceiveEventArgs e = new ServerSocketReceiveEventArgs(this._receives++, messager, this);
SyncReceive receive = null;
if (this._receiveHandlers.TryGetValue(messager.Id, out receive)) {
// A Write(...) call is blocked waiting for this reply: run its
// handler on a worker thread and release the waiter afterwards.
new Thread(delegate() {
try {
receive.ReceiveHandler(this, e);
} catch (Exception ex) {
this.OnError(ex);
} finally {
receive.Wait.Set();
}
}).Start();
} else {
// Unsolicited message: surface through the server's Receive event.
new Thread(delegate() {
this.OnReceive(e);
}).Start();
}
}
this._lastActive = DateTime.Now;
} else if (_accepted) {
// Idle for more than 5s: send a keep-alive probe.
TimeSpan ts = DateTime.Now - _lastActive;
if (ts.TotalSeconds > 5) {
this.Write(SocketMessager.SYS_TEST_LINK);
}
}
// Yield briefly instead of busy-spinning while no data is pending.
if (!ns.DataAvailable) Thread.CurrentThread.Join(100);
} catch (Exception ex) {
this._running = false;
this.OnError(ex);
}
}
this.Close();
this.OnClosed();
});
this._thread.Start();
}
/// <summary>
/// Stops the read loop, closes the connection, deregisters from the server
/// and releases every caller still blocked in a synchronous Write.
/// </summary>
public void Close() {
this._running = false;
this._tcpClient.Close();
this._server.CloseClient(this);
// Snapshot pending handler ids (lock-free first, retry under the lock).
int[] keys = new int[this._receiveHandlers.Count];
try {
this._receiveHandlers.Keys.CopyTo(keys, 0);
} catch {
lock (this._receiveHandlers_lock) {
keys = new int[this._receiveHandlers.Count];
this._receiveHandlers.Keys.CopyTo(keys, 0);
}
}
foreach (int key in keys) {
SyncReceive receiveHandler = null;
if (this._receiveHandlers.TryGetValue(key, out receiveHandler)) {
receiveHandler.Wait.Set();
}
}
lock (this._receiveHandlers_lock) {
this._receiveHandlers.Clear();
}
}
/// <summary>Fire-and-forget send (no reply expected).</summary>
public void Write(SocketMessager messager) {
this.Write(messager, null, TimeSpan.Zero);
}
/// <summary>Send and wait up to 20s for the matching reply.</summary>
public void Write(SocketMessager messager, ServerSocketReceiveEventHandler receiveHandler) {
this.Write(messager, receiveHandler, TimeSpan.FromSeconds(20));
}
/// <summary>
/// Sends a message; when <paramref name="receiveHandler"/> is non-null, blocks
/// the caller until the reply with the same message id arrives or the timeout
/// elapses.
/// </summary>
public void Write(SocketMessager messager, ServerSocketReceiveEventHandler receiveHandler, TimeSpan timeout) {
// Server-originated messages get a negated id.
// NOTE(review): presumably _isChangeId marks ids already adjusted so the
// negation is applied at most once — confirm in SocketMessager.
if (!messager._isChangeId) {
messager.Id = -messager.Id;
}
SyncReceive syncReceive = null;
try {
if (receiveHandler != null) {
// Register (or replace) the pending handler for this message id
// before writing, so the reply cannot race past us.
syncReceive = new SyncReceive(receiveHandler);
lock (this._receiveHandlers_lock) {
if (!this._receiveHandlers.ContainsKey(messager.Id)) {
this._receiveHandlers.Add(messager.Id, syncReceive);
} else {
this._receiveHandlers[messager.Id] = syncReceive;
}
}
}
lock (_write_lock) {
NetworkStream ns = this._tcpClient.GetStream();
base.Write(ns, messager);
}
this._lastActive = DateTime.Now;
if (syncReceive != null) {
syncReceive.Wait.Reset();
syncReceive.Wait.WaitOne(timeout, false);
syncReceive.Wait.Set();
lock (this._receiveHandlers_lock) {
this._receiveHandlers.Remove(messager.Id);
}
}
} catch (Exception ex) {
this._running = false;
this.OnError(ex);
if (syncReceive != null) {
syncReceive.Wait.Set();
lock (this._receiveHandlers_lock) {
this._receiveHandlers.Remove(messager.Id);
}
}
}
}
/// <summary>
/// 拒绝访问,并关闭连接
/// </summary>
public void AccessDenied() {
this._server.AccessDenied(this);
}
protected virtual void OnClosed() {
try {
this._server.OnClosed(this);
} catch (Exception ex) {
this.OnError(ex);
}
}
protected virtual void OnReceive(ServerSocketReceiveEventArgs e) {
try {
this._server.OnReceive2(e);
} catch (Exception ex) {
this.OnError(ex);
}
}
// Counts errors for this connection and forwards them to the server's Error event.
protected virtual void OnError(Exception ex) {
int errors = 0;
lock (this._errors_lock) {
errors = ++this._errors;
}
ServerSocketErrorEventArgs e = new ServerSocketErrorEventArgs(errors, ex, this);
this._server.OnError2(e);
}
/// <summary>Id assigned by the server at accept time.</summary>
public int Id {
get { return _id; }
}
// Pairs a pending reply handler with the event its waiter blocks on.
class SyncReceive : IDisposable {
private ServerSocketReceiveEventHandler _receiveHandler;
private ManualResetEvent _wait;
public SyncReceive(ServerSocketReceiveEventHandler onReceive) {
this._receiveHandler = onReceive;
this._wait = new ManualResetEvent(false);
}
public ManualResetEvent Wait {
get { return _wait; }
}
public ServerSocketReceiveEventHandler ReceiveHandler {
get { return _receiveHandler; }
}
#region IDisposable 成员
public void Dispose() {
this._wait.Set();
this._wait.Close();
}
#endregion
}
#region IDisposable 成员
void IDisposable.Dispose() {
this.Close();
}
#endregion
}
// Delegate signatures for the ServerSocket events (Closed/Accepted/Error/Receive).
public delegate void ServerSocketClosedEventHandler(object sender, ServerSocketClosedEventArgs e);
public delegate void ServerSocketAcceptedEventHandler(object sender, ServerSocketAcceptedEventArgs e);
public delegate void ServerSocketErrorEventHandler(object sender, ServerSocketErrorEventArgs e);
public delegate void ServerSocketReceiveEventHandler(object sender, ServerSocketReceiveEventArgs e);
/// <summary>
/// Event data for ServerSocket.Closed: remaining client count and the id of
/// the connection that was closed.
/// </summary>
public class ServerSocketClosedEventArgs : EventArgs {
private int _accepts;
private int _acceptSocketId;
public ServerSocketClosedEventArgs(int accepts, int acceptSocketId) {
this._accepts = accepts;
this._acceptSocketId = acceptSocketId;
}
/// <summary>Client count at the time the event was raised.</summary>
public int Accepts {
get { return _accepts; }
}
/// <summary>Id of the closed client connection.</summary>
public int AcceptSocketId {
get { return _acceptSocketId; }
}
}
/// <summary>
/// Event data for ServerSocket.Accepted: the client count and the newly
/// accepted client socket.
/// </summary>
public class ServerSocketAcceptedEventArgs : EventArgs {
private int _accepts;
private AcceptSocket _acceptSocket;
public ServerSocketAcceptedEventArgs(int accepts, AcceptSocket acceptSocket) {
this._accepts = accepts;
this._acceptSocket = acceptSocket;
}
/// <summary>Client count at the time the event was raised.</summary>
public int Accepts {
get { return _accepts; }
}
/// <summary>The newly accepted client socket.</summary>
public AcceptSocket AcceptSocket {
get { return _acceptSocket; }
}
}
/// <summary>
/// Event data for ServerSocket.Error: an error counter, the exception raised,
/// and the client it occurred on (null for server-level errors).
/// </summary>
public class ServerSocketErrorEventArgs : EventArgs {
private int _errors;
private Exception _exception;
private AcceptSocket _acceptSocket;
public ServerSocketErrorEventArgs(int errors, Exception exception, AcceptSocket acceptSocket) {
this._errors = errors;
this._exception = exception;
this._acceptSocket = acceptSocket;
}
/// <summary>Error sequence number (-1 for server-level errors).</summary>
public int Errors {
get { return _errors; }
}
/// <summary>The exception that was raised.</summary>
public Exception Exception {
get { return _exception; }
}
/// <summary>Client the error belongs to, or null for server-level errors.</summary>
public AcceptSocket AcceptSocket {
get { return _acceptSocket; }
}
}
/// <summary>
/// Event data for ServerSocket.Receive: receive sequence number, the message
/// that arrived, and the client it arrived from.
/// </summary>
public class ServerSocketReceiveEventArgs : EventArgs {
private int _receives;
private SocketMessager _messager;
private AcceptSocket _acceptSocket;
public ServerSocketReceiveEventArgs(int receives, SocketMessager messager, AcceptSocket acceptSocket) {
this._receives = receives;
this._messager = messager;
this._acceptSocket = acceptSocket;
}
/// <summary>Receive sequence number for this connection.</summary>
public int Receives {
get { return _receives; }
}
/// <summary>The received message.</summary>
public SocketMessager Messager {
get { return _messager; }
}
/// <summary>Client the message arrived from.</summary>
public AcceptSocket AcceptSocket {
get { return _acceptSocket; }
}
}
2881099/dotnetGen_sqlserver | 55,173 | Server/CodeBuild(Const).cs | using System;
using System.Collections.Generic;
using System.Data;
using System.Text;
using Model;
namespace Server {
internal partial class CodeBuild {
protected class CONST {
public static readonly string corePath = @"src\";
public static readonly string moduleAdminPath = @"src\Module\Admin\";
public static readonly string webHostPath = @"src\WebHost\";
public static readonly string sln =
#region 内容太长已被收起
@"
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 14
VisualStudioVersion = 14.0.25420.1
MinimumVisualStudioVersion = 10.0.40219.1
Project(""{{2150E333-8FDC-42A3-9474-1A3956D46DE8}}"") = ""src"", ""src"", ""{{{1}}}""
EndProject
Project(""{{2150E333-8FDC-42A3-9474-1A3956D46DE8}}"") = ""Solution Items"", ""Solution Items"", ""{{{2}}}""
ProjectSection(SolutionItems) = preProject
build.bat = build.bat
readme.md = readme.md
EndProjectSection
EndProject
Project(""{{2150E333-8FDC-42A3-9474-1A3956D46DE8}}"") = ""Module"", ""Module"", ""{{{3}}}""
EndProject
Project(""{{2150E333-8FDC-42A3-9474-1A3956D46DE8}}"") = ""Test"", ""Test"", ""{{{4}}}""
EndProject
Project(""{{9A19103F-16F7-4668-BE54-9A1E7A4F7556}}"") = ""{0}.db"", ""src\{0}.db\{0}.db.csproj"", ""{{{6}}}""
EndProject
Project(""{{9A19103F-16F7-4668-BE54-9A1E7A4F7556}}"") = ""Infrastructure"", ""src\Infrastructure\Infrastructure.csproj"", ""{{{7}}}""
EndProject
Project(""{{9A19103F-16F7-4668-BE54-9A1E7A4F7556}}"") = ""WebHost"", ""src\WebHost\WebHost.csproj"", ""{{{8}}}""
EndProject
Project(""{{9A19103F-16F7-4668-BE54-9A1E7A4F7556}}"") = ""Admin"", ""src\Module\Admin\Admin.csproj"", ""{{{9}}}""
EndProject
Project(""{{9A19103F-16F7-4668-BE54-9A1E7A4F7556}}"") = ""Test"", ""src\Module\Test\Test.csproj"", ""{{{10}}}""
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{{{6}}}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{{{6}}}.Debug|Any CPU.Build.0 = Debug|Any CPU
{{{6}}}.Release|Any CPU.ActiveCfg = Release|Any CPU
{{{6}}}.Release|Any CPU.Build.0 = Release|Any CPU
{{{7}}}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{{{7}}}.Debug|Any CPU.Build.0 = Debug|Any CPU
{{{7}}}.Release|Any CPU.ActiveCfg = Release|Any CPU
{{{7}}}.Release|Any CPU.Build.0 = Release|Any CPU
{{{8}}}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{{{8}}}.Debug|Any CPU.Build.0 = Debug|Any CPU
{{{8}}}.Release|Any CPU.ActiveCfg = Release|Any CPU
{{{8}}}.Release|Any CPU.Build.0 = Release|Any CPU
{{{9}}}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{{{9}}}.Debug|Any CPU.Build.0 = Debug|Any CPU
{{{9}}}.Release|Any CPU.ActiveCfg = Release|Any CPU
{{{9}}}.Release|Any CPU.Build.0 = Release|Any CPU
{{{10}}}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{{{10}}}.Debug|Any CPU.Build.0 = Debug|Any CPU
{{{10}}}.Release|Any CPU.ActiveCfg = Release|Any CPU
{{{10}}}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{{{3}}} = {{{1}}}
{{{6}}} = {{{1}}}
{{{7}}} = {{{1}}}
{{{8}}} = {{{1}}}
{{{9}}} = {{{3}}}
{{{10}}} = {{{3}}}
EndGlobalSection
EndGlobal
";
#endregion
/// <summary>
/// Composite-format template ({0} = project namespace) for the generated
/// SqlHelper proxy source: a {0}.BLL.SqlHelper partial class deriving from
/// {0}.DAL.SqlHelper, plus the {0}.DAL.SqlHelper static facade that forwards
/// ExecuteReader/Array/NonQuery/Scalar (sync and async), transactions, and
/// CacheShell helpers to an internal Executer instance. Literal braces in the
/// emitted C# are escaped as {{ / }} for string.Format.
/// </summary>
public static readonly string DAL_DBUtility_SqlHelper_cs =
#region 内容太长已被收起
@"using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Threading.Tasks;
using Microsoft.Extensions.Caching.Distributed;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using System.Data.SqlClient;
namespace {0}.BLL {{
/// <summary>
/// dng.Mssql代理类
/// </summary>
public abstract partial class SqlHelper : {0}.DAL.SqlHelper {{
}}
}}
namespace {0}.DAL {{
/// <summary>
/// dng.Mssql代理类
/// </summary>
public abstract partial class SqlHelper {{
internal static Executer Instance {{ get; private set; }}
public static SqlConnectionPool Pool => Instance.MasterPool;
public static List<SqlConnectionPool> SlavePools => Instance.SlavePools;
/// <summary>
/// 是否跟踪记录SQL执行性能日志
/// </summary>
public static bool IsTracePerformance {{ get => Instance.IsTracePerformance; set => Instance.IsTracePerformance = value; }}
public static void Initialization(IDistributedCache cache, IConfiguration cacheStrategy, string masterConnectionString, string[] slaveConnectionString, ILogger log) {{
CacheStrategy = cacheStrategy;
Instance = new Executer(cache, masterConnectionString, slaveConnectionString, log);
}}
public static string Addslashes(string filter, params object[] parms) {{ return Executer.Addslashes(filter, parms); }}
/// <summary>
/// 若使用读写分离,查询【从库】条件cmdText.StartsWith(""SELECT ""),否则查询【主库】
/// </summary>
/// <param name=""readerHander""></param>
/// <param name=""cmdText""></param>
/// <param name=""cmdParms""></param>
public static void ExecuteReader(Action<SqlDataReader> readerHander, string cmdText, params SqlParameter[] cmdParms) => Instance.ExecuteReader(readerHander, CommandType.Text, cmdText, cmdParms);
/// <summary>
/// 若使用读写分离,查询【从库】条件cmdText.StartsWith(""SELECT ""),否则查询【主库】
/// </summary>
/// <param name=""cmdText""></param>
/// <param name=""cmdParms""></param>
public static object[][] ExecuteArray(string cmdText, params SqlParameter[] cmdParms) => Instance.ExecuteArray(CommandType.Text, cmdText, cmdParms);
/// <summary>
/// 在【主库】执行
/// </summary>
/// <param name=""cmdText""></param>
/// <param name=""cmdParms""></param>
public static int ExecuteNonQuery(string cmdText, params SqlParameter[] cmdParms) => Instance.ExecuteNonQuery(CommandType.Text, cmdText, cmdParms);
/// <summary>
/// 在【主库】执行
/// </summary>
/// <param name=""cmdText""></param>
/// <param name=""cmdParms""></param>
public static object ExecuteScalar(string cmdText, params SqlParameter[] cmdParms) => Instance.ExecuteScalar(CommandType.Text, cmdText, cmdParms);
/// <summary>
/// 若使用读写分离,查询【从库】条件cmdText.StartsWith(""SELECT ""),否则查询【主库】
/// </summary>
/// <param name=""readerHander""></param>
/// <param name=""cmdText""></param>
/// <param name=""cmdParms""></param>
public static Task ExecuteReaderAsync(Func<SqlDataReader, Task> readerHander, string cmdText, params SqlParameter[] cmdParms) => Instance.ExecuteReaderAsync(readerHander, CommandType.Text, cmdText, cmdParms);
/// <summary>
/// 若使用读写分离,查询【从库】条件cmdText.StartsWith(""SELECT ""),否则查询【主库】
/// </summary>
/// <param name=""cmdText""></param>
/// <param name=""cmdParms""></param>
public static Task<object[][]> ExecuteArrayAsync(string cmdText, params SqlParameter[] cmdParms) => Instance.ExecuteArrayAsync(CommandType.Text, cmdText, cmdParms);
/// <summary>
/// 在【主库】执行
/// </summary>
/// <param name=""cmdText""></param>
/// <param name=""cmdParms""></param>
public static Task<int> ExecuteNonQueryAsync(string cmdText, params SqlParameter[] cmdParms) => Instance.ExecuteNonQueryAsync(CommandType.Text, cmdText, cmdParms);
/// <summary>
/// 在【主库】执行
/// </summary>
/// <param name=""cmdText""></param>
/// <param name=""cmdParms""></param>
public static Task<object> ExecuteScalarAsync(string cmdText, params SqlParameter[] cmdParms) => Instance.ExecuteScalarAsync(CommandType.Text, cmdText, cmdParms);
/// <summary>
/// 开启事务(不支持异步),60秒未执行完将自动提交
/// </summary>
/// <param name=""handler"">事务体 () => {{}}</param>
public static void Transaction(Action handler) => Instance.Transaction(handler);
/// <summary>
/// 开启事务(不支持异步)
/// </summary>
/// <param name=""handler"">事务体 () => {{}}</param>
/// <param name=""timeout"">超时,未执行完将自动提交</param>
public static void Transaction(Action handler, TimeSpan timeout) => Instance.Transaction(handler, timeout);
/// <summary>
/// 生成类似Mongodb的ObjectId有序、不重复Guid
/// </summary>
/// <returns></returns>
public static Guid NewMongodbId() => Executer.NewMongodbId();
/// <summary>
/// 循环或批量删除缓存键,项目启动时检测:Cache.Remove(""key1|key2"") 若成功删除 key1、key2,说明支持批量删除
/// </summary>
/// <param name=""keys"">缓存键[数组]</param>
public static void CacheRemove(params string[] keys) => Instance.CacheRemove(keys);
/// <summary>
/// 循环或批量删除缓存键,项目启动时检测:Cache.Remove(""key1|key2"") 若成功删除 key1、key2,说明支持批量删除
/// </summary>
/// <param name=""keys"">缓存键[数组]</param>
async static public Task CacheRemoveAsync(params string[] keys) => await Instance.CacheRemoveAsync(keys);
public static IDistributedCache Cache => Instance.Cache;
internal static IConfiguration CacheStrategy {{ get; private set; }}
/// <summary>
/// 缓存壳
/// </summary>
/// <typeparam name=""T"">缓存类型</typeparam>
/// <param name=""key"">缓存键</param>
/// <param name=""timeoutSeconds"">缓存秒数</param>
/// <param name=""getData"">获取源数据的函数</param>
/// <param name=""serialize"">序列化函数</param>
/// <param name=""deserialize"">反序列化函数</param>
/// <returns></returns>
public static T CacheShell<T>(string key, int timeoutSeconds, Func<T> getData, Func<T, string> serialize = null, Func<string, T> deserialize = null) =>
Instance.CacheShell(key, timeoutSeconds, getData, serialize, deserialize);
/// <summary>
/// 缓存壳(哈希表)
/// </summary>
/// <typeparam name=""T"">缓存类型</typeparam>
/// <param name=""key"">缓存键</param>
/// <param name=""field"">字段</param>
/// <param name=""timeoutSeconds"">缓存秒数</param>
/// <param name=""getData"">获取源数据的函数</param>
/// <param name=""serialize"">序列化函数</param>
/// <param name=""deserialize"">反序列化函数</param>
/// <returns></returns>
public static T CacheShell<T>(string key, string field, int timeoutSeconds, Func<T> getData, Func<(T, long), string> serialize = null, Func<string, (T, long)> deserialize = null) =>
Instance.CacheShell(key, field, timeoutSeconds, getData, serialize, deserialize);
/// <summary>
/// 缓存壳
/// </summary>
/// <typeparam name=""T"">缓存类型</typeparam>
/// <param name=""key"">缓存键</param>
/// <param name=""timeoutSeconds"">缓存秒数</param>
/// <param name=""getDataAsync"">获取源数据的函数</param>
/// <param name=""serialize"">序列化函数</param>
/// <param name=""deserialize"">反序列化函数</param>
/// <returns></returns>
async public static Task<T> CacheShellAsync<T>(string key, int timeoutSeconds, Func<Task<T>> getDataAsync, Func<T, string> serialize = null, Func<string, T> deserialize = null) =>
await Instance.CacheShellAsync(key, timeoutSeconds, getDataAsync, serialize, deserialize);
/// <summary>
/// 缓存壳(哈希表)
/// </summary>
/// <typeparam name=""T"">缓存类型</typeparam>
/// <param name=""key"">缓存键</param>
/// <param name=""field"">字段</param>
/// <param name=""timeoutSeconds"">缓存秒数</param>
/// <param name=""getDataAsync"">获取源数据的函数</param>
/// <param name=""serialize"">序列化函数</param>
/// <param name=""deserialize"">反序列化函数</param>
/// <returns></returns>
async public static Task<T> CacheShellAsync<T>(string key, string field, int timeoutSeconds, Func<Task<T>> getDataAsync, Func<(T, long), string> serialize = null, Func<string, (T, long)> deserialize = null) =>
await Instance.CacheShellAsync(key, field, timeoutSeconds, getDataAsync, serialize, deserialize);
}}
}}";
#endregion
/// <summary>
/// Composite-format template ({0} = project namespace) for the generated
/// {0}.BLL.ItemCache class: an in-memory key/value cache with per-entry
/// expiration, backed by two dictionaries (key -> expire-time, expire-time ->
/// key/value map) and a linked list of expire times that is pruned on Set.
/// Expiration is measured in seconds since 2016-05-01. Literal braces in the
/// emitted C# are escaped as {{ / }} for string.Format.
/// </summary>
public static readonly string BLL_Build_ItemCache_cs =
#region 内容太长已被收起
@"using System;
using System.Collections.Generic;
namespace {0}.BLL {{
public partial class ItemCache {{
private static Dictionary<string, long> _dic1 = new Dictionary<string, long>();
private static Dictionary<long, Dictionary<string, string>> _dic2 = new Dictionary<long, Dictionary<string, string>>();
private static LinkedList<long> _linked = new LinkedList<long>();
private static object _dic1_lock = new object();
private static object _dic2_lock = new object();
private static object _linked_lock = new object();
public static void Clear() {{
lock(_dic1_lock) {{
_dic1.Clear();
}}
lock(_dic2_lock) {{
_dic2.Clear();
}}
lock(_linked_lock) {{
_linked.Clear();
}}
}}
public static void Remove(string key) {{
if (string.IsNullOrEmpty(key)) return;
long time;
if (_dic1.TryGetValue(key, out time) == false) return;
lock (_dic1_lock) {{
_dic1.Remove(key);
}}
if (_dic2.ContainsKey(time)) {{
lock (_dic2_lock) {{
_dic2.Remove(time);
}}
}}
lock (_linked_lock) {{
_linked.Remove(time);
}}
}}
public static string Get(string key) {{
if (string.IsNullOrEmpty(key)) return null;
long time;
if (_dic1.TryGetValue(key, out time) == false) return null;
Dictionary<string, string> dic;
if (_dic2.TryGetValue(time, out dic) == false) {{
if (_dic1.ContainsKey(key)) {{
lock (_dic1_lock) {{
_dic1.Remove(key);
}}
}}
return null;
}}
if (DateTime.Now.Subtract(new DateTime(2016, 5, 1)).TotalSeconds > time) {{
if (_dic1.ContainsKey(key)) {{
lock (_dic1_lock) {{
_dic1.Remove(key);
}}
}}
if (_dic2.ContainsKey(time)) {{
lock (_dic2_lock) {{
_dic2.Remove(time);
}}
}}
lock (_linked_lock) {{
_linked.Remove(time);
}}
return null;
}}
string ret;
if (dic.TryGetValue(key, out ret) == false) return null;
return ret;
}}
public static void Set(string key, string value, int expire) {{
if (string.IsNullOrEmpty(key) || expire <= 0) return;
long time_cur = (long)DateTime.Now.Subtract(new DateTime(2016, 5, 1)).TotalSeconds;
long time = time_cur + expire;
long time2;
if (_dic1.TryGetValue(key, out time2) == false) {{
lock (_dic1_lock) {{
if (_dic1.TryGetValue(key, out time2) == false) {{
_dic1.Add(key, time2 = time);
}}
}}
}}
if (time2 != time) {{
lock (_dic1_lock) {{
_dic1[key] = time;
}}
lock (_dic2_lock) {{
_dic2.Remove(time2);
}}
}}
Dictionary<string, string> dic;
bool isNew = false;
if (_dic2.TryGetValue(time, out dic) == false) {{
lock (_dic2_lock) {{
if (_dic2.TryGetValue(time, out dic) == false) {{
_dic2.Add(time, dic = new Dictionary<string, string>());
isNew = true;
}}
if (dic.ContainsKey(key) == false) dic.Add(key, value);
else dic[key] = value;
}}
}} else {{
lock (_dic2_lock) {{
if (dic.ContainsKey(key) == false) dic.Add(key, value);
else dic[key] = value;
}}
}}
if (isNew == true) {{
lock (_linked_lock) {{
if (_linked.Count == 0) {{
_linked.AddFirst(time);
}} else {{
LinkedListNode<long> node = _linked.First;
while (node != null) {{
if (node.Value < time_cur) {{
_linked.Remove(node);
Dictionary<string, string> dic_del;
if (_dic2.TryGetValue(node.Value, out dic_del)) {{
lock (_dic2_lock) {{
_dic2.Remove(node.Value);
foreach (KeyValuePair<string, string> dic_del_in in dic_del) {{
if (_dic1.ContainsKey(dic_del_in.Key)) {{
lock (_dic1_lock) {{
_dic1.Remove(dic_del_in.Key);
}}
}}
}}
}}
}}
node = _linked.First;
}} else break;
}}
if (node == null)
_linked.AddFirst(time);
else if (node != null && _linked.Last.Value < time)
_linked.AddLast(time);
else {{
while (node != null && node.Value < time) node = node.Next;
if (node != null && node.Value != time) {{
_linked.AddBefore(node, time);
}}
}}
}}
}}
}}
}}
}}";
#endregion
/// <summary>
/// Composite-format template for the generated {0}ExtensionMethods static
/// class. {0} = project namespace, {1} = additional generated members spliced
/// in by the generator. Provides GetJson (concatenates items into a JSON-style
/// array string) and GetBson (invokes each item's ToBson via reflection or a
/// caller-supplied delegate to build IDictionary instances). Literal braces in
/// the emitted C# are escaped as {{ / }} for string.Format.
/// </summary>
public static readonly string Model_Build_ExtensionMethods_cs =
#region 内容太长已被收起
@"using System;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Newtonsoft.Json;
using {0}.Model;
public static partial class {0}ExtensionMethods {{
{1}
public static string GetJson(IEnumerable items) {{
StringBuilder sb = new StringBuilder();
sb.Append(""["");
IEnumerator ie = items.GetEnumerator();
if (ie.MoveNext()) {{
while (true) {{
sb.Append(string.Concat(ie.Current));
if (ie.MoveNext()) sb.Append("","");
else break;
}}
}}
sb.Append(""]"");
return sb.ToString();
}}
public static IDictionary[] GetBson(IEnumerable items, Delegate func = null) {{
List<IDictionary> ret = new List<IDictionary>();
IEnumerator ie = items.GetEnumerator();
while (ie.MoveNext()) {{
if (ie.Current == null) ret.Add(null);
else if (func == null) ret.Add(ie.Current.GetType().GetMethod(""ToBson"").Invoke(ie.Current, new object[] {{ false }}) as IDictionary);
else {{
object obj = func.GetMethodInfo().Invoke(func.Target, new object[] {{ ie.Current }});
if (obj is IDictionary) ret.Add(obj as IDictionary);
else {{
Hashtable ht = new Hashtable();
PropertyInfo[] pis = obj.GetType().GetProperties();
foreach (PropertyInfo pi in pis) ht[pi.Name] = pi.GetValue(obj);
ret.Add(ht);
}}
}}
}}
return ret.ToArray();
}}
}}";
#endregion
/// <summary>
/// Composite-format template ({0} = project namespace) for the generated
/// {0}.db.csproj: a netstandard2.0 SDK-style project referencing dng.Mssql
/// and CSRedisCore packages.
/// </summary>
public static readonly string Db_csproj =
#region 内容太长已被收起
@"<Project Sdk=""Microsoft.NET.Sdk"">
<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
<AssemblyName>{0}.db</AssemblyName>
</PropertyGroup>
<ItemGroup>
<PackageReference Include=""dng.Mssql"" Version=""1.3.9"" />
<PackageReference Include=""CSRedisCore"" Version=""3.0.46"" />
</ItemGroup>
</Project>
";
#endregion
/// <summary>
/// Composite-format template ({0} = project namespace) for the generated
/// Infrastructure.csproj: netstandard2.0 project referencing the {0}.db
/// project plus ASP.NET Core, NLog, and Swashbuckle packages.
/// </summary>
public static readonly string Infrastructure_csproj =
#region 内容太长已被收起
@"<Project Sdk=""Microsoft.NET.Sdk"">
<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
<WarningLevel>3</WarningLevel>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include=""..\{0}.db\{0}.db.csproj"" />
</ItemGroup>
<ItemGroup>
<PackageReference Include=""Caching.CSRedis"" Version=""3.0.46"" />
<PackageReference Include=""Microsoft.AspNetCore.Mvc"" Version=""2.1.1"" />
<PackageReference Include=""Microsoft.AspNetCore.Session"" Version=""2.1.1"" />
<PackageReference Include=""Microsoft.AspNetCore.Diagnostics"" Version=""2.1.1"" />
<PackageReference Include=""Microsoft.Extensions.Configuration.EnvironmentVariables"" Version=""2.1.1"" />
<PackageReference Include=""Microsoft.Extensions.Configuration.FileExtensions"" Version=""2.1.1"" />
<PackageReference Include=""Microsoft.Extensions.Configuration.Json"" Version=""2.1.1"" />
<PackageReference Include=""NLog.Extensions.Logging"" Version=""1.4.0"" />
<PackageReference Include=""NLog.Web.AspNetCore"" Version=""4.8.0"" />
<PackageReference Include=""Swashbuckle.AspNetCore"" Version=""4.0.1"" />
<PackageReference Include=""Swashbuckle.AspNetCore.Annotations"" Version=""4.0.1"" />
<PackageReference Include=""System.Text.Encoding.CodePages"" Version=""4.5.1"" />
</ItemGroup>
</Project>
";
#endregion
/// <summary>
/// Template (no format placeholders used at runtime beyond brace escaping) for
/// the generated StarupExtensions static class: module discovery from the
/// "Module" folder (LoadInstalledModules), per-module appsettings.json loading
/// (AddCustomizedJsonFile), MVC setup with module assembly parts
/// (AddCustomizedMvc), and static-file caching (UseCustomizedStaticFiles).
/// NOTE(review): "Starup" looks like a typo for "Startup", but the name is
/// part of the emitted class/file identity — renaming would break consumers.
/// Literal braces in the emitted C# are escaped as {{ / }}.
/// </summary>
public static readonly string WebHost_Extensions_StarupExtensions_cs =
#region 内容太长已被收起
@"using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc.Razor;
using Microsoft.CodeAnalysis;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.FileProviders;
using Microsoft.Extensions.Logging;
using Microsoft.Net.Http.Headers;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.Loader;
public static class StarupExtensions {{
public static ConfigurationBuilder LoadInstalledModules(this ConfigurationBuilder build, IList<ModuleInfo> modules, IHostingEnvironment env) {{
var moduleRootFolder = new DirectoryInfo(Path.Combine(env.ContentRootPath, ""Module""));
var moduleFolders = moduleRootFolder.GetDirectories();
foreach (var moduleFolder in moduleFolders) {{
Assembly assembly = null;
IModuleInitializer moduleInitializer = null;
try {{
assembly = AssemblyLoadContext.Default.LoadFromAssemblyPath(Path.Combine(moduleFolder.FullName, moduleFolder.Name + "".dll""));
var moduleInitializerType = assembly.GetTypes().FirstOrDefault(x => typeof(IModuleInitializer).IsAssignableFrom(x));
if ((moduleInitializerType != null) && (moduleInitializerType != typeof(IModuleInitializer))) {{
moduleInitializer = (IModuleInitializer)Activator.CreateInstance(moduleInitializerType);
}}
}} catch (FileLoadException) {{
throw;
}}
if (assembly.FullName.Contains(moduleFolder.Name))
modules.Add(new ModuleInfo {{
Name = moduleFolder.Name,
Assembly = assembly,
Initializer = moduleInitializer,
Path = moduleFolder.FullName
}});
}}
return build;
}}
public static ConfigurationBuilder AddCustomizedJsonFile(this ConfigurationBuilder build, IList<ModuleInfo> modules, IHostingEnvironment env, string productPath) {{
build.SetBasePath(env.ContentRootPath).AddJsonFile(""appsettings.json"", true, true);
foreach (var module in modules) {{
var jsonpath = $""Module/{{module.Name}}/appsettings.json"";
if (File.Exists(Path.Combine(env.ContentRootPath, jsonpath)))
build.AddJsonFile(jsonpath, true, true);
}}
if (env.IsProduction()) {{
build.AddJsonFile(Path.Combine(productPath, ""appsettings.json""), true, true);
foreach (var module in modules) {{
var jsonpath = Path.Combine(productPath, $""Module_{{module.Name}}_appsettings.json"");
if (File.Exists(Path.Combine(env.ContentRootPath, jsonpath)))
build.AddJsonFile(jsonpath, true, true);
}}
}}
return build;
}}
public static IServiceCollection AddCustomizedMvc(this IServiceCollection services, IList<ModuleInfo> modules) {{
var mvcBuilder = services.AddMvc().AddJsonOptions(a => {{
a.SerializerSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());
a.SerializerSettings.DateFormatHandling = DateFormatHandling.IsoDateFormat;
a.SerializerSettings.DateTimeZoneHandling = DateTimeZoneHandling.Utc;
}})
.AddRazorOptions(o => {{
foreach (var module in modules) {{
var a = MetadataReference.CreateFromFile(module.Assembly.Location);
o.AdditionalCompilationReferences.Add(a);
}}
}})
.AddViewLocalization()
.AddDataAnnotationsLocalization();
foreach (var module in modules) {{
mvcBuilder.AddApplicationPart(module.Assembly);
}}
services.Configure<RazorViewEngineOptions>(options => {{ options.ViewLocationExpanders.Add(new ModuleViewLocationExpander()); }});
return services;
}}
public static IApplicationBuilder UseCustomizedStaticFiles(this IApplicationBuilder app, IList<ModuleInfo> modules) {{
app.UseDefaultFiles();
app.UseStaticFiles(new StaticFileOptions() {{
OnPrepareResponse = (context) => {{
var headers = context.Context.Response.GetTypedHeaders();
headers.CacheControl = new CacheControlHeaderValue() {{
Public = true,
MaxAge = TimeSpan.FromDays(60)
}};
}}
}});
return app;
}}
}}
";
#endregion
/// <summary>
/// Template for the generated Swashbuckle/Swagger extension class: a
/// FormDataOperationFilter that defaults POST/PUT consumes to
/// application/x-www-form-urlencoded, AddCustomizedSwaggerGen (per-group docs,
/// XML comment inclusion from Module folders and Infrastructure.xml), and
/// UseCustomizedSwagger for the UI endpoints. Literal braces in the emitted
/// C# are escaped as {{ / }}.
/// </summary>
public static readonly string WebHost_Extensions_SwaggerExtensions_cs =
#region 内容太长已被收起
@"using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.DependencyInjection;
using Swashbuckle.AspNetCore.SwaggerGen;
using System.IO;
using System.Linq;
namespace Swashbuckle.AspNetCore.Swagger {{
public class FormDataOperationFilter : IOperationFilter {{
public void Apply(Operation operation, OperationFilterContext context) {{
if (context.ApiDescription.TryGetMethodInfo(out var method) == false) return;
var actattrs = method.GetCustomAttributes(false);
if (actattrs.OfType<HttpPostAttribute>().Any() ||
actattrs.OfType<HttpPutAttribute>().Any())
if (operation.Consumes.Count == 0)
operation.Consumes.Add(""application/x-www-form-urlencoded"");
}}
}}
public static class SwashbuckleSwaggerExtensions {{
public static IServiceCollection AddCustomizedSwaggerGen(this IServiceCollection services) {{
services.AddSwaggerGen(options => {{
foreach (var doc in _docs) options.SwaggerDoc(doc, new Info {{ Version = doc }});
options.DocInclusionPredicate((docName, apiDesc) => {{
if (apiDesc.TryGetMethodInfo(out var method) == false) return false;
var versions = method.DeclaringType.GetCustomAttributes(false)
.OfType<ApiExplorerSettingsAttribute>()
.Select(attr => attr.GroupName);
if (docName == ""未分类"" && versions.Count() == 0) return true;
return versions.Any(v => v == docName);
}});
options.IgnoreObsoleteActions();
//options.IgnoreObsoleteControllers(); // 类、方法标记 [Obsolete],可以阻止【Swagger文档】生成
options.EnableAnnotations();
options.DescribeAllEnumsAsStrings();
options.CustomSchemaIds(a => a.FullName);
options.OperationFilter<FormDataOperationFilter>();
string root = Path.Combine(Directory.GetCurrentDirectory(), ""Module"");
string xmlFile = string.Empty;
string[] dirs = Directory.GetDirectories(root);
foreach (var d in dirs) {{
xmlFile = Path.Combine(d, $""{{new DirectoryInfo(d).Name}}.xml"");
if (File.Exists(xmlFile))
options.IncludeXmlComments(xmlFile); // 使用前需开启项目注释 xmldoc
}}
var InfrastructureXml = Directory.GetFiles(Directory.GetCurrentDirectory(), ""Infrastructure.xml"", SearchOption.AllDirectories);
if (InfrastructureXml.Any())
options.IncludeXmlComments(InfrastructureXml[0]);
}});
return services;
}}
static string[] _docs = new[] {{ ""未分类"", ""公共"", ""cms"", ""后台管理"" }};
public static IApplicationBuilder UseCustomizedSwagger(this IApplicationBuilder app, IHostingEnvironment env) {{
return app.UseSwagger().UseSwaggerUI(options => {{
foreach (var doc in _docs) options.SwaggerEndpoint($""/swagger/{{doc}}/swagger.json"", doc);
}});
}}
}}
}}
";
#endregion
/// <summary>
/// Composite-format template ({0} = project namespace) for the generated
/// nlog.config: file targets for all/own/SQLExecuter logs plus a blackhole
/// for Microsoft.* loggers; the {0}_DAL_sqlhelper logger routes to the
/// SQLExecuter target. NLog layout tokens use {{ }} because the whole string
/// is passed through string.Format.
/// </summary>
public static readonly string WebHost_nlog_config =
#region 内容太长已被收起
@"<?xml version=""1.0"" encoding=""utf-8"" ?>
<nlog xmlns=""http://www.nlog-project.org/schemas/NLog.xsd"" xmlns:xsi=""http://www.w3.org/2001/XMLSchema-instance""
autoReload=""true""
internalLogLevel=""Warn""
internalLogFile=""internal-nlog.txt"">
<!-- Load the ASP.NET Core plugin -->
<extensions>
<add assembly=""NLog.Web.AspNetCore""/>
</extensions>
<!-- Layout: https://github.com/NLog/NLog/wiki/Layout%20Renderers -->
<targets>
<target xsi:type=""File"" name=""allfile"" fileName=""../nlog/all-${{shortdate}}.log""
layout=""${{longdate}}|${{logger}}|${{uppercase:${{level}}}}|${{message}} ${{exception}}|${{aspnet-Request-Url}}"" />
<target xsi:type=""File"" name=""ownFile-web"" fileName=""../nlog/own-${{shortdate}}.log""
layout=""${{longdate}}|${{logger}}|${{uppercase:${{level}}}}| ${{message}} ${{exception}}|${{aspnet-Request-Url}}"" />
<target xsi:type=""File"" name=""SQLExecuter"" fileName=""../nlog/SQLExecuter-${{shortdate}}.log""
layout=""${{longdate}} ${{message}} ${{exception}}|${{aspnet-Request-Url}} ${{document-uri}} "" />
<target xsi:type=""Null"" name=""blackhole"" />
</targets>
<rules>
<logger name=""*"" minlevel=""Error"" writeTo=""allfile"" />
<logger name=""Microsoft.*"" minlevel=""Error"" writeTo=""blackhole"" final=""true"" />
<logger name=""*"" minlevel=""Error"" writeTo=""ownFile-web"" />
<logger name=""{0}_DAL_sqlhelper"" minlevel=""Warn"" writeTo=""SQLExecuter"" />
</rules>
</nlog>
";
#endregion
/// <summary>
/// Composite-format template for the generated appsettings.json. {0} = project
/// namespace (connection-string key, redis prefix, BLL cache section name),
/// {1} = database connection string prefix. JSON braces are escaped as
/// {{ / }} for string.Format.
/// </summary>
public static readonly string WebHost_appsettings_json =
#region 内容太长已被收起
@"{{
""Logging"": {{
""IncludeScopes"": false,
""LogLevel"": {{
""Default"": ""Debug"",
""System"": ""Information"",
""Microsoft"": ""Information""
}}
}},
""ConnectionStrings"": {{
""{0}_mssql"": ""{1};Pooling=true;Max Pool Size=100"",
""redis1"": ""127.0.0.1:6379,password=,defaultDatabase=0,poolsize=50,ssl=false,writeBuffer=20480,prefix={0}"",
""redis2"": ""127.0.0.1:6379,password=,defaultDatabase=0,poolsize=50,ssl=false,writeBuffer=20480,prefix={0}""
}},
""{0}_BLL_ITEM_CACHE"": {{
""Timeout"": 180
}}
}}
";
#endregion
/// <summary>
/// Composite-format template ({0} = project namespace) for the generated
/// {0}.WebHost Program.cs: builds configuration from command-line args and
/// runs a Kestrel WebHost using Startup. Literal braces are escaped as
/// {{ / }} for string.Format.
/// </summary>
public static readonly string WebHost_Program_cs =
#region 内容太长已被收起
@"using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using System.IO;
namespace {0}.WebHost {{
public class Program {{
public static void Main(string[] args) {{
var config = new ConfigurationBuilder()
.AddCommandLine(args)
.Build();
//dotnet run --urls=http://0.0.0.0:5000
var host = new WebHostBuilder()
.UseConfiguration(config)
.UseKestrel()
.UseContentRoot(Directory.GetCurrentDirectory())
.UseIISIntegration()
.UseStartup<Startup>()
.Build();
host.Run();
}}
}}
}}
";
#endregion
/// <summary>
/// Composite-format template ({0} = project namespace) for the generated
/// {0}.WebHost Startup.cs: loads modules and layered appsettings, configures
/// Json.NET defaults, session, CORS, MVC, Swagger (dev only), NLog, and
/// initializes {0}.BLL.SqlHelper from the configured connection string.
/// Literal braces are escaped as {{ / }} for string.Format.
/// </summary>
public static readonly string WebHost_Startup_cs =
#region 内容太长已被收起
@"using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.Razor;
using Microsoft.Extensions.Caching.Distributed;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using NLog.Extensions.Logging;
using Swashbuckle.AspNetCore.Swagger;
using System;
using System.Collections.Generic;
using System.Text;
namespace {0}.WebHost {{
public class Startup {{
public Startup(IHostingEnvironment env) {{
var builder = new ConfigurationBuilder()
.LoadInstalledModules(Modules, env)
.AddCustomizedJsonFile(Modules, env, ""/var/webos/{0}/"");
this.Configuration = builder.AddEnvironmentVariables().Build();
this.env = env;
Newtonsoft.Json.JsonConvert.DefaultSettings = () => {{
var st = new Newtonsoft.Json.JsonSerializerSettings();
st.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());
st.DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat;
st.DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.RoundtripKind;
return st;
}};
//去掉以下注释可开启 RedisHelper 静态类
//var csredis = new CSRedis.CSRedisClient(Configuration[""ConnectionStrings:redis1""]); //单redis节点模式
//RedisHelper.Initialization(csredis);
}}
public static List<ModuleInfo> Modules = new List<ModuleInfo>();
public IConfiguration Configuration {{ get; }}
public IHostingEnvironment env {{ get; }}
public void ConfigureServices(IServiceCollection services) {{
//下面这行代码依赖redis-server,注释后系统将以memory作为缓存存储的介质
//services.AddSingleton<IDistributedCache>(new Microsoft.Extensions.Caching.Redis.CSRedisCache(RedisHelper.Instance));
services.AddSingleton<IConfiguration>(Configuration);
services.AddSingleton<IHostingEnvironment>(env);
services.AddScoped<CustomExceptionFilter>();
services.AddSession(a => {{
a.IdleTimeout = TimeSpan.FromMinutes(30);
a.Cookie.Name = ""Session_{0}"";
}});
services.AddCors(options => options.AddPolicy(""cors_all"", builder => builder.AllowAnyHeader().AllowAnyMethod().AllowAnyOrigin()));
services.AddCustomizedMvc(Modules);
Modules.ForEach(module => module.Initializer?.ConfigureServices(services, env));
if (env.IsDevelopment())
services.AddCustomizedSwaggerGen();
}}
public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory, IApplicationLifetime lifetime) {{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
Console.OutputEncoding = Encoding.GetEncoding(""GB2312"");
Console.InputEncoding = Encoding.GetEncoding(""GB2312"");
loggerFactory.AddConsole(Configuration.GetSection(""Logging""));
loggerFactory.AddNLog().AddDebug();
NLog.LogManager.LoadConfiguration(""nlog.config"");
if (env.IsDevelopment())
app.UseDeveloperExceptionPage();
{0}.BLL.SqlHelper.Initialization(app.ApplicationServices.GetService<IDistributedCache>(), Configuration.GetSection(""{0}_BLL_ITEM_CACHE""),
Configuration[""ConnectionStrings:{0}_mssql""], /* 此参数可以配置【从数据库】 */ null, loggerFactory.CreateLogger(""{0}_DAL_sqlhelper""));
app.UseSession();
app.UseCors(""cors_all"");
app.UseMvc();
app.UseCustomizedStaticFiles(Modules);
Modules.ForEach(module => module.Initializer?.Configure(app, env, loggerFactory, lifetime));
if (env.IsDevelopment())
app.UseCustomizedSwagger(env);
}}
}}
}}
";
#endregion
/// <summary>
/// Template for the generated WebHost .csproj: netcoreapp2.1 web project that
/// excludes Module/** content from compilation, references Infrastructure,
/// and runs a gulp copy-module task after build. No format placeholders are
/// substituted; braces are escaped as {{ / }} because the string is passed
/// through string.Format.
/// </summary>
public static readonly string WebHost_csproj =
#region 内容太长已被收起
@"<Project Sdk=""Microsoft.NET.Sdk.Web"">
<PropertyGroup>
<TargetFramework>netcoreapp2.1</TargetFramework>
<WarningLevel>3</WarningLevel>
<ServerGarbageCollection>false</ServerGarbageCollection>
<MvcRazorCompileOnPublish>false</MvcRazorCompileOnPublish>
</PropertyGroup>
<ItemGroup>
<Folder Include=""wwwroot\"" />
<Compile Remove=""Module\**"" />
<Compile Remove=""wwwroot\module\**"" />
<Content Remove=""Module\**"" />
<Content Remove=""wwwroot\module\**"" />
<EmbeddedResource Remove=""Module\**"" />
<EmbeddedResource Remove=""wwwroot\module\**"" />
<None Remove=""Module\**"" />
<None Remove=""wwwroot\module\**"" />
<Content Update=""nlog.config"">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<ProjectReference Include=""..\Infrastructure\Infrastructure.csproj"" />
</ItemGroup>
<ItemGroup>
<PackageReference Include=""Microsoft.AspNetCore.App"" />
</ItemGroup>
<Target Name=""PostBuild"" AfterTargets=""PostBuildEvent"">
<Exec Command=""gulp --gulpfile gulpfile.js copy-module"" />
</Target>
</Project>
";
#endregion
/// <summary>
/// Composite-format template for the generated Admin SysController. {0} =
/// project namespace, {1} = extra Sysdir initialization statements spliced in
/// by the generator. Exposes diagnostics endpoints for the SQL connection
/// pools and Redis nodes, and an init_sysdir stub. Literal braces are escaped
/// as {{ / }} for string.Format.
/// FIX: the ContentResult MIME type was ""text/plan"" (not a registered MIME
/// type) — corrected to ""text/plain"" so browsers render the diagnostics text.
/// </summary>
public static readonly string Module_Admin_Controllers_SysController =
#region 内容太长已被收起
@"using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
using {0}.BLL;
using {0}.Model;
namespace {0}.Module.Admin.Controllers {{
[Route(""[controller]"")]
[Obsolete]
public class SysController : Controller {{
[HttpGet(@""connection"")]
public ContentResult Get_connection() {{
var sb = new StringBuilder();
var pools = new List<System.Data.SqlClient.SqlConnectionPool>();
pools.Add(SqlHelper.Pool);
pools.AddRange(SqlHelper.SlavePools);
for (var a = 0; a < pools.Count; a++) {{
var pool = pools[a];
sb.AppendLine($@""【{{pool.Policy.Name}}】 状态:{{(pool.IsAvailable ? ""正常"" : $""[{{pool.UnavailableTime}}] {{pool.UnavailableException.Message}}"")}}
-------------------------------------------------------------------------------------------------------
{{pool.StatisticsFullily}}
"");
}}
return new ContentResult {{ ContentType = ""text/plain;charset=utf-8"", Content = sb.ToString() }};
}}
[HttpGet(@""connection/redis"")]
public ContentResult Get_connection_redis() {{
var sb = new StringBuilder();
foreach(var pool in RedisHelper.Nodes.Values) {{
sb.AppendLine($@""【{{pool.Policy.Name}}】 状态:{{(pool.IsAvailable ? ""正常"" : $""[{{pool.UnavailableTime}}] {{pool.UnavailableException.Message}}"")}}
-------------------------------------------------------------------------------------------------------
Slots:{{RedisHelper.Instance.SlotCache.Count}}/16384, {{pool.StatisticsFullily}}
"");
}}
return new ContentResult {{ ContentType = ""text/plain;charset=utf-8"", Content = sb.ToString() }};
}}
[HttpGet(@""init_sysdir"")]
public APIReturn Get_init_sysdir() {{
/*
if (Sysdir.SelectByParent_id(null).Count() > 0)
return new APIReturn(-33, ""本系统已经初始化过,页面没经过任何操作退出。"");
SysdirInfo dir1, dir2, dir3;
dir1 = Sysdir.Insert(null, DateTime.Now, ""运营管理"", 1, null);{1}
*/
return new APIReturn(0, ""管理目录已初始化完成。"");
}}
}}
}}
";
#endregion
/// <summary>
/// Composite-format template ({0} = project namespace) for the generated
/// Admin Home/Login controllers: HomeController redirects "/" to
/// /module/Admin; LoginController serves the login view and stores the posted
/// username in session under ""login.username"". Literal braces are escaped
/// as {{ / }} for string.Format.
/// </summary>
public static readonly string Module_Admin_Controllers_LoginController =
#region 内容太长已被收起
@"using System;
using System.Collections.Generic;
using System.Collections;
using System.Linq;
using System.Threading.Tasks;
using System.ComponentModel.DataAnnotations;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using {0}.BLL;
using {0}.Model;
namespace {0}.Module.Admin.Controllers {{
[Route("""")]
public class HomeController {{
[HttpGet]
public RedirectResult Index() {{
return new RedirectResult(""/module/Admin"");
}}
}}
[Route(""[controller]"")]
[Obsolete]
public class LoginController : BaseController {{
public LoginController(ILogger<LoginController> logger) : base(logger) {{ }}
[HttpGet, 匿名访问]
public ViewResult Index() {{
return View();
}}
[HttpPost, 匿名访问]
public APIReturn Post(LoginModel data) {{
HttpContext.Session.SetString(""login.username"", data.Username);
return APIReturn.成功;
}}
public class LoginModel {{
[FromForm, Required(ErrorMessage = ""请输入登陆名"")]
public string Username {{ get; set; }}
[FromForm, Required(ErrorMessage = ""请输入密码"")]
public string Password {{ get; set; }}
}}
}}
}}
";
#endregion
public static readonly string Module_Admin_Views_Login_Index_cshtml =
#region 内容太长已被收起
@"@{{
Layout = """";
}}
<!DOCTYPE html>
<html>
<head>
<meta charset=""utf-8"">
<meta http-equiv=""X-UA-Compatible"" content=""IE=edge"">
<title>{0}后台管理中心 - 登陆</title>
<!-- Tell the browser to be responsive to screen width -->
<meta content=""width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no"" name=""viewport"">
<link rel=""stylesheet"" href=""/module/admin/htm/bootstrap/css/bootstrap.min.css"">
<link rel=""stylesheet"" href=""/module/admin/htm/plugins/font-awesome/css/font-awesome.min.css"" />
<link rel=""stylesheet"" href=""/module/admin/htm/css/system.css"">
<script type=""text/javascript"" src=""/module/admin/htm/js/jQuery-2.1.4.min.js""></script>
<script type=""text/javascript"" src=""/module/admin/htm/js/lib.js""></script>
<!--[if lt IE 9]>
<script type='text/javascript' src='/module/admin/htm/plugins/html5shiv/html5shiv.min.js'></script>
<script type='text/javascript' src='/module/admin/htm/plugins/respond/respond.min.js'></script>
<![endif]-->
<style type=""text/css"">
.login-box-body--has-errors{{animation:shake .5s .25s 1;-webkit-animation:shake .5s .25s 1}}
@@keyframes shake{{0%,100%{{transform:translateX(0)}}20%,60%{{transform:translateX(-10px)}}40%,80%{{transform:translateX(10px)}}}}
@@-webkit-keyframes shake{{0%,100%{{-webkit-transform:translateX(0)}}20%,60%{{-webkit-transform:translateX(-10px)}}40%,80%{{-webkit-transform:translateX(10px)}}}}
</style>
</head>
<body class=""hold-transition login-page"">
<div class=""login-box"">
<div class=""login-logo"">
<a href=""/module/admin/""><b>{0}</b>后台管理中心</a>
</div>
<div id=""error_msg"" style=""display:none;"">
<div class=""alert alert-warning alert-dismissible"">
<button type=""button"" class=""close"" data-dismiss=""alert"" aria-hidden=""true"">×</button>
<h4><i class=""icon fa fa-warning""></i>警告!</h4>
{{0}}
</div>
</div>
<!-- /.login-logo -->
<div class=""login-box-body"">
<p class=""login-box-msg""></p>
<iframe name=""iframe_form_login"" hidden></iframe>
<form id=""form_login"" method=""post"" target=""iframe_form_login"">
@Html.AntiForgeryToken()
<input type=""hidden"" name=""__callback"" value=""login_callback"" />
<div class=""form-group has-feedback"">
<input name=""username"" type=""text"" class=""form-control"" placeholder=""Username"">
<span class=""glyphicon glyphicon-envelope form-control-feedback""></span>
</div>
<div class=""form-group has-feedback"">
<input name=""password"" type=""password"" class=""form-control"" placeholder=""Password"">
<span class=""glyphicon glyphicon-lock form-control-feedback""></span>
</div>
<div class=""row"">
<div class=""col-xs-8"">
</div>
<!-- /.col -->
<div class=""col-xs-4"">
<button type=""submit"" class=""btn btn-primary btn-block btn-flat"">登 陆</button>
</div>
<!-- /.col -->
</div>
</form>
</div>
<!-- /.login-box-body -->
</div>
<!-- /.login-box -->
<!-- jQuery 2.2.0 -->
<script src=""/module/admin/htm/plugins/jQuery/jQuery-2.2.0.min.js""></script>
<script src=""/module/admin/htm/bootstrap/js/bootstrap.min.js""></script>
<script type=""text/javascript"">
(function () {{
var msgtpl = $('#error_msg').html();
top.login_callback = function (rt) {{
if (rt.success) return location.href = '/module/admin/';
$('#error_msg').html(msgtpl.format(rt.message)).show();
$('div.login-box-body').addClass('login-box-body--has-errors');
setTimeout(function () {{
$('div.login-box-body').removeClass('login-box-body--has-errors');
}}, 2000);
}};
}})();
</script>
</body>
</html>
";
#endregion
public static readonly string Module_Admin_Controller =
#region 内容太长已被收起
@"using System;
using System.Collections.Generic;
using System.Collections;
using System.Linq;
using System.IO;
using System.Net;
using System.Net.NetworkInformation;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Configuration;
using Newtonsoft.Json.Linq;
using {0}.BLL;
using {0}.Model;
namespace {0}.Module.Admin.Controllers {{
[Route(""[controller]"")]
public class {1}Controller : BaseController {{
public {1}Controller(ILogger<{1}Controller> logger) : base(logger) {{ }}
[HttpGet]
async public Task<ActionResult> List({12}[FromQuery] int limit = 20, [FromQuery] int page = 1) {{
var select = {19}{1}.Select{8};{9}
var items = await select.Count(out var count){14}.Page(page, limit).ToListAsync();
ViewBag.items = items;
ViewBag.count = count;
return View();
}}
[HttpGet(@""add"")]
public ActionResult Edit() {{
return View();
}}
[HttpGet(@""edit"")]
async public Task<ActionResult> Edit({4}) {{
{1}Info item = await {19}{1}.GetItemAsync({5});
if (item == null) return APIReturn.记录不存在_或者没有权限;
ViewBag.item = item;
return View();
}}
/***************************************** POST *****************************************/
[HttpPost(@""add"")]
[ValidateAntiForgeryToken]
async public Task<APIReturn> _Add({10}) {{
{1}Info item = new {1}Info();{13}{7}
item = await {19}{1}.InsertAsync(item);{16}
return APIReturn.成功.SetData(""item"", item.ToBson());
}}
[HttpPost(@""edit"")]
[ValidateAntiForgeryToken]
async public Task<APIReturn> _Edit({4}{11}) {{
{1}Info item = await {19}{1}.GetItemAsync({5});
if (item == null) return APIReturn.记录不存在_或者没有权限;{6}{7}
int affrows = await {19}{1}.UpdateAsync(item);{17}
if (affrows > 0) return APIReturn.成功.SetMessage($""更新成功,影响行数:{{affrows}}"");
return APIReturn.失败;
}}
[HttpPost(""del"")]
[ValidateAntiForgeryToken]{18}
}}
}}
";
#endregion
public static readonly string Module_Admin_wwwroot_index_html =
#region 内容太长已被收起
@"<!DOCTYPE html>
<html lang=""zh-cmn-Hans"">
<head>
<meta charset=""utf-8"" />
<meta http-equiv=""X-UA-Compatible"" content=""IE=edge"" />
<title>{0}管理系统</title>
<meta content=""width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no"" name=""viewport"" />
<link href=""./htm/bootstrap/css/bootstrap.min.css"" rel=""stylesheet"" />
<link href=""./htm/plugins/font-awesome/css/font-awesome.min.css"" rel=""stylesheet"" />
<link href=""./htm/css/skins/_all-skins.css"" rel=""stylesheet"" />
<link href=""./htm/plugins/pace/pace.min.css"" rel=""stylesheet"" />
<link href=""./htm/plugins/datepicker/datepicker3.css"" rel=""stylesheet"" />
<link href=""./htm/plugins/timepicker/bootstrap-timepicker.min.css"" rel=""stylesheet"" />
<link href=""./htm/plugins/select2/select2.min.css"" rel=""stylesheet"" />
<link href=""./htm/plugins/treetable/css/jquery.treetable.css"" rel=""stylesheet"" />
<link href=""./htm/plugins/treetable/css/jquery.treetable.theme.default.css"" rel=""stylesheet"" />
<link href=""./htm/plugins/multiple-select/multiple-select.css"" rel=""stylesheet"" />
<link href=""./htm/css/system.css"" rel=""stylesheet"" />
<link href=""./htm/css/index.css"" rel=""stylesheet"" />
<script type=""text/javascript"" src=""./htm/js/jQuery-2.1.4.min.js""></script>
<script type=""text/javascript"" src=""./htm/bootstrap/js/bootstrap.min.js""></script>
<script type=""text/javascript"" src=""./htm/plugins/pace/pace.min.js""></script>
<script type=""text/javascript"" src=""./htm/plugins/datepicker/bootstrap-datepicker.js""></script>
<script type=""text/javascript"" src=""./htm/plugins/timepicker/bootstrap-timepicker.min.js""></script>
<script type=""text/javascript"" src=""./htm/plugins/select2/select2.full.min.js""></script>
<script type=""text/javascript"" src=""./htm/plugins/input-mask/jquery.inputmask.js""></script>
<script type=""text/javascript"" src=""./htm/plugins/input-mask/jquery.inputmask.date.extensions.js""></script>
<script type=""text/javascript"" src=""./htm/plugins/input-mask/jquery.inputmask.extensions.js""></script>
<script type=""text/javascript"" src=""./htm/plugins/treetable/jquery.treetable.js""></script>
<script type=""text/javascript"" src=""./htm/plugins/multiple-select/multiple-select.js""></script>
<script type=""text/javascript"" src=""./htm/js/lib.js""></script>
<script type=""text/javascript"" src=""./htm/js/bmw.js""></script>
<!--[if lt IE 9]>
<script type='text/javascript' src='./htm/plugins/html5shiv/html5shiv.min.js'></script>
<script type='text/javascript' src='./htm/plugins/respond/respond.min.js'></script>
<![endif]-->
</head>
<body class=""hold-transition skin-blue sidebar-mini"">
<div class=""wrapper"">
<!-- Main Header-->
<header class=""main-header"">
<!-- Logo--><a href=""./"" class=""logo"">
<!-- mini logo for sidebar mini 50x50 pixels--><span class=""logo-mini""><b>{0}</b></span>
<!-- logo for regular state and mobile devices--><span class=""logo-lg""><b>{0}管理系统</b></span>
</a>
<!-- Header Navbar-->
<nav role=""navigation"" class=""navbar navbar-static-top"">
<!-- Sidebar toggle button--><a href=""#"" data-toggle=""offcanvas"" role=""button"" class=""sidebar-toggle""><span class=""sr-only"">Toggle navigation</span></a>
<!-- Navbar Right Menu-->
<div class=""navbar-custom-menu"">
<ul class=""nav navbar-nav"">
<!-- User Account Menu-->
<li class=""dropdown user user-menu"">
<!-- Menu Toggle Button--><a href=""#"" data-toggle=""dropdown"" class=""dropdown-toggle"">
<!-- The user image in the navbar--><img src=""/htm/img/user2-160x160.jpg"" alt=""User Image"" class=""user-image"">
<!-- hidden-xs hides the username on small devices so only the image appears.--><span class=""hidden-xs""></span>
</a>
<ul class=""dropdown-menu"">
<!-- The user image in the menu-->
<li class=""user-header"">
<img src=""/htm/img/user2-160x160.jpg"" alt=""User Image"" class=""img-circle"">
<p></p>
</li>
<!-- Menu Footer-->
<li class=""user-footer"">
<div class=""pull-right"">
<a href=""#"" onclick=""$('form#form_logout').submit();return false;"" class=""btn btn-default btn-flat"">安全退出</a>
<form id=""form_logout"" method=""post"" action=""./exit.aspx""></form>
</div>
</li>
</ul>
</li>
</ul>
</div>
</nav>
</header>
<!-- Left side column. contains the logo and sidebar-->
<aside class=""main-sidebar"">
<!-- sidebar: style can be found in sidebar.less-->
<section class=""sidebar"">
<!-- Sidebar Menu-->
<ul class=""sidebar-menu"">
<!-- Optionally, you can add icons to the links-->
<li class=""treeview active"">
<a href=""#""><i class=""fa fa-laptop""></i><span>通用管理</span><i class=""fa fa-angle-left pull-right""></i></a>
<ul class=""treeview-menu"">{1}
</ul>
</li>
</ul>
<!-- /.sidebar-menu-->
</section>
<!-- /.sidebar-->
</aside>
<!-- Content Wrapper. Contains page content-->
<div class=""content-wrapper"">
<!-- Main content-->
<section id=""right_content"" class=""content"">
<div style=""display:none;"">
<!-- Your Page Content Here-->
<h1>这是一个测试首页</h1>
<h2>swagger webapi:<a href='/swagger/' target='_blank'>/swagger/</a><h2>
<h2>登陆地址:<a href='/login' target='_blank'>/login</a><h2>
<h2><a href='/sys/connection' target='_blank'>查看 mssql连接池</a><h2>
<h2><a href='/sys/connection/redis' target='_blank'>查看 Redis连接池</a><h2>
</div>
</section>
<!-- /.content-->
</div>
<!-- /.content-wrapper-->
</div>
<!-- ./wrapper-->
<script type=""text/javascript"" src=""./htm/js/system.js""></script>
<script type=""text/javascript"" src=""./htm/js/admin.js""></script>
<script type=""text/javascript"">
if (!location.hash) $('#right_content div:first').show();
// 路由功能
//针对上面的html初始化路由列表
function hash_encode(str) {{ return url_encode(base64.encode(str)).replace(/%/g, '_'); }}
function hash_decode(str) {{ return base64.decode(url_decode(str.replace(/_/g, '%'))); }}
window.div_left_router = {{}};
$('li.treeview.active ul li a').each(function(index, ele) {{
var href = $(ele).attr('href');
$(ele).attr('href', '#base64url' + hash_encode(href));
window.div_left_router[href] = $(ele).text();
}});
(function () {{
function Vipspa() {{
}}
Vipspa.prototype.start = function (config) {{
Vipspa.mainView = $(config.view);
startRouter();
window.onhashchange = function () {{
if (location._is_changed) return location._is_changed = false;
startRouter();
}};
}};
function startRouter() {{
var hash = location.hash;
if (hash === '') return //location.hash = $('li.treeview.active ul li a:first').attr('href');//'#base64url' + hash_encode('/resume_type/');
if (hash.indexOf('#base64url') !== 0) return;
var act = hash_decode(hash.substr(10, hash.length - 10));
//叶湘勤增加的代码,加载或者提交form后,显示内容
function ajax_success(refererUrl) {{
if (refererUrl == location.pathname) {{ startRouter(); return function(){{}}; }}
var hash = '#base64url' + hash_encode(refererUrl);
if (location.hash != hash) {{
location._is_changed = true;
location.hash = hash;
}}'\''
return function (data, status, xhr) {{
var div;
Function.prototype.ajax = $.ajax;
top.mainViewNav = {{
url: refererUrl,
trans: function (url) {{
var act = url;
act = act.substr(0, 1) === '/' || act.indexOf('://') !== -1 || act.indexOf('data:') === 0 ? act : join_url(refererUrl, act);
return act;
}},
goto: function (url_or_form, target) {{
var form = url_or_form;
if (typeof form === 'string') {{
var act = this.trans(form);
if (String(target).toLowerCase() === '_blank') return window.open(act);
location.hash = '#base64url' + hash_encode(act);
}}
else {{
if (!window.ajax_form_iframe_max) window.ajax_form_iframe_max = 1;
window.ajax_form_iframe_max++;
var iframe = $('<iframe name=""ajax_form_iframe{{0}}""></iframe>'.format(window.ajax_form_iframe_max));
Vipspa.mainView.append(iframe);
var act = $(form).attr('action') || '';
act = act.substr(0, 1) === '/' || act.indexOf('://') !== -1 ? act : join_url(refererUrl, act);
if ($(form).find(':file[name]').length > 0) $(form).attr('enctype', 'multipart/form-data');
$(form).attr('action', act);
$(form).attr('target', iframe.attr('name'));
iframe.on('load', function () {{
var doc = this.contentWindow ? this.contentWindow.document : this.document;
if (doc.body.innerHTML.length === 0) return;
if (doc.body.innerHTML.indexOf('Error:') === 0) return alert(doc.body.innerHTML.substr(6));
//以下 '<script ' + '是防止与本页面相匹配,不要删除
if (doc.body.innerHTML.indexOf('<script ' + 'type=""text/javascript"">location.href=""') === -1) {{
ajax_success(doc.location.pathname + doc.location.search)(doc.body.innerHTML, 200, null);
}}
}});
}}
}},
reload: startRouter,
query: qs_parseByUrl(refererUrl)
}};
top.mainViewInit = function () {{
if (!div) return setTimeout(top.mainViewInit, 10);
admin_init(function (selector) {{
if (/<[^>]+>/.test(selector)) return $(selector);
return div.find(selector);
}}, top.mainViewNav);
}};
if (/<body[^>]*>/i.test(data))
data = data.match(/<body[^>]*>(([^<]|<(?!\/body>))*)<\/body>/i)[1];
div = Vipspa.mainView.html(data);
}};
}};
$.ajax({{
type: 'GET',
url: act,
dataType: 'html',
success: ajax_success(act),
error: function (jqXHR, textStatus, errorThrown) {{
var data = jqXHR.responseText;
if (/<body[^>]*>/i.test(data))
data = data.match(/<body[^>]*>(([^<]|<(?!\/body>))*)<\/body>/i)[1];
Vipspa.mainView.html(data);
}}
}});
}}
window.vipspa = new Vipspa();
}})();
$(function () {{
vipspa.start({{
view: '#right_content',
}});
}});
// 页面加载进度条
$(document).ajaxStart(function() {{ Pace.restart(); }});
</script>
</body>
</html>";
#endregion
public static readonly string Module_Test_Controller =
#region 内容太长已被收起
@"using System;
using System.Collections.Generic;
using System.Collections;
using System.Linq;
using System.IO;
using System.Net;
using System.Net.NetworkInformation;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Configuration;
using Newtonsoft.Json.Linq;
using {0}.BLL;
using {0}.Model;
namespace {0}.Module.Test.Controllers {{
[Route(""[controller]"")]
public class {1}Controller : BaseController {{
public {1}Controller(ILogger<{1}Controller> logger) : base(logger) {{ }}
[HttpGet]
public APIReturn List() {{
return APIReturn.成功;
}}
}}
}}
";
#endregion
public static readonly string Module_Test_Init_cs =
#region 内容太长已被收起
@"using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Text;
using {0}.BLL;
using {0}.Model;
namespace {0}.Module.{1} {{
/// <summary>
/// 配置本 Module 依赖注入等,由 WebHost/Startup.cs 加载触发执行
/// </summary>
public class Init : IModuleInitializer {{
public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory, IApplicationLifetime lifetime) {{
}}
public void ConfigureServices(IServiceCollection services, IHostingEnvironment env) {{
}}
}}
}}
";
#endregion
public static readonly string Module_csproj =
#region 内容太长已被收起
@"<Project Sdk=""Microsoft.NET.Sdk"">
<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
<WarningLevel>3</WarningLevel>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include=""..\..\Infrastructure\Infrastructure.csproj"" />
</ItemGroup>
</Project>
";
#endregion
}
}
}
|
27182812/ChatGLM-LLaMA-chinese-insturct | 16,830 | src/transformers/models/deta/convert_deta_resnet_to_pytorch.py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert DETA checkpoints from the original repository.
URL: https://github.com/jozhang97/DETA/tree/master"""
import argparse
import json
from pathlib import Path
import requests
import torch
from huggingface_hub import cached_download, hf_hub_download, hf_hub_url
from PIL import Image
from transformers import DetaConfig, DetaForObjectDetection, DetaImageProcessor
from transformers.utils import logging
logging.set_verbosity_info()
logger = logging.get_logger(__name__)
def get_deta_config():
config = DetaConfig(
num_queries=900,
encoder_ffn_dim=2048,
decoder_ffn_dim=2048,
num_feature_levels=5,
assign_first_stage=True,
with_box_refine=True,
two_stage=True,
)
# set labels
config.num_labels = 91
repo_id = "huggingface/label-files"
filename = "coco-detection-id2label.json"
id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
id2label = {int(k): v for k, v in id2label.items()}
config.id2label = id2label
config.label2id = {v: k for k, v in id2label.items()}
return config
# here we list all keys to be renamed (original name on the left, our name on the right)
def create_rename_keys(config):
rename_keys = []
# stem
# fmt: off
rename_keys.append(("backbone.0.body.conv1.weight", "model.backbone.model.embedder.embedder.convolution.weight"))
rename_keys.append(("backbone.0.body.bn1.weight", "model.backbone.model.embedder.embedder.normalization.weight"))
rename_keys.append(("backbone.0.body.bn1.bias", "model.backbone.model.embedder.embedder.normalization.bias"))
rename_keys.append(("backbone.0.body.bn1.running_mean", "model.backbone.model.embedder.embedder.normalization.running_mean"))
rename_keys.append(("backbone.0.body.bn1.running_var", "model.backbone.model.embedder.embedder.normalization.running_var"))
# stages
for stage_idx in range(len(config.backbone_config.depths)):
for layer_idx in range(config.backbone_config.depths[stage_idx]):
# shortcut
if layer_idx == 0:
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.downsample.0.weight",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.shortcut.convolution.weight",
)
)
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.downsample.1.weight",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.shortcut.normalization.weight",
)
)
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.downsample.1.bias",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.shortcut.normalization.bias",
)
)
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.downsample.1.running_mean",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.shortcut.normalization.running_mean",
)
)
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.downsample.1.running_var",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.shortcut.normalization.running_var",
)
)
# 3 convs
for i in range(3):
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.conv{i+1}.weight",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.layer.{i}.convolution.weight",
)
)
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.bn{i+1}.weight",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.layer.{i}.normalization.weight",
)
)
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.bn{i+1}.bias",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.layer.{i}.normalization.bias",
)
)
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.bn{i+1}.running_mean",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.layer.{i}.normalization.running_mean",
)
)
rename_keys.append(
(
f"backbone.0.body.layer{stage_idx + 1}.{layer_idx}.bn{i+1}.running_var",
f"model.backbone.model.encoder.stages.{stage_idx}.layers.{layer_idx}.layer.{i}.normalization.running_var",
)
)
# transformer encoder
for i in range(config.encoder_layers):
rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.sampling_offsets.weight", f"model.encoder.layers.{i}.self_attn.sampling_offsets.weight"))
rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.sampling_offsets.bias", f"model.encoder.layers.{i}.self_attn.sampling_offsets.bias"))
rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.attention_weights.weight", f"model.encoder.layers.{i}.self_attn.attention_weights.weight"))
rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.attention_weights.bias", f"model.encoder.layers.{i}.self_attn.attention_weights.bias"))
rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.value_proj.weight", f"model.encoder.layers.{i}.self_attn.value_proj.weight"))
rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.value_proj.bias", f"model.encoder.layers.{i}.self_attn.value_proj.bias"))
rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.output_proj.weight", f"model.encoder.layers.{i}.self_attn.output_proj.weight"))
rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.output_proj.bias", f"model.encoder.layers.{i}.self_attn.output_proj.bias"))
rename_keys.append((f"transformer.encoder.layers.{i}.norm1.weight", f"model.encoder.layers.{i}.self_attn_layer_norm.weight"))
rename_keys.append((f"transformer.encoder.layers.{i}.norm1.bias", f"model.encoder.layers.{i}.self_attn_layer_norm.bias"))
rename_keys.append((f"transformer.encoder.layers.{i}.linear1.weight", f"model.encoder.layers.{i}.fc1.weight"))
rename_keys.append((f"transformer.encoder.layers.{i}.linear1.bias", f"model.encoder.layers.{i}.fc1.bias"))
rename_keys.append((f"transformer.encoder.layers.{i}.linear2.weight", f"model.encoder.layers.{i}.fc2.weight"))
rename_keys.append((f"transformer.encoder.layers.{i}.linear2.bias", f"model.encoder.layers.{i}.fc2.bias"))
rename_keys.append((f"transformer.encoder.layers.{i}.norm2.weight", f"model.encoder.layers.{i}.final_layer_norm.weight"))
rename_keys.append((f"transformer.encoder.layers.{i}.norm2.bias", f"model.encoder.layers.{i}.final_layer_norm.bias"))
# transformer decoder
for i in range(config.decoder_layers):
rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.sampling_offsets.weight", f"model.decoder.layers.{i}.encoder_attn.sampling_offsets.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.sampling_offsets.bias", f"model.decoder.layers.{i}.encoder_attn.sampling_offsets.bias"))
rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.attention_weights.weight", f"model.decoder.layers.{i}.encoder_attn.attention_weights.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.attention_weights.bias", f"model.decoder.layers.{i}.encoder_attn.attention_weights.bias"))
rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.value_proj.weight", f"model.decoder.layers.{i}.encoder_attn.value_proj.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.value_proj.bias", f"model.decoder.layers.{i}.encoder_attn.value_proj.bias"))
rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.output_proj.weight", f"model.decoder.layers.{i}.encoder_attn.output_proj.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.output_proj.bias", f"model.decoder.layers.{i}.encoder_attn.output_proj.bias"))
rename_keys.append((f"transformer.decoder.layers.{i}.norm1.weight", f"model.decoder.layers.{i}.encoder_attn_layer_norm.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.norm1.bias", f"model.decoder.layers.{i}.encoder_attn_layer_norm.bias"))
rename_keys.append((f"transformer.decoder.layers.{i}.self_attn.out_proj.weight", f"model.decoder.layers.{i}.self_attn.out_proj.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.self_attn.out_proj.bias", f"model.decoder.layers.{i}.self_attn.out_proj.bias"))
rename_keys.append((f"transformer.decoder.layers.{i}.norm2.weight", f"model.decoder.layers.{i}.self_attn_layer_norm.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.norm2.bias", f"model.decoder.layers.{i}.self_attn_layer_norm.bias"))
rename_keys.append((f"transformer.decoder.layers.{i}.linear1.weight", f"model.decoder.layers.{i}.fc1.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.linear1.bias", f"model.decoder.layers.{i}.fc1.bias"))
rename_keys.append((f"transformer.decoder.layers.{i}.linear2.weight", f"model.decoder.layers.{i}.fc2.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.linear2.bias", f"model.decoder.layers.{i}.fc2.bias"))
rename_keys.append((f"transformer.decoder.layers.{i}.norm3.weight", f"model.decoder.layers.{i}.final_layer_norm.weight"))
rename_keys.append((f"transformer.decoder.layers.{i}.norm3.bias", f"model.decoder.layers.{i}.final_layer_norm.bias"))
# fmt: on
return rename_keys
def rename_key(dct, old, new):
val = dct.pop(old)
dct[new] = val
def read_in_decoder_q_k_v(state_dict, config):
# transformer decoder self-attention layers
hidden_size = config.d_model
for i in range(config.decoder_layers):
# read in weights + bias of input projection layer of self-attention
in_proj_weight = state_dict.pop(f"transformer.decoder.layers.{i}.self_attn.in_proj_weight")
in_proj_bias = state_dict.pop(f"transformer.decoder.layers.{i}.self_attn.in_proj_bias")
# next, add query, keys and values (in that order) to the state dict
state_dict[f"model.decoder.layers.{i}.self_attn.q_proj.weight"] = in_proj_weight[:hidden_size, :]
state_dict[f"model.decoder.layers.{i}.self_attn.q_proj.bias"] = in_proj_bias[:hidden_size]
state_dict[f"model.decoder.layers.{i}.self_attn.k_proj.weight"] = in_proj_weight[
hidden_size : hidden_size * 2, :
]
state_dict[f"model.decoder.layers.{i}.self_attn.k_proj.bias"] = in_proj_bias[hidden_size : hidden_size * 2]
state_dict[f"model.decoder.layers.{i}.self_attn.v_proj.weight"] = in_proj_weight[-hidden_size:, :]
state_dict[f"model.decoder.layers.{i}.self_attn.v_proj.bias"] = in_proj_bias[-hidden_size:]
# We will verify our results on an image of cute cats
def prepare_img():
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
im = Image.open(requests.get(url, stream=True).raw)
return im
@torch.no_grad()
def convert_deta_checkpoint(model_name, pytorch_dump_folder_path, push_to_hub):
"""
Copy/paste/tweak model's weights to our DETA structure.
"""
# load config
config = get_deta_config()
# load original state dict
if model_name == "deta-resnet-50":
filename = "adet_checkpoint0011.pth"
elif model_name == "deta-resnet-50-24-epochs":
filename = "adet_2x_checkpoint0023.pth"
else:
raise ValueError(f"Model name {model_name} not supported")
checkpoint_path = hf_hub_download(repo_id="nielsr/deta-checkpoints", filename=filename)
state_dict = torch.load(checkpoint_path, map_location="cpu")["model"]
# rename keys
rename_keys = create_rename_keys(config)
for src, dest in rename_keys:
rename_key(state_dict, src, dest)
read_in_decoder_q_k_v(state_dict, config)
# fix some prefixes
for key in state_dict.copy().keys():
if "transformer.decoder.class_embed" in key or "transformer.decoder.bbox_embed" in key:
val = state_dict.pop(key)
state_dict[key.replace("transformer.decoder", "model.decoder")] = val
if "input_proj" in key:
val = state_dict.pop(key)
state_dict["model." + key] = val
if "level_embed" in key or "pos_trans" in key or "pix_trans" in key or "enc_output" in key:
val = state_dict.pop(key)
state_dict[key.replace("transformer", "model")] = val
# finally, create HuggingFace model and load state dict
model = DetaForObjectDetection(config)
model.load_state_dict(state_dict)
model.eval()
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)
# load image processor
processor = DetaImageProcessor(format="coco_detection")
# verify our conversion on image
img = prepare_img()
encoding = processor(images=img, return_tensors="pt")
pixel_values = encoding["pixel_values"]
outputs = model(pixel_values.to(device))
# verify logits
if model_name == "deta-resnet-50":
expected_logits = torch.tensor(
[[-7.3978, -2.5406, -4.1668], [-8.2684, -3.9933, -3.8096], [-7.0515, -3.7973, -5.8516]]
)
expected_boxes = torch.tensor([[0.5043, 0.4973, 0.9998], [0.2542, 0.5489, 0.4748], [0.5490, 0.2765, 0.0570]])
elif model_name == "deta-resnet-50-24-epochs":
expected_logits = torch.tensor(
[[-7.1688, -2.4857, -4.8669], [-7.8630, -3.8154, -4.2674], [-7.2730, -4.1865, -5.5323]]
)
expected_boxes = torch.tensor([[0.5021, 0.4971, 0.9994], [0.2546, 0.5486, 0.4731], [0.1686, 0.1986, 0.2142]])
assert torch.allclose(outputs.logits[0, :3, :3], expected_logits.to(device), atol=1e-4)
assert torch.allclose(outputs.pred_boxes[0, :3, :3], expected_boxes.to(device), atol=1e-4)
print("Everything ok!")
if pytorch_dump_folder_path:
# Save model and processor
logger.info(f"Saving PyTorch model and processor to {pytorch_dump_folder_path}...")
Path(pytorch_dump_folder_path).mkdir(exist_ok=True)
model.save_pretrained(pytorch_dump_folder_path)
processor.save_pretrained(pytorch_dump_folder_path)
# Push to hub
if push_to_hub:
print("Pushing model and processor to hub...")
model.push_to_hub(f"jozhang97/{model_name}")
processor.push_to_hub(f"jozhang97/{model_name}")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--model_name",
type=str,
default="deta-resnet-50",
choices=["deta-resnet-50", "deta-resnet-50-24-epochs"],
help="Name of the model you'd like to convert.",
)
parser.add_argument(
"--pytorch_dump_folder_path",
default=None,
type=str,
help="Path to the folder to output PyTorch model.",
)
parser.add_argument(
"--push_to_hub", action="store_true", help="Whether or not to push the converted model to the 🤗 hub."
)
args = parser.parse_args()
convert_deta_checkpoint(args.model_name, args.pytorch_dump_folder_path, args.push_to_hub)
|
2881099/dotnetGen_postgresql | 1,811 | ServerWinForm/Settings.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace ServerWinForm {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "14.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
public static Settings Default {
get {
return defaultInstance;
}
}
[global::System.Configuration.UserScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("")]
public string cs_head {
get {
return ((string)(this["cs_head"]));
}
set {
this["cs_head"] = value;
}
}
[global::System.Configuration.UserScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("38888")]
public int socket_port {
get {
return ((int)(this["socket_port"]));
}
set {
this["socket_port"] = value;
}
}
}
}
|
2881099/dotnetGen_mysql | 7,283 | GenMy/WinFormClass/Socket/BaseSocket.cs | /**********************************************************************************
*
* 此文件代码由 NicPetShop.exe 自动生成,您没有必要修改它或删除它
* NicPetShop.exe 能将数据库的关系映射到 c#,让您使用更方便,您无需要担心它的性能
* NicPetShop.exe 将永久免费给大家使用
*
* Author: Nic
* QQ: 2881099
* Email: kellynic@163.com
* 帮助: http://www.kellynic.com/default.asp?tag=NicPetShop
*
**********************************************************************************/
using System;
using System.IO;
using System.Collections.Generic;
using System.Globalization;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using System.Reflection;
/// <summary>
/// Shared framing layer for the socket protocol. Every frame is an
/// 8-character space-padded hex header holding the TOTAL frame length
/// (payload + the 8 header bytes), followed by the payload: a
/// <see cref="SocketMessager"/> header line plus, optionally, its
/// deflate-compressed binary-serialized argument.
/// </summary>
public class BaseSocket {
    /// <summary>
    /// Serializes <paramref name="messager"/> (text header + optional
    /// compressed argument) and sends it as one length-prefixed frame.
    /// </summary>
    protected void Write(Stream stream, SocketMessager messager) {
        using (MemoryStream ms = new MemoryStream()) {
            byte[] buff = Encoding.UTF8.GetBytes(messager.GetCanParseString());
            ms.Write(buff, 0, buff.Length);
            if (messager.Arg != null) {
                // The argument travels as BinaryFormatter output, deflated.
                buff = Deflate.Compress(BaseSocket.Serialize(messager.Arg));
                ms.Write(buff, 0, buff.Length);
            }
            this.Write(stream, ms.ToArray());
        }
    }
    /// <summary>
    /// Writes one frame: header (hex of data.Length + 8, right-padded to 8
    /// chars with spaces) then payload, emitted as a single stream write.
    /// </summary>
    private void Write(Stream stream, byte[] data) {
        byte[] buff;
        using (MemoryStream ms = new MemoryStream()) {
            buff = Encoding.UTF8.GetBytes(Convert.ToString(data.Length + 8, 16).PadRight(8));
            ms.Write(buff, 0, buff.Length);
            ms.Write(data, 0, data.Length);
            buff = ms.ToArray();
        }
        stream.Write(buff, 0, buff.Length);
    }
    /// <summary>
    /// Reads one length-prefixed frame and parses it into a
    /// <see cref="SocketMessager"/>. Returns null when the 8-character hex
    /// length header is not parseable.
    /// </summary>
    /// <exception cref="EndOfStreamException">
    /// The remote side closed the connection mid-frame. (Bug fix: the
    /// original looped forever here, because Stream.Read returns 0 at
    /// end-of-stream and the remaining-byte counter never decreased.)
    /// </exception>
    protected SocketMessager Read(Stream stream) {
        byte[] data = new byte[8];
        int bytes = 0;
        int overs = data.Length;
        string size = string.Empty;
        while (overs > 0) {
            bytes = stream.Read(data, 0, overs);
            if (bytes <= 0) throw new EndOfStreamException("Connection closed while reading the frame header.");
            overs -= bytes;
            // Header is ASCII hex, so per-chunk UTF-8 decoding is safe here.
            size += Encoding.UTF8.GetString(data, 0, bytes);
        }
        if (int.TryParse(size, NumberStyles.HexNumber, null, out overs) == false) {
            return null;
        }
        overs -= data.Length; // header counted itself in the total length
        using (MemoryStream ms = new MemoryStream()) {
            data = new Byte[1024];
            while (overs > 0) {
                bytes = stream.Read(data, 0, overs < data.Length ? overs : data.Length);
                if (bytes <= 0) throw new EndOfStreamException("Connection closed while reading the frame body.");
                overs -= bytes;
                ms.Write(data, 0, bytes);
            }
            return SocketMessager.Parse(ms.ToArray());
        }
    }
    /// <summary>
    /// Returns the index of the first occurrence of <paramref name="find"/>
    /// inside <paramref name="source"/> at or after
    /// <paramref name="startIndex"/>, or -1 when absent or either array is
    /// null/empty. A negative start index is treated as 0.
    /// </summary>
    public static int findBytes(byte[] source, byte[] find, int startIndex) {
        if (find == null) return -1;
        if (find.Length == 0) return -1;
        if (source == null) return -1;
        if (source.Length == 0) return -1;
        if (startIndex < 0) startIndex = 0;
        int idx = -1, idx2 = startIndex - 1;
        do {
            // Find the next candidate first byte, then verify the remainder.
            idx2 = idx = Array.FindIndex<byte>(source, Math.Min(idx2 + 1, source.Length), delegate (byte b) {
                return b == find[0];
            });
            if (idx2 != -1) {
                for (int a = 1; a < find.Length; a++) {
                    if (++idx2 >= source.Length || source[idx2] != find[a]) {
                        idx = -1;
                        break;
                    }
                }
                if (idx != -1) break;
            }
        } while (idx2 != -1);
        return idx;
    }
    /// <summary>
    /// Binary-serializes <paramref name="obj"/>.
    /// SECURITY NOTE(review): BinaryFormatter is unsafe on untrusted data and
    /// obsolete in modern .NET; both peers of this generated tool depend on
    /// the format, so it is flagged here rather than replaced.
    /// </summary>
    public static byte[] Serialize(object obj) {
        IFormatter formatter = new BinaryFormatter();
        using (MemoryStream ms = new MemoryStream()) {
            formatter.Serialize(ms, obj);
            return ms.ToArray();
        }
    }
    /// <summary>
    /// Deserializes bytes produced by <see cref="Serialize"/>, remapping
    /// "Common"-assembly type names to "GenMy" via
    /// <see cref="TransmissionBinder"/>.
    /// SECURITY NOTE(review): see <see cref="Serialize"/>.
    /// </summary>
    public static object Deserialize(byte[] stream) {
        IFormatter formatter = new BinaryFormatter();
        formatter.Binder = new TransmissionBinder();
        using (MemoryStream ms = new MemoryStream(stream)) {
            return formatter.Deserialize(ms);
        }
    }
}
/// <summary>
/// Deserialization binder that remaps type names serialized under the
/// "Common" assembly onto the client's "GenMy" assembly, so payloads sent by
/// the server resolve against the local copies of those types.
/// (Removed the commented-out assembly-scanning experiment that preceded it.)
/// </summary>
internal class TransmissionBinder : SerializationBinder {
    /// <summary>
    /// Resolves <paramref name="typeName"/> after rewriting any embedded
    /// "Common, " assembly token (e.g. in assembly-qualified generic
    /// arguments) to "GenMy, ". Plain names pass through unchanged; returns
    /// null when the type cannot be found, per Type.GetType semantics.
    /// </summary>
    public override Type BindToType(string assemblyName, string typeName) {
        return Type.GetType(typeName.Replace("Common, ", "GenMy, "));
    }
}
/// <summary>
/// One protocol message: a tab-separated text header (id, action, permission,
/// timestamp) terminated by CRLF, optionally followed by a compressed,
/// binary-serialized argument. <see cref="GetCanParseString"/> and
/// <see cref="Parse"/> are the two directions of the wire format.
/// </summary>
public class SocketMessager {
    // Process-wide id counter; each new messager takes the next value.
    private static int _identity;
    // Reserved system messages, recognized by their Action text.
    public static readonly SocketMessager SYS_TEST_LINK = new SocketMessager("\0");
    public static readonly SocketMessager SYS_HELLO_WELCOME = new SocketMessager("Hello, Welcome!");
    public static readonly SocketMessager SYS_ACCESS_DENIED = new SocketMessager("Access Denied.");
    private int _id;
    // Set by the Id setter when the id changes after construction.
    // NOTE(review): public mutable field on purpose? Confirm external callers rely on it.
    public bool _isChangeId;
    private string _action;
    private string _permission;
    private DateTime _remoteTime;
    private object _arg;
    private Exception _exception;
    /// <summary>Creates a message with only an action name.</summary>
    public SocketMessager(string action)
        : this(action, null, null) {
    }
    /// <summary>Creates a message with an action and an argument payload.</summary>
    public SocketMessager(string action, object arg)
        : this(action, null, arg) {
    }
    /// <summary>
    /// Creates a message. Null action/permission become empty strings; the id
    /// comes from the global counter and the timestamp from the local clock.
    /// </summary>
    public SocketMessager(string action, string permission, object arg) {
        this._id = Interlocked.Increment(ref _identity);
        this._action = action == null ? string.Empty : action;
        this._permission = permission == null ? string.Empty : permission;
        this._arg = arg;
        this._remoteTime = DateTime.Now;
    }
    /// <summary>Human-readable dump for logging; not the wire format.</summary>
    public override string ToString() {
        return
            this._remoteTime.ToString("yyyy-MM-dd HH:mm:ss") + "\t" +
            this._id + "\t" +
            this._action.Replace("\t", "\\t") + "\t" +
            this._permission.Replace("\t", "\\t") + "\t" +
            this._arg;
    }
    /// <summary>
    /// Builds the wire header line. System messages use short forms; normal
    /// messages escape backslash, tab and CRLF in action/permission so the
    /// header remains a single tab-separated, CRLF-terminated line.
    /// </summary>
    public string GetCanParseString() {
        if (string.Compare(this._action, SocketMessager.SYS_TEST_LINK.Action) == 0) {
            // Heartbeat: a single NUL byte, no header line.
            return this.Action;
        } else if (
            string.Compare(this._action, SocketMessager.SYS_HELLO_WELCOME.Action) == 0 ||
            string.Compare(this._action, SocketMessager.SYS_ACCESS_DENIED.Action) == 0) {
            return
                this._id + "\t" +
                this.Action + "\r\n";
        } else {
            return
                this._id + "\t" +
                this._action.Replace("\\", "\\\\").Replace("\t", "\\t").Replace("\r\n", "\\n") + "\t" +
                this._permission.Replace("\\", "\\\\").Replace("\t", "\\t").Replace("\r\n", "\\n") + "\t" +
                this._remoteTime.ToString("yyyy-MM-dd HH:mm:ss") + "\r\n";
        }
    }
    /// <summary>
    /// Inverse of <see cref="GetCanParseString"/>: splits the header line on
    /// tabs, unescapes action/permission, and deserializes any trailing
    /// compressed argument bytes into <see cref="Arg"/>.
    /// NOTE(review): unescaping applies "\\"-collapse before "\t"/"\n", so an
    /// action containing a literal backslash followed by 't' or 'n' does not
    /// round-trip exactly — confirm such actions never occur in practice.
    /// </summary>
    public static SocketMessager Parse(byte[] data) {
        if (data == null) return new SocketMessager("NULL");
        // Single NUL byte is the heartbeat probe.
        if (data.Length == 1 && data[0] == 0) return SocketMessager.SYS_TEST_LINK;
        int idx = BaseSocket.findBytes(data, new byte[] { 13, 10 }, 0);
        string text = Encoding.UTF8.GetString(data, 0, idx);
        string[] loc1 = text.Split(new string[] { "\t" }, 4, StringSplitOptions.None);
        string loc2 = loc1[0];
        string loc3 = loc1.Length > 1 ? loc1[1].Replace("\\\\", "\\").Replace("\\t", "\t").Replace("\\n", "\r\n") : null;
        string loc4 = loc1.Length > 2 ? loc1[2].Replace("\\\\", "\\").Replace("\\t", "\t").Replace("\\n", "\r\n") : null;
        string loc5 = loc1.Length > 3 ? loc1[3] : null;
        // Everything after the CRLF is the compressed serialized argument.
        MemoryStream ms = new MemoryStream();
        ms.Write(data, idx + 2, data.Length - idx - 2);
        SocketMessager messager = new SocketMessager(loc3, loc4,
            ms.Length > 0 ? BaseSocket.Deserialize(Deflate.Decompress(ms.ToArray())) : null);
        if (int.TryParse(loc2, out idx)) messager._id = idx;
        if (!string.IsNullOrEmpty(loc5)) DateTime.TryParse(loc5, out messager._remoteTime);
        if (messager._arg is Exception) messager._exception = messager._arg as Exception;
        return messager;
    }
    /// <summary>
    /// Message id: negative on the server side, positive on the client side
    /// (per the original note). Assigning a different value flags
    /// <see cref="_isChangeId"/>.
    /// </summary>
    public int Id {
        get { return _id; }
        set {
            if (_id != value) {
                _isChangeId = true;
            }
            _id = value;
        }
    }
    /// <summary>Action name; empty string when none was given.</summary>
    public string Action {
        get { return _action; }
    }
    /// <summary>Permission token; empty string when none was given.</summary>
    public string Permission {
        get { return _permission; }
    }
    /// <summary>Timestamp carried in the header (sender's local clock).</summary>
    public DateTime RemoteTime {
        get { return _remoteTime; }
    }
    /// <summary>Deserialized argument payload, or null.</summary>
    public object Arg {
        get { return _arg; }
    }
    /// <summary>Set when <see cref="Arg"/> is itself an Exception.</summary>
    public Exception Exception {
        get { return _exception; }
    }
}
2881099/dotnetGen_mysql | 7,753 | GenMy/WinFormClass/Socket/ClientSocket.cs | using System;
using System.IO;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
/// <summary>
/// TCP client on top of <see cref="BaseSocket"/>: a background thread reads
/// framed <see cref="SocketMessager"/> messages, routes replies to
/// per-request handlers registered by the blocking Write overloads, and
/// raises <see cref="Receive"/> for unsolicited messages. Errors surface via
/// <see cref="Error"/>; the loop's end raises <see cref="Closed"/>.
/// </summary>
public class ClientSocket : BaseSocket, IDisposable {
    private bool _isDisposed;
    private IPEndPoint _remotePoint;
    private TcpClient _tcpClient;
    private Thread _thread;
    // Worker-loop flag. NOTE(review): not volatile; cross-thread visibility
    // is assumed — confirm acceptable on the target runtime.
    private bool _running;
    private int _receives;
    private int _errors;
    private object _errors_lock = new object();
    private object _write_lock = new object();
    // Maps messager Id -> handler+event for a caller blocked awaiting a reply.
    private Dictionary<int, SyncReceive> _receiveHandlers = new Dictionary<int, SyncReceive>();
    private object _receiveHandlers_lock = new object();
    // Last send/receive time; drives the idle heartbeat in the read loop.
    private DateTime _lastActive;
    public event ClientSocketClosedEventHandler Closed;
    public event ClientSocketReceiveEventHandler Receive;
    public event ClientSocketErrorEventHandler Error;
    /// <summary>
    /// Resolves the host, connects, and starts the reader thread. No-op when
    /// already running or disposed. Connection failures are reported through
    /// <see cref="Error"/> and <see cref="Closed"/> rather than thrown.
    /// </summary>
    public void Connect(string hostname, int port) {
        if (this._isDisposed == false && this._running == false) {
            this._running = true;
            try {
                IPAddress[] ips = Dns.GetHostAddresses(hostname);
                if (ips.Length == 0) throw new Exception("无法解析“" + hostname + "”");
                this._remotePoint = new IPEndPoint(ips[0], port);
                this._tcpClient = new TcpClient();
                this._tcpClient.Connect(this._remotePoint);
            } catch (Exception ex) {
                this._running = false;
                this.OnError(ex);
                this.OnClosed();
                return;
            }
            this._receives = 0;
            this._errors = 0;
            this._lastActive = DateTime.Now;
            this._thread = new Thread(delegate() {
                while (this._running) {
                    try {
                        NetworkStream ns = this._tcpClient.GetStream();
                        ns.ReadTimeout = 1000 * 20;
                        if (ns.DataAvailable) {
                            SocketMessager messager = base.Read(ns);
                            if (string.Compare(messager.Action, SocketMessager.SYS_TEST_LINK.Action) == 0) {
                                // Heartbeat probe: ignored, only refreshes _lastActive below.
                            } else if (this._receives == 0 &&
                                string.Compare(messager.Action, SocketMessager.SYS_HELLO_WELCOME.Action) == 0) {
                                // Handshake: echo the server's welcome back once.
                                this._receives++;
                                this.Write(messager);
                            } else if (string.Compare(messager.Action, SocketMessager.SYS_ACCESS_DENIED.Action) == 0) {
                                throw new Exception(SocketMessager.SYS_ACCESS_DENIED.Action);
                            } else {
                                ClientSocketReceiveEventArgs e = new ClientSocketReceiveEventArgs(this._receives++, messager);
                                SyncReceive receive = null;
                                if (this._receiveHandlers.TryGetValue(messager.Id, out receive)) {
                                    // A caller is blocked in Write(...) on this id: run its
                                    // handler on a fresh thread, then release the waiter.
                                    new Thread(delegate() {
                                        try {
                                            receive.ReceiveHandler(this, e);
                                        } catch (Exception ex) {
                                            this.OnError(ex);
                                        } finally {
                                            receive.Wait.Set();
                                        }
                                    }).Start();
                                } else if (this.Receive != null) {
                                    // Unsolicited message: raise the public event off-loop.
                                    new Thread(delegate() {
                                        this.OnReceive(e);
                                    }).Start();
                                }
                            }
                            this._lastActive = DateTime.Now;
                        } else {
                            // Idle link: after 3 seconds send a heartbeat so a dead
                            // connection fails fast instead of lingering.
                            TimeSpan ts = DateTime.Now - _lastActive;
                            if (ts.TotalSeconds > 3) {
                                this.Write(SocketMessager.SYS_TEST_LINK);
                            }
                        }
                        // Yield briefly instead of busy-spinning when nothing is pending.
                        if (!ns.DataAvailable) Thread.CurrentThread.Join(1);
                    } catch (Exception ex) {
                        this._running = false;
                        this.OnError(ex);
                    }
                }
                this.Close();
                this.OnClosed();
            });
            this._thread.Start();
        }
    }
    /// <summary>
    /// Stops the worker loop, closes the TCP connection, and wakes every
    /// caller still blocked waiting for a reply.
    /// </summary>
    public void Close() {
        this._running = false;
        if (this._tcpClient != null) {
            this._tcpClient.Close();
        }
        int[] keys = new int[this._receiveHandlers.Count];
        try {
            // Optimistic unlocked snapshot of the pending ids; falls back to a
            // locked copy if a concurrent mutation makes CopyTo throw.
            this._receiveHandlers.Keys.CopyTo(keys, 0);
        } catch {
            lock (this._receiveHandlers_lock) {
                keys = new int[this._receiveHandlers.Count];
                this._receiveHandlers.Keys.CopyTo(keys, 0);
            }
        }
        foreach (int key in keys) {
            SyncReceive receiveHandler = null;
            if (this._receiveHandlers.TryGetValue(key, out receiveHandler)) {
                receiveHandler.Wait.Set();
            }
        }
        lock (this._receiveHandlers_lock) {
            this._receiveHandlers.Clear();
        }
    }
    /// <summary>Sends a message without waiting for a reply.</summary>
    public void Write(SocketMessager messager) {
        this.Write(messager, null, TimeSpan.Zero);
    }
    /// <summary>Sends a message and waits up to 20 seconds for its reply.</summary>
    public void Write(SocketMessager messager, ClientSocketReceiveEventHandler receiveHandler) {
        this.Write(messager, receiveHandler, TimeSpan.FromSeconds(20));
    }
    /// <summary>
    /// Sends a message. When <paramref name="receiveHandler"/> is non-null it
    /// is registered under the message id and the calling thread blocks until
    /// the reply arrives or <paramref name="timeout"/> elapses. Errors are
    /// reported through <see cref="Error"/>, not thrown.
    /// </summary>
    public void Write(SocketMessager messager, ClientSocketReceiveEventHandler receiveHandler, TimeSpan timeout) {
        SyncReceive syncReceive = null;
        try {
            if (receiveHandler != null) {
                syncReceive = new SyncReceive(receiveHandler);
                lock (this._receiveHandlers_lock) {
                    if (!this._receiveHandlers.ContainsKey(messager.Id)) {
                        this._receiveHandlers.Add(messager.Id, syncReceive);
                    } else {
                        this._receiveHandlers[messager.Id] = syncReceive;
                    }
                }
            }
            // Serialize writers so frames are not interleaved on the stream.
            lock (_write_lock) {
                NetworkStream ns = this._tcpClient.GetStream();
                base.Write(ns, messager);
            }
            this._lastActive = DateTime.Now;
            if (syncReceive != null) {
                syncReceive.Wait.Reset();
                syncReceive.Wait.WaitOne(timeout, false);
                syncReceive.Wait.Set();
                lock (this._receiveHandlers_lock) {
                    this._receiveHandlers.Remove(messager.Id);
                }
            }
        } catch (Exception ex) {
            this._running = false;
            this.OnError(ex);
            if (syncReceive != null) {
                syncReceive.Wait.Set();
                lock (this._receiveHandlers_lock) {
                    this._receiveHandlers.Remove(messager.Id);
                }
            }
        }
    }
    /// <summary>Raises <see cref="Closed"/> on a new thread; handler errors go to <see cref="Error"/>.</summary>
    protected virtual void OnClosed(EventArgs e) {
        if (this.Closed != null) {
            new Thread(delegate() {
                try {
                    this.Closed(this, e);
                } catch (Exception ex) {
                    this.OnError(ex);
                }
            }).Start();
        }
    }
    protected void OnClosed() {
        this.OnClosed(new EventArgs());
    }
    /// <summary>Raises <see cref="Receive"/>; handler errors go to <see cref="Error"/>.</summary>
    protected virtual void OnReceive(ClientSocketReceiveEventArgs e) {
        if (this.Receive != null) {
            try {
                this.Receive(this, e);
            } catch (Exception ex) {
                this.OnError(ex);
            }
        }
    }
    /// <summary>Raises <see cref="Error"/> synchronously.</summary>
    protected virtual void OnError(ClientSocketErrorEventArgs e) {
        if (this.Error != null) {
            this.Error(this, e);
        }
    }
    /// <summary>Increments the error counter (under its lock) and raises <see cref="Error"/>.</summary>
    protected void OnError(Exception ex) {
        int errors = 0;
        lock (this._errors_lock) {
            errors = ++this._errors;
        }
        ClientSocketErrorEventArgs e = new ClientSocketErrorEventArgs(ex, errors);
        this.OnError(e);
    }
    /// <summary>True while the reader thread's loop is active.</summary>
    public bool Running {
        get { return this._running; }
    }
    // Pairs a reply handler with the event the sending thread blocks on.
    class SyncReceive : IDisposable {
        private ClientSocketReceiveEventHandler _receiveHandler;
        private ManualResetEvent _wait;
        public SyncReceive(ClientSocketReceiveEventHandler receiveHandler) {
            this._receiveHandler = receiveHandler;
            this._wait = new ManualResetEvent(false);
        }
        public ClientSocketReceiveEventHandler ReceiveHandler {
            get { return _receiveHandler; }
        }
        public ManualResetEvent Wait {
            get { return _wait; }
        }
        #region IDisposable 成员
        public void Dispose() {
            this._wait.Set();
            this._wait.Close();
        }
        #endregion
    }
    #region IDisposable 成员
    /// <summary>Marks the instance disposed and closes the connection.</summary>
    public void Dispose() {
        this._isDisposed = true;
        this.Close();
    }
    #endregion
}
/// <summary>Raised after the client socket's worker loop ends and the connection is closed.</summary>
public delegate void ClientSocketClosedEventHandler(object sender, EventArgs e);
/// <summary>Raised when an exception is caught during socket processing.</summary>
public delegate void ClientSocketErrorEventHandler(object sender, ClientSocketErrorEventArgs e);
/// <summary>Raised when a message arrives (used both for per-request reply handlers and the unsolicited-message event).</summary>
public delegate void ClientSocketReceiveEventHandler(object sender, ClientSocketReceiveEventArgs e);
/// <summary>Event data carried by ClientSocket error notifications.</summary>
public class ClientSocketErrorEventArgs : EventArgs {
    private readonly Exception _ex;
    private readonly int _errorCount;

    /// <summary>Wraps the failure and the socket's running error count.</summary>
    public ClientSocketErrorEventArgs(Exception exception, int errors) {
        _ex = exception;
        _errorCount = errors;
    }

    /// <summary>Number of errors observed so far.</summary>
    public int Errors {
        get { return _errorCount; }
    }

    /// <summary>The exception that triggered this event.</summary>
    public Exception Exception {
        get { return _ex; }
    }
}
/// <summary>Event data delivered when a message arrives on a ClientSocket.</summary>
public class ClientSocketReceiveEventArgs : EventArgs {
    private readonly int _receiveCount;
    private readonly SocketMessager _message;

    /// <summary>Wraps the received message and the socket's receive counter.</summary>
    public ClientSocketReceiveEventArgs(int receives, SocketMessager messager) {
        _receiveCount = receives;
        _message = messager;
    }

    /// <summary>How many messages this socket has received so far.</summary>
    public int Receives {
        get { return _receiveCount; }
    }

    /// <summary>The parsed message.</summary>
    public SocketMessager Messager {
        get { return _message; }
    }
}
2881099/dotnetGen_postgresql | 4,228 | ServerWinForm/ServerWinForm.csproj | <?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="14.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.50727</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{4622A792-72AA-4519-94D7-E00A90609394}</ProjectGuid>
<OutputType>WinExe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>ServerWinForm</RootNamespace>
<AssemblyName>ServerWinForm</AssemblyName>
<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
<FileUpgradeFlags>
</FileUpgradeFlags>
<UpgradeBackupLocation>
</UpgradeBackupLocation>
<OldToolsVersion>2.0</OldToolsVersion>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Data" />
<Reference Include="System.Deployment" />
<Reference Include="System.Drawing" />
<Reference Include="System.Windows.Forms" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="Form1.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="Form1.Designer.cs">
<DependentUpon>Form1.cs</DependentUpon>
</Compile>
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<EmbeddedResource Include="Form1.resx">
<SubType>Designer</SubType>
<DependentUpon>Form1.cs</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
<DesignTime>True</DesignTime>
</Compile>
<None Include="app.config" />
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<None Include="Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
<Compile Include="Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
<DependentUpon>Settings.settings</DependentUpon>
</Compile>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj">
<Project>{F0054101-9AC9-4E0E-9E78-44EA89FC5C19}</Project>
<Name>Common</Name>
</ProjectReference>
<ProjectReference Include="..\Server\Server.csproj">
<Project>{EFE1F5D6-AB1F-4FA6-8E10-9B8A197B31C7}</Project>
<Name>Server</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> |
2881099/dotnetGen_postgresql | 6,045 | MakeCode/FrmView.designer.cs | namespace MakeCode {
// NOTE(review): WinForms-designer-generated partial class; InitializeComponent
// must keep the designer serializer's shape, so only comments are touched.
partial class FrmView {
    /// <summary>
    /// Required designer variable.
    /// </summary>
    private System.ComponentModel.IContainer components = null;
    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
    protected override void Dispose(bool disposing) {
        if (disposing && (components != null)) {
            components.Dispose();
        }
        base.Dispose(disposing);
    }
    #region Windows 窗体设计器生成的代码
    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent() {
        System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle17 = new System.Windows.Forms.DataGridViewCellStyle();
        System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle18 = new System.Windows.Forms.DataGridViewCellStyle();
        this.btnOk = new System.Windows.Forms.Button();
        this.dgvGridview = new System.Windows.Forms.DataGridView();
        this.dgvColIcon = new System.Windows.Forms.DataGridViewImageColumn();
        this.dgvColName = new System.Windows.Forms.DataGridViewTextBoxColumn();
        this.dgvColDBType = new System.Windows.Forms.DataGridViewTextBoxColumn();
        this.dgvColAllowDBNull = new System.Windows.Forms.DataGridViewCheckBoxColumn();
        this.dgvColView = new System.Windows.Forms.DataGridViewLinkColumn();
        ((System.ComponentModel.ISupportInitialize)(this.dgvGridview)).BeginInit();
        this.SuspendLayout();
        //
        // btnOk
        //
        this.btnOk.DialogResult = System.Windows.Forms.DialogResult.Cancel;
        this.btnOk.Location = new System.Drawing.Point(262, 372);
        this.btnOk.Name = "btnOk";
        this.btnOk.Size = new System.Drawing.Size(62, 21);
        this.btnOk.TabIndex = 1;
        this.btnOk.Text = "Ok";
        this.btnOk.UseVisualStyleBackColor = true;
        this.btnOk.Click += new System.EventHandler(this.btnOk_Click);
        //
        // dgvGridview
        //
        this.dgvGridview.AllowUserToAddRows = false;
        this.dgvGridview.AllowUserToResizeRows = false;
        this.dgvGridview.BackgroundColor = System.Drawing.SystemColors.ActiveCaptionText;
        this.dgvGridview.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize;
        this.dgvGridview.Columns.AddRange(new System.Windows.Forms.DataGridViewColumn[] {
        this.dgvColIcon,
        this.dgvColName,
        this.dgvColDBType,
        this.dgvColAllowDBNull,
        this.dgvColView});
        this.dgvGridview.Location = new System.Drawing.Point(12, 12);
        this.dgvGridview.Name = "dgvGridview";
        this.dgvGridview.ReadOnly = true;
        this.dgvGridview.RowHeadersWidthSizeMode = System.Windows.Forms.DataGridViewRowHeadersWidthSizeMode.DisableResizing;
        this.dgvGridview.RowTemplate.Height = 23;
        this.dgvGridview.Size = new System.Drawing.Size(562, 352);
        this.dgvGridview.TabIndex = 0;
        //
        // dgvColIcon
        //
        this.dgvColIcon.AutoSizeMode = System.Windows.Forms.DataGridViewAutoSizeColumnMode.ColumnHeader;
        dataGridViewCellStyle17.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleCenter;
        dataGridViewCellStyle17.NullValue = null;
        dataGridViewCellStyle17.SelectionBackColor = System.Drawing.Color.White;
        this.dgvColIcon.DefaultCellStyle = dataGridViewCellStyle17;
        this.dgvColIcon.HeaderText = " ";
        this.dgvColIcon.Name = "dgvColIcon";
        this.dgvColIcon.ReadOnly = true;
        this.dgvColIcon.Width = 21;
        //
        // dgvColName
        //
        this.dgvColName.HeaderText = "Name";
        this.dgvColName.Name = "dgvColName";
        this.dgvColName.ReadOnly = true;
        this.dgvColName.Width = 210;
        //
        // dgvColDBType
        //
        this.dgvColDBType.HeaderText = "SqlType";
        this.dgvColDBType.Name = "dgvColDBType";
        this.dgvColDBType.ReadOnly = true;
        this.dgvColDBType.Width = 130;
        //
        // dgvColAllowDBNull
        //
        this.dgvColAllowDBNull.HeaderText = "AllowDBNull";
        this.dgvColAllowDBNull.Name = "dgvColAllowDBNull";
        this.dgvColAllowDBNull.ReadOnly = true;
        this.dgvColAllowDBNull.Resizable = System.Windows.Forms.DataGridViewTriState.True;
        this.dgvColAllowDBNull.SortMode = System.Windows.Forms.DataGridViewColumnSortMode.Automatic;
        this.dgvColAllowDBNull.Width = 80;
        //
        // dgvColView
        //
        dataGridViewCellStyle18.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleCenter;
        dataGridViewCellStyle18.SelectionBackColor = System.Drawing.Color.White;
        this.dgvColView.DefaultCellStyle = dataGridViewCellStyle18;
        this.dgvColView.HeaderText = "Relation";
        this.dgvColView.Name = "dgvColView";
        this.dgvColView.ReadOnly = true;
        this.dgvColView.Resizable = System.Windows.Forms.DataGridViewTriState.True;
        this.dgvColView.SortMode = System.Windows.Forms.DataGridViewColumnSortMode.Automatic;
        this.dgvColView.Text = "View";
        this.dgvColView.Width = 60;
        //
        // FrmView
        //
        this.AcceptButton = this.btnOk;
        this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
        this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
        this.BackColor = System.Drawing.Color.BlanchedAlmond;
        this.CancelButton = this.btnOk;
        this.ClientSize = new System.Drawing.Size(586, 405);
        this.Controls.Add(this.dgvGridview);
        this.Controls.Add(this.btnOk);
        this.ForeColor = System.Drawing.Color.Black;
        this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedToolWindow;
        this.MaximizeBox = false;
        this.Name = "FrmView";
        this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
        ((System.ComponentModel.ISupportInitialize)(this.dgvGridview)).EndInit();
        this.ResumeLayout(false);
    }
    #endregion
    private System.Windows.Forms.Button btnOk;
    public System.Windows.Forms.DataGridView dgvGridview;
    private System.Windows.Forms.DataGridViewImageColumn dgvColIcon;
    private System.Windows.Forms.DataGridViewTextBoxColumn dgvColName;
    private System.Windows.Forms.DataGridViewTextBoxColumn dgvColDBType;
    private System.Windows.Forms.DataGridViewCheckBoxColumn dgvColAllowDBNull;
    private System.Windows.Forms.DataGridViewLinkColumn dgvColView;
}
} |
2881099/dotnetGen_sqlserver | 140,364 | Server/CodeBuild(Code).cs | using System;
using System.Collections.Generic;
using System.Data;
using System.Text;
using Model;
namespace Server {
internal partial class CodeBuild {
public void SetOutput(bool[] outputs) {
if (this._tables.Count == outputs.Length) {
for (int a = 0; a < outputs.Length; a++) {
this._tables[a].IsOutput = outputs[a];
}
}
}
public List<BuildInfo> Build(string solutionName, bool isSolution, bool isMakeAdmin, bool isDownloadRes) {
Logger.remotor.Info("Build: " + solutionName + ",isSolution: " + isSolution + ",isMakeAdmin: " + isMakeAdmin + ",isDownloadRes: " + isDownloadRes + "(" + _client.Server + "," + _client.Username + "," + _client.Password + "," + _client.Database + ")");
List<BuildInfo> loc1 = new List<BuildInfo>();
//solutionName = CodeBuild.UFString(solutionName);
string dbName = CodeBuild.UFString(CodeBuild.GetCSName(_client.Database));
string connectionStringName = _client.Database + "ConnectionString";
string basicName = "Build";
string wwwroot_sitemap = "";
Dictionary<string, bool> isMakedHtmlSelect = new Dictionary<string, bool>();
StringBuilder Model_Build_ExtensionMethods_cs = new StringBuilder();
List<string> admin_controllers_syscontroller_init_sysdir = new List<string>();
StringBuilder sb1 = new StringBuilder();
StringBuilder sb2 = new StringBuilder();
StringBuilder sb3 = new StringBuilder();
StringBuilder sb4 = new StringBuilder();
StringBuilder sb5 = new StringBuilder();
StringBuilder sb6 = new StringBuilder();
StringBuilder sb7 = new StringBuilder();
StringBuilder sb8 = new StringBuilder();
StringBuilder sb9 = new StringBuilder();
StringBuilder sb10 = new StringBuilder();
StringBuilder sb11 = new StringBuilder();
StringBuilder sb12 = new StringBuilder();
StringBuilder sb13 = new StringBuilder();
StringBuilder sb14 = new StringBuilder();
StringBuilder sb15 = new StringBuilder();
StringBuilder sb16 = new StringBuilder();
StringBuilder sb17 = new StringBuilder();
StringBuilder sb18 = new StringBuilder();
StringBuilder sb19 = new StringBuilder();
StringBuilder sb20 = new StringBuilder();
StringBuilder sb21 = new StringBuilder();
StringBuilder sb22 = new StringBuilder();
StringBuilder sb23 = new StringBuilder();
StringBuilder sb24 = new StringBuilder();
StringBuilder sb25 = new StringBuilder();
StringBuilder sb26 = new StringBuilder();
StringBuilder sb27 = new StringBuilder();
StringBuilder sb28 = new StringBuilder();
StringBuilder sb29 = new StringBuilder();
AnonymousHandler clearSb = delegate () {
sb1.Remove(0, sb1.Length);
sb2.Remove(0, sb2.Length);
sb3.Remove(0, sb3.Length);
sb4.Remove(0, sb4.Length);
sb5.Remove(0, sb5.Length);
sb6.Remove(0, sb6.Length);
sb7.Remove(0, sb7.Length);
sb8.Remove(0, sb8.Length);
sb9.Remove(0, sb9.Length);
sb10.Remove(0, sb10.Length);
sb11.Remove(0, sb11.Length);
sb12.Remove(0, sb12.Length);
sb13.Remove(0, sb13.Length);
sb14.Remove(0, sb14.Length);
sb15.Remove(0, sb15.Length);
sb16.Remove(0, sb16.Length);
sb17.Remove(0, sb17.Length);
sb18.Remove(0, sb18.Length);
sb19.Remove(0, sb19.Length);
sb20.Remove(0, sb20.Length);
sb21.Remove(0, sb21.Length);
sb22.Remove(0, sb22.Length);
sb23.Remove(0, sb23.Length);
sb24.Remove(0, sb24.Length);
sb25.Remove(0, sb25.Length);
sb26.Remove(0, sb26.Length);
sb27.Remove(0, sb27.Length);
sb28.Remove(0, sb28.Length);
sb29.Remove(0, sb29.Length);
};
if (isSolution) {
#region solution.sln
sb1.AppendFormat(CONST.sln, solutionName,
Guid.NewGuid().ToString().ToUpper(),
Guid.NewGuid().ToString().ToUpper(),
Guid.NewGuid().ToString().ToUpper(),
Guid.NewGuid().ToString().ToUpper(),
Guid.NewGuid().ToString().ToUpper(),
Guid.NewGuid().ToString().ToUpper(),
Guid.NewGuid().ToString().ToUpper(),
Guid.NewGuid().ToString().ToUpper(),
Guid.NewGuid().ToString().ToUpper(),
Guid.NewGuid().ToString().ToUpper());
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"..\", solutionName, ".sln"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Project Infrastructure
#region Controllers\BaseController.cs
sb1.Append(Server.Properties.Resources.Infrastructure_Controllers_BaseController_cs);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Infrastructure\Controllers\BaseController.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Controllers\CustomExceptionFilter.cs
sb1.Append(Server.Properties.Resources.Infrastructure_Controllers_CustomExceptionFilter_cs);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Infrastructure\Controllers\CustomExceptionFilter.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Extensions\GlobalExtensions.cs
sb1.Append(Server.Properties.Resources.Infrastructure_Extensions_GlobalExtensions_cs);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Infrastructure\Extensions\GlobalExtensions.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region ModuleBasic\IModuleInitializer.cs
sb1.Append(Server.Properties.Resources.Infrastructure_ModuleBasic_IModuleInitializer_cs);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Infrastructure\ModuleBasic\IModuleInitializer.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region ModuleBasic\ModuleInfo.cs
sb1.Append(Server.Properties.Resources.Infrastructure_ModuleBasic_ModuleInfo_cs);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Infrastructure\ModuleBasic\ModuleInfo.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region ModuleBasic\ModuleViewLocationExpander.cs
sb1.Append(Server.Properties.Resources.Infrastructure_ModuleBasic_ModuleViewLocationExpander_cs);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Infrastructure\ModuleBasic\ModuleViewLocationExpander.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Infrastructure.csproj
sb1.AppendFormat(CONST.Infrastructure_csproj, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Infrastructure\Infrastructure.csproj"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#endregion
}
foreach (TableInfo table in _tables) {
if (table.IsOutput == false) continue;
if (table.Type == "P") continue;
//if (table.Uniques.Count == 0)
// throw new Exception("检查到表 “" + table.Owner + "." + table.Name + "” 没有设定惟一键!");
if (table.Columns.Count == 0) continue;
#region commom variable define
string uClass_Name = CodeBuild.UFString(table.ClassName);
string nClass_Name = table.ClassName;
string nTable_Name = "[" + table.Owner + "].[" + table.Name + "]";
string Class_Name_BLL_Full = string.Format(@"{0}.BLL.{1}", solutionName, uClass_Name);
string Class_Name_Model_Full = string.Format(@"{0}.Model.{1}", solutionName, uClass_Name);
string pkCsParam = "";
string pkCsParamNoType = "";
string pkCsParamNoTypeFieldInit = "";
string pkCsParamNoTypeByval = "";
string pkSqlParamFormat = "";
string pkSqlParam = "";
string pkSpNotNull = "";
string pkEvalsQuerystring = "";
string CsParam1 = "";
string CsParamNoType1 = "";
string CsParam2 = "";
string CsParamNoType2 = "";
string CsParam3 = "";
string CsParamNoType3 = "";
string csItemAllFieldCopy = "";
string pkMvcRoute = "";
string orderBy = table.Clustereds.Count > 0 ?
string.Join(", ", table.Clustereds.ConvertAll<string>(delegate (ColumnInfo cli) {
return "a.[" + cli.Name + "]" + (cli.Orderby == DataSort.ASC ? string.Empty : string.Concat(" ", cli.Orderby));
}).ToArray()) :
table.Uniques.Count > 0 ?
string.Join(", ", table.Uniques[0].ConvertAll<string>(delegate (ColumnInfo cli) {
return "a.[" + cli.Name + "]" + (cli.Orderby == DataSort.ASC ? string.Empty : string.Concat(" ", cli.Orderby));
}).ToArray()) : "";
int pkSqlParamFormat_idx = -1;
if (table.PrimaryKeys.Count > 0) {
foreach (ColumnInfo columnInfo in table.PrimaryKeys) {
pkCsParam += CodeBuild.GetCSType(columnInfo.Type) + " " + CodeBuild.UFString(columnInfo.Name) + ", ";
pkCsParamNoType += CodeBuild.UFString(columnInfo.Name) + ", ";
pkCsParamNoTypeFieldInit += UFString(columnInfo.Name) + " = " + UFString(columnInfo.Name) + ", ";
pkCsParamNoTypeByval += string.Format(GetCSTypeValue(columnInfo.Type), UFString(columnInfo.Name)) + ", ";
pkSqlParamFormat += "[" + columnInfo.Name + "] = {" + ++pkSqlParamFormat_idx + "} AND ";
pkSqlParam += "[" + columnInfo.Name + "] = @" + columnInfo.Name + " AND ";
pkSpNotNull += "NOT @" + columnInfo.Name + " IS NULL AND ";
pkEvalsQuerystring += string.Format("{0}=<%# Eval(\"{0}\") %>&", CodeBuild.UFString(columnInfo.Name));
pkMvcRoute += "{" + CodeBuild.UFString(columnInfo.Name) + "}/";
}
pkCsParam = pkCsParam.Substring(0, pkCsParam.Length - 2);
pkCsParamNoType = pkCsParamNoType.Substring(0, pkCsParamNoType.Length - 2);
pkCsParamNoTypeByval = pkCsParamNoTypeByval.Substring(0, pkCsParamNoTypeByval.Length - 2);
pkSqlParamFormat = pkSqlParamFormat.Substring(0, pkSqlParamFormat.Length - 5);
pkSqlParam = pkSqlParam.Substring(0, pkSqlParam.Length - 5);
pkSpNotNull = pkSpNotNull.Substring(0, pkSpNotNull.Length - 5);
pkEvalsQuerystring = pkEvalsQuerystring.Substring(0, pkEvalsQuerystring.Length - 1);
}
// Build three tiers of column parameter lists for the generated helpers:
//   CsParam1/CsParamNoType1 - every column
//   CsParam2/CsParamNoType2 - excludes identity columns and a lone Guid? primary key
//   CsParam3/CsParamNoType3 - additionally excludes create_time/update_time/is_deleted
// csItemAllFieldCopy accumulates the body of the generated CopyItemAllField method.
foreach (ColumnInfo columnInfo in table.Columns) {
string getcstype = CodeBuild.GetCSType(columnInfo.Type);
CsParam1 += getcstype + " " + CodeBuild.UFString(columnInfo.Name) + ", ";
CsParamNoType1 += CodeBuild.UFString(columnInfo.Name) + ", ";
csItemAllFieldCopy += string.Format(@"
			item.{0} = newitem.{0};", UFString(columnInfo.Name));
if (columnInfo.IsIdentity) {
// Identity values are assigned by the database, so they are excluded from tiers 2/3.
//CsParamNoType2 += "null, ";
} else if (columnInfo.IsPrimaryKey && getcstype == "Guid?" && table.PrimaryKeys.Count == 1) {
// A single Guid? primary key is generated by the framework, so it is excluded as well.
} else {
CsParam2 += getcstype + " " + CodeBuild.UFString(columnInfo.Name) + ", ";
CsParamNoType2 += string.Format("\r\n				{0} = {0}, ", UFString(columnInfo.Name));
// Deliberate empty statement: audit columns (create_time/update_time/is_deleted) are
// kept out of tier 3 — they are filled in automatically by the generated code.
if (getcstype == "DateTime?" && (columnInfo.Name.ToLower() == "create_time" || columnInfo.Name.ToLower() == "update_time") ||
getcstype == "bool?" && (columnInfo.Name.ToLower() == "is_deleted")) ;
else {
CsParam3 += getcstype + " " + UFString(columnInfo.Name) + ", ";
CsParamNoType3 += string.Format("\r\n				{0} = {0}, ", UFString(columnInfo.Name));
}
}
}
// Trim the trailing ", " separators (tiers 2/3 may be empty, hence the guards).
CsParam1 = CsParam1.Substring(0, CsParam1.Length - 2);
CsParamNoType1 = CsParamNoType1.Substring(0, CsParamNoType1.Length - 2);
if (CsParam2.Length > 0) CsParam2 = CsParam2.Substring(0, CsParam2.Length - 2);
if (CsParamNoType2.Length > 0) CsParamNoType2 = CsParamNoType2.Substring(0, CsParamNoType2.Length - 2);
if (CsParam3.Length > 0) CsParam3 = CsParam3.Substring(0, CsParam3.Length - 2);
if (CsParamNoType3.Length > 0) CsParamNoType3 = CsParamNoType3.Substring(0, CsParamNoType3.Length - 2);
#endregion
#region Model *.cs
sb1.AppendFormat(
@"using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace {0}.Model {{
[JsonObject(MemberSerialization.OptIn)]
public partial class {1}Info {{
#region fields
", solutionName, uClass_Name);
Dictionary<string, string> innerjoinObjs = new Dictionary<string, string>();
bool Is_System_ComponentModel = false;
int column_idx = -1;
foreach (ColumnInfo column in table.Columns) {
column_idx++;
string csType = CodeBuild.GetCSType(column.Type);
string nColumn_Name = column.Name;
string uColumn_Name = CodeBuild.UFString(column.Name);
string comment = _column_coments.ContainsKey(table.FullName) && _column_coments[table.FullName].ContainsKey(column.Name) ? _column_coments[table.FullName][column.Name] : column.Name;
string prototype_comment = comment == column.Name ? "" : string.Format(@"/// <summary>
/// {0}
/// </summary>
", comment.Replace("\r\n", "\n").Replace("\n", "\r\n /// "));
sb1.AppendFormat(
@" private {0} _{1};
", csType, uColumn_Name);
string tmpinfo = string.Empty;
List<string> tsvarr = new List<string>();
// Side-effecting FindAll: besides collecting the foreign keys that involve the current
// column, the delegate emits the backing field (sb1) and the lazy navigation property
// (tmpinfo) for each FK, and records cache-reset statements in tsvarr for the setter.
List<ForeignKeyInfo> fks = table.ForeignKeys.FindAll(delegate (ForeignKeyInfo fk) {
int fkc1idx = 0;
string fkcsBy = "By";
string fkcsParms = string.Empty;
string fkcsIfNull = string.Empty;
// Find is also used to count the FK columns (fkc1idx) and to accumulate the GetItem
// argument list. NOTE(review): Find short-circuits on the first match, so
// fkc1idx == fk.Columns.Count holds only when the current column is the FK's LAST
// column — this is what makes the property below emit exactly once per FK; confirm.
ColumnInfo fkc = fk.Columns.Find(delegate (ColumnInfo c1) {
fkc1idx++;
fkcsParms += string.Format(GetCSTypeValue(c1.Type), "_" + UFString(c1.Name)) + ", ";
fkcsIfNull += " && _" + UFString(c1.Name) + " != null";
return c1.Name == column.Name;
});
if (fk.ReferencedTable != null) {
fk.ReferencedColumns.ForEach(delegate (ColumnInfo c1) {
fkcsBy += CodeBuild.UFString(c1.Name) + "And";
});
} else {
// Referenced table is outside the generated set; only its column names are known.
fk.ReferencedColumnNames.ForEach(delegate (string c1) {
fkcsBy += CodeBuild.UFString(c1) + "And";
});
}
// Current column is not part of this FK.
if (fkc == null) return false;
string FK_uClass_Name = fk.ReferencedTable != null ? CodeBuild.UFString(fk.ReferencedTable.ClassName) :
CodeBuild.UFString(TableInfo.GetClassName(fk.ReferencedTableName));
string FK_uClass_Name_full = fk.ReferencedTable != null ? FK_uClass_Name :
string.Format(@"{0}.Model.{1}", solutionName, FK_uClass_Name);
string FK_uEntry_Name = fk.ReferencedTable != null ? CodeBuild.GetCSName(fk.ReferencedTable.Name) :
CodeBuild.GetCSName(TableInfo.GetEntryName(fk.ReferencedTableName));
string tableNamefe3 = fk.ReferencedTable != null ? fk.ReferencedTable.Name : FK_uEntry_Name;
// Derive the navigation member name from the FK column name and referenced table name
// (e.g. creator_person_id referencing person -> creator_person).
string memberName = fk.Columns[0].Name.IndexOf(tableNamefe3) == -1 ? tableNamefe3 :
(fk.Columns[0].Name.Substring(0, fk.Columns[0].Name.IndexOf(tableNamefe3)) + tableNamefe3);
if (fk.Columns[0].Name.IndexOf(tableNamefe3) == 0 && fk.ReferencedTable != null) memberName = fk.ReferencedTable.ClassName;
tsvarr.Add(string.Format(@"_obj_{0} = null;", memberName));
if (fkc1idx == fk.Columns.Count) {
// Drop trailing "And" / ", " from the accumulated fragments.
fkcsBy = fkcsBy.Remove(fkcsBy.Length - 3);
fkcsParms = fkcsParms.Remove(fkcsParms.Length - 2);
// Primary-key-referencing FKs use the plain GetItem overload (no "By..." suffix).
if (fk.ReferencedColumns.Count > 0 && fk.ReferencedColumns[0].IsPrimaryKey ||
fk.ReferencedTable == null && fk.ReferencedIsPrimaryKey) {
fkcsBy = string.Empty;
}
sb1.AppendFormat(
@"		private {0}Info _obj_{1};
", FK_uClass_Name_full, memberName);
tmpinfo += string.Format(
@"		public {0}Info Obj_{1} {{
			get {{
				if (_obj_{1} == null{6}) _obj_{1} = {2}.BLL.{5}.GetItem{3}({4});
				return _obj_{1};
			}}
			internal set {{ _obj_{1} = value; }}
		}}
", FK_uClass_Name_full, memberName, solutionName, fkcsBy, fkcsParms, FK_uClass_Name, fkcsIfNull);
// If an Obj_<fkTable> property does not already exist, add one; otherwise InnerJoin.ToList
// would fail with "Obj_<fkTable> not found".
// e.g. a table with only creator_person_id still needs a generated Obj_person property.
// NOTE(review): fk.ReferencedTable is dereferenced without a null check here, although the
// branches above handle fk.ReferencedTable == null — confirm this path cannot be reached
// with an out-of-set referenced table, otherwise this throws NullReferenceException.
string fkTableClassName = fk.ReferencedTable.ClassName;
if (memberName == fkTableClassName) {
// An Obj_<fkTable> property already exists, so no extra code needs to be generated.
if (innerjoinObjs.ContainsKey(fkTableClassName)) innerjoinObjs.Remove(fkTableClassName);
innerjoinObjs.Add(fkTableClassName, "");
} else {
if (innerjoinObjs.ContainsKey(fkTableClassName)) {
if (!string.IsNullOrEmpty(innerjoinObjs[fkTableClassName]))
// Several FKs reference the same table, e.g. a_person_id and b_person_id.
innerjoinObjs[fkTableClassName] = string.Format(
@"
		/// <summary>
		/// 配合 InnerJoin .ToList 查询临时使用
		/// </summary>
		public {0}Info Obj_{1} {{ get; internal set; }}", UFString(fkTableClassName), fkTableClassName, memberName);
} else
// Only one FK references the table, e.g. a_person_id alone.
innerjoinObjs.Add(fkTableClassName, string.Format(
@"
		/// <summary>
		/// 与 Obj_{2} 同引用
		/// </summary>
		public {0}Info Obj_{1} {{
			get {{ return this.Obj_{2}; }}
			internal set {{ this.Obj_{2} = value; }}
		}}", UFString(fkTableClassName), fkTableClassName, memberName));
}
}
// fkc was already verified non-null above, so matching FKs are always kept.
return fkc != null;
});
if (fks.Count > 0) {
// Column participates in at least one FK: generate a setter that resets the cached
// navigation objects (tsvarr) whenever the value actually changes.
string tmpsetvalue = string.Format(
@"		{2}[JsonProperty] public {0} {1} {{
			get {{ return _{1}; }}
			set {{
				if (_{1} != value) ", csType, uColumn_Name, prototype_comment);
string tsvstr = string.Join(@"
", tsvarr.ToArray());
if (fks.Count > 1) {
// Multiple cache resets need a block; a single reset can follow the if directly.
tmpsetvalue += string.Format(@"{{
					{0}
				}}", tsvstr);
} else {
tmpsetvalue += tsvstr;
}
tmpsetvalue += string.Format(@"
				_{0} = value;
			}}
		}}
", uColumn_Name);
sb2.Append(tmpsetvalue);
sb2.Append(tmpinfo);
} else {
// Plain column: emit a simple get/set property.
sb2.AppendFormat(
@"		{2}[JsonProperty] public {0} {1} {{
			get {{ return _{1}; }}
			set {{ _{1} = value; }}
		}}
", csType, uColumn_Name, prototype_comment);
}
sb3.AppendFormat("{0} {1}, ", csType, uColumn_Name);
sb4.AppendFormat(
@" _{0} = {0};
", uColumn_Name);
sb5.AppendFormat(@"
__jsonIgnore.ContainsKey(""{0}"") ? string.Empty : string.Format("", {0} : {{0}}"", {1}), ", uColumn_Name, CodeBuild.GetToStringFieldConcat(column));
sb10.AppendFormat(@"
if (!__jsonIgnore.ContainsKey(""{0}"")) ht[""{0}""] = {0};", uColumn_Name);
sb7.AppendFormat(@"
{0}, ""|"",", GetToStringStringify(column));
sb8.AppendFormat(@"
if (string.Compare(""null"", ret[{2}]) != 0) item.{0} = {1};",
uColumn_Name, string.Format(CodeBuild.GetStringifyParse(column.Type), "ret[" + column_idx +"]"), column_idx);
}
// Trim the trailing separators left by the per-column loop (", " = 2 chars; sb7's
// trailing ' ""|"",' = 6 chars). sb2 non-empty implies the others were appended to too.
if (sb2.Length != 0) {
sb2.Remove(sb2.Length - 2, 2);
sb3.Remove(sb3.Length - 2, 2);
sb5.Remove(sb5.Length - 2, 2);
sb7.Remove(sb7.Length - 6, 6);
}
// Collected navigation-collection property snippets, keyed by property name.
Dictionary<string, string> dic_objs = new Dictionary<string, string>();
// m -> n (many-to-many navigation discovery over all tables)
_tables.ForEach(delegate (TableInfo t2) {
if (t2.ForeignKeys.Count > 2) {
foreach (TableInfo t3 in _tables) {
if (t3.FullName == t2.FullName) continue;
ForeignKeyInfo fk3 = t3.ForeignKeys.Find(delegate (ForeignKeyInfo ffk3) {
return ffk3.ReferencedTable.FullName == t2.FullName;
});
if (fk3 != null) {
if (fk3.Columns[0].IsPrimaryKey)
if (fk3.Table.PrimaryKeys.Count == 1) return; //如果有外键是主键,并且它不是复合组合,则跳过
}
}
}
ForeignKeyInfo fk_Common = null;
// Collect t2's foreign keys that point back at the current table.
List<ForeignKeyInfo> fks = t2.ForeignKeys.FindAll(delegate (ForeignKeyInfo ffk) {
if (ffk.ReferencedTable.FullName == table.FullName/* &&
ffk.Table.FullName != table.FullName*/) { // condition commented out so an obj property is also generated for a self-referencing parent_id
fk_Common = ffk;
return true;
}
return false;
});
if (fks.Count == 0) return;
// Prefer the FK whose column is named <table>_<pk>; fall back to the first FK.
ForeignKeyInfo fk = fks.Count > 1 ? fks.Find(delegate (ForeignKeyInfo ffk) {
return string.Compare(table.Name + "_" + table.PrimaryKeys[0].Name, ffk.Columns[0].Name, true) == 0;
}) : fks[0];
if (fk == null) fk = fks[0];
//if (fk.Table.FullName == table.FullName) return; // commented out so an obj property is also generated for parent_id
// The "other side" FKs of a potential junction table (primary-key FKs other than fk).
List<ForeignKeyInfo> fk2 = t2.ForeignKeys.FindAll(delegate (ForeignKeyInfo ffk2) {
return ffk2.Columns[0].IsPrimaryKey && ffk2 != fk;
});
// 1 -> 1
ForeignKeyInfo fk1v1 = table.ForeignKeys.Find(delegate (ForeignKeyInfo ffk2) {
return ffk2.ReferencedTable.FullName == t2.FullName
&& ffk2.ReferencedColumns[0].IsPrimaryKey && ffk2.Columns[0].IsPrimaryKey; // this condition exists so the parent_id obj object is still generated
});
// A true 1-to-1 relation is handled elsewhere; skip list generation here.
if (fk1v1 != null) return;
//t2.Columns
// Derive the navigation member name from the related table's name, stripping the
// current table's name as a prefix/suffix when present.
string t2name = t2.Name;
string tablename = table.Name;
string addname = t2name;
if (t2name.StartsWith(tablename + "_")) {
addname = t2name.Substring(tablename.Length + 1);
// NOTE(review): addname == t2name at this point, so addname.Length works, but
// t2name.Length would be the clearer (and safer) choice here.
} else if (t2name.EndsWith("_" + tablename)) {
addname = t2name.Remove(addname.Length - tablename.Length - 1);
// NOTE(review): this branch is unreachable — its suffix test duplicates the branch above.
} else if (fk2.Count == 1 && t2name.EndsWith("_" + tablename)) {
addname = t2name.Remove(t2name.Length - tablename.Length - 1);
// NOTE(review): this branch is a no-op (addname already equals t2name).
} else if (fk2.Count == 1 && t2name.EndsWith("_" + fk2[0].ReferencedTable.Name)) {
addname = t2name;
}
string addname_schema = addname == t2.Name && t2.Owner != table.Owner ? t2.ClassName : addname;
string parms1 = "";
string parmsNoneType1 = "";
string parms1_add = "";
string parmsNoneType1_add = "";
string parms2 = "";
string parmsNoneType2 = "";
string parms2_add = "";
string parmsNoneType2_add = "";
string parms3 = "";
string parmsNoneType3 = "";
string parms4 = "";
string parmsNoneType4 = "";
string parmsNoneType5 = "";
string pkNamesNoneType = "";
string updateDiySet = "";
string add_or_flag = "Add";
int ms = 0;
// For a junction table whose two foreign keys point at the same table, the
// <table>_<pk-name> column (the selected fk's first column) is used as the main
// reference column.
string main_column = fk.Columns[0].Name;
var pkName = fk.ReferencedColumns[0].Name;
foreach (ColumnInfo columnInfo in t2.Columns) {
var cstype = GetCSType(columnInfo.Type);
bool is_addignore = columnInfo.IsPrimaryKey && cstype == "Guid?" ||
columnInfo.Name.ToLower() == "update_time" && cstype == "DateTime?" ||
columnInfo.Name.ToLower() == "create_time" && cstype == "DateTime?";
if (string.Compare(columnInfo.Name, main_column, true) == 0) {
parmsNoneType2 += string.Format("\r\n {0} = this.{1}, ", UFString(columnInfo.Name), UFString(pkName));
//if (!is_addignore) parmsNoneType2_add += string.Format("\r\n {0} = this.{1}, ", UFString(columnInfo.Name), UFString(pkName));
parmsNoneType4 += string.Format(GetCSTypeValue(columnInfo.Type), "this." + UFString(pkName)) + ", ";
parmsNoneType5 += string.Format("\r\n item.{0} = this.{1};", UFString(columnInfo.Name), UFString(pkName));
if (columnInfo.IsPrimaryKey) pkNamesNoneType += string.Format(GetCSTypeValue(fk.ReferencedColumns[0].Type), "this." + UFString(pkName)) + ", ";
continue;
}
if (columnInfo.IsPrimaryKey) pkNamesNoneType += string.Format(GetCSTypeValue(columnInfo.Type), UFString(columnInfo.Name)) + ", ";
//UFString(columnInfo.Name) + ", ";
else if (columnInfo.Name.ToLower() == "create_time" && cstype == "DateTime?") ;
else updateDiySet += string.Format("\r\n\t\t\t\t.Set{0}({0})", UFString(columnInfo.Name));
if (columnInfo.IsIdentity) {
//parmsNoneType2 += "0, ";
continue;
}
parms2 += cstype + " " + UFString(columnInfo.Name) + ", ";
parmsNoneType2 += string.Format("\r\n {0} = {0}, ", UFString(columnInfo.Name));
if (!is_addignore) {
parms2_add += cstype + " " + UFString(columnInfo.Name) + ", ";
parmsNoneType2_add += string.Format("\r\n {0} = {0}, ", UFString(columnInfo.Name));
}
ForeignKeyInfo fkk3 = t2.ForeignKeys.Find(delegate (ForeignKeyInfo fkk33) {
return fkk33.Columns[0].Name == columnInfo.Name;
});
if (fkk3 == null) {
parms1 += cstype + " " + UFString(columnInfo.Name) + ", ";
parmsNoneType1 += UFString(columnInfo.Name) + ", ";
if (!is_addignore) {
parms1_add += cstype + " " + UFString(columnInfo.Name) + ", ";
parmsNoneType1_add += UFString(columnInfo.Name) + ", ";
}
} else {
string fkk3_ReferencedTable_ObjName = fkk3.ReferencedTable.Name;
string endStr = "_" + fkk3.ReferencedTable.Name + "_" + fkk3.ReferencedColumns[0].Name;
if (columnInfo.Name.EndsWith(endStr))
fkk3_ReferencedTable_ObjName = columnInfo.Name.Remove(columnInfo.Name.Length - fkk3.ReferencedColumns[0].Name.Length - 1);
fkk3_ReferencedTable_ObjName = UFString(fkk3_ReferencedTable_ObjName);
parms1 += UFString(fkk3.ReferencedTable.ClassName) + "Info " + fkk3_ReferencedTable_ObjName + ", ";
parmsNoneType1 += fkk3_ReferencedTable_ObjName + "." + UFString(fkk3.ReferencedColumns[0].Name) + ", ";
if (!is_addignore) {
parms1_add += UFString(fkk3.ReferencedTable.ClassName) + "Info " + fkk3_ReferencedTable_ObjName + ", ";
parmsNoneType1_add += fkk3_ReferencedTable_ObjName + "." + UFString(fkk3.ReferencedColumns[0].Name) + ", ";
}
if (columnInfo.IsPrimaryKey) {
parms3 += UFString(fkk3.ReferencedTable.ClassName) + "Info " + fkk3_ReferencedTable_ObjName + ", ";
parmsNoneType3 += fkk3_ReferencedTable_ObjName + "." + UFString(fkk3.ReferencedColumns[0].Name) + ", ";
parms4 += cstype + " " + UFString(columnInfo.Name) + ", ";
parmsNoneType4 += string.Format(GetCSTypeValue(columnInfo.Type), UFString(columnInfo.Name)) + ", ";
}
//UFString(columnInfo.Name) + " ?? default(" + columnInfo.CsType.Replace("?", "") + "), ";
if (add_or_flag != "Flag" && fk.Columns[0].IsPrimaryKey) //中间表关系键,必须为主键
t2.Uniques.ForEach(delegate (List<ColumnInfo> cs) {
if (cs.Count < 2) return;
ms = 0;
foreach (ColumnInfo c in cs) {
if (t2.ForeignKeys.Find(delegate (ForeignKeyInfo ffkk2) {
return ffkk2.Columns[0].Name == c.Name;
}) != null) ms++;
}
if (ms == cs.Count) {
add_or_flag = "Flag";
}
});
}
}
if (parms1.Length > 0) parms1 = parms1.Remove(parms1.Length - 2);
if (parmsNoneType1.Length > 0) parmsNoneType1 = parmsNoneType1.Remove(parmsNoneType1.Length - 2);
if (parms1_add.Length > 0) parms1_add = parms1_add.Remove(parms1_add.Length - 2);
if (parmsNoneType1_add.Length > 0) parmsNoneType1_add = parmsNoneType1_add.Remove(parmsNoneType1_add.Length - 2);
if (parms2.Length > 0) parms2 = parms2.Remove(parms2.Length - 2);
if (parmsNoneType2.Length > 0) parmsNoneType2 = parmsNoneType2.Remove(parmsNoneType2.Length - 2);
if (parms2_add.Length > 0) parms2_add = parms2_add.Remove(parms2_add.Length - 2);
if (parmsNoneType2_add.Length > 0) parmsNoneType2_add = parmsNoneType2_add.Remove(parmsNoneType2_add.Length - 2);
if (parms3.Length > 0) parms3 = parms3.Remove(parms3.Length - 2);
if (parmsNoneType3.Length > 0) parmsNoneType3 = parmsNoneType3.Remove(parmsNoneType3.Length - 2);
if (parms4.Length > 0) parms4 = parms4.Remove(parms4.Length - 2);
if (parmsNoneType4.Length > 0) parmsNoneType4 = parmsNoneType4.Remove(parmsNoneType4.Length - 2);
if (pkNamesNoneType.Length > 0) pkNamesNoneType = pkNamesNoneType.Remove(pkNamesNoneType.Length - 2);
if (add_or_flag == "Flag") {
if (parms1 != parms2) {
sb6.AppendFormat(@"
public {0}Info Flag{1}({2}) => Flag{1}({3});", UFString(t2.ClassName), UFString(addname_schema), parms1, parmsNoneType1);
sb16.AppendFormat(@"
async public Task<{0}Info> Flag{1}Async({2}) => await Flag{1}Async({3});", UFString(t2.ClassName), UFString(addname_schema), parms1, parmsNoneType1);
}
sb6.AppendFormat(@"
public {0}Info Flag{1}({2}) {{
{0}Info item = BLL.{0}.GetItem({5});
if (item == null) item = BLL.{0}.Insert(new {0}Info {{{3}}});{6}
return item;
}}
", UFString(t2.ClassName), UFString(addname_schema), parms2, parmsNoneType2.Replace("\t\t\t", "\t\t\t\t"), solutionName, pkNamesNoneType, updateDiySet.Length > 0 ? "\r\n\t\t\telse item.UpdateDiy" + updateDiySet + ".ExecuteNonQuery();" : string.Empty);
sb16.AppendFormat(@"
async public Task<{0}Info> Flag{1}Async({2}) {{
{0}Info item = await BLL.{0}.GetItemAsync({5});
if (item == null) item = await BLL.{0}.InsertAsync(new {0}Info {{{3}}});{6}
return item;
}}
", UFString(t2.ClassName), UFString(addname_schema), parms2, parmsNoneType2.Replace("\t\t\t", "\t\t\t\t"), solutionName, pkNamesNoneType, updateDiySet.Length > 0 ? "\r\n\t\t\telse await item.UpdateDiy" + updateDiySet + ".ExecuteNonQueryAsync();" : string.Empty);
} else {
//sb6.Append(addname + "," + t2.Name);
if (parms1_add != parms2_add) {
sb6.AppendFormat(@"
public {0}Info Add{1}({2}) => Add{1}({3});", UFString(t2.ClassName), UFString(addname_schema), parms1_add, parmsNoneType1_add);
sb16.AppendFormat(@"
async public Task<{0}Info> Add{1}Async({2}) => await Add{1}Async({3});", UFString(t2.ClassName), UFString(addname_schema), parms1_add, parmsNoneType1_add);
}
sb6.AppendFormat(@"
public {0}Info Add{1}({2}) => Add{1}(new {0}Info {{{3}}});
public {0}Info Add{1}({0}Info item) {{{5}
return BLL.{0}.Insert(item);
}}
", UFString(t2.ClassName), UFString(addname_schema), parms2_add, parmsNoneType2_add, solutionName, parmsNoneType5);
sb16.AppendFormat(@"
async public Task<{0}Info> Add{1}Async({2}) => await Add{1}Async(new {0}Info {{{3}}});
async public Task<{0}Info> Add{1}Async({0}Info item) {{{5}
return await BLL.{0}.InsertAsync(item);
}}
", UFString(t2.ClassName), UFString(addname_schema), parms2_add, parmsNoneType2_add, solutionName, parmsNoneType5);
}
if (add_or_flag == "Flag") {
string deleteByUniqui = string.Empty;
for (int deleteByUniqui_a = 0; deleteByUniqui_a < fk.Table.Uniques.Count; deleteByUniqui_a++)
if (fk.Table.Uniques[deleteByUniqui_a].Count > 1 && fk.Table.Uniques[deleteByUniqui_a][0].IsPrimaryKey == false) {
foreach (ColumnInfo deleteByuniquiCol in fk.Table.Uniques[deleteByUniqui_a])
deleteByUniqui = deleteByUniqui + "And" + UFString(deleteByuniquiCol.Name);
deleteByUniqui = "By" + deleteByUniqui.Substring(3);
break;
}
sb6.AppendFormat(@"
public {0}Info Unflag{1}({2}) => Unflag{1}({3});
public {0}Info Unflag{1}({4}) => BLL.{0}.Delete{9}({5});
public List<{0}Info> Unflag{1}ALL() => BLL.{0}.DeleteBy{8}(this.{7});
", UFString(t2.ClassName), UFString(addname_schema), parms3, parmsNoneType3, parms4, parmsNoneType4,
solutionName, string.Format(GetCSTypeValue(fk.ReferencedColumns[0].Type), UFString(pkName)),
UFString(fk.Columns[0].Name), deleteByUniqui);
sb16.AppendFormat(@"
async public Task<{0}Info> Unflag{1}Async({2}) => await Unflag{1}Async({3});
async public Task<{0}Info> Unflag{1}Async({4}) => await BLL.{0}.Delete{9}Async({5});
async public Task<List<{0}Info>> Unflag{1}ALLAsync() => await BLL.{0}.DeleteBy{8}Async(this.{7});
", UFString(t2.ClassName), UFString(addname_schema), parms3, parmsNoneType3, parms4, parmsNoneType4,
solutionName, string.Format(GetCSTypeValue(fk.ReferencedColumns[0].Type), UFString(pkName)),
UFString(fk.Columns[0].Name), deleteByUniqui);
if (ms > 2) {
} else {
string civ = string.Format(GetCSTypeValue(fk.ReferencedColumns[0].Type), "_" + UFString(pkName));
string f5 = t2name;
//if (addname != f5) {
string fk20_ReferencedTable_Name = fk2[0].ReferencedTable.Name;
string fk_ReferencedTable_Name = fk.ReferencedTable.Name;
if (f5.StartsWith(fk20_ReferencedTable_Name + "_"))
f5 = f5.Substring(fk20_ReferencedTable_Name.Length + 1);
else if (f5.EndsWith("_" + fk20_ReferencedTable_Name))
f5 = f5.Remove(f5.Length - fk20_ReferencedTable_Name.Length - 1);
else if (string.Compare(t2name, fk20_ReferencedTable_Name + "_" + fk_ReferencedTable_Name) != 0 &&
string.Compare(t2name, fk_ReferencedTable_Name + "_" + fk20_ReferencedTable_Name) != 0)
f5 = addname_schema;
//}
string objs_value = string.Format(@"
private List<{0}Info> _obj_{1}s;
public List<{0}Info> Obj_{1}s => _obj_{1}s ?? (_obj_{1}s = BLL.{0}.SelectBy{5}_{4}({3}).ToList());", UFString(fk2[0].ReferencedTable.ClassName), addname_schema, solutionName, civ, fk2[0].ReferencedTable.PrimaryKeys[0].Name, UFString(f5));
//如果中间表字段 > 2,那么应该把其中间表也查询出来
if (t2.Columns.Count > 2) {
string _f6 = fk.Columns[0].Name;
string _f7 = fk.ReferencedTable.PrimaryKeys[0].Name;
string _f8 = fk2[0].Columns[0].Name;
string _f9 = GetCSType(fk2[0].ReferencedTable.PrimaryKeys[0].Type).Replace("?", "");
if (fk.ReferencedTable.ClassName == fk2[0].ReferencedTable.ClassName &&
string.Compare(main_column, fk.Columns[0].Name, true) != 0) {
_f6 = fk2[0].Columns[0].Name;
_f7 = fk2[0].ReferencedTable.PrimaryKeys[0].Name;
_f8 = fk.Columns[0].Name;
_f9 = GetCSType(fk2[0].Table.PrimaryKeys[0].Type).Replace("?", "");
}
objs_value = string.Format(@"
public {2}Info Obj_{3} {{ set; get; }}
private List<{0}Info> _obj_{1}s;
/// <summary>
/// 遍历时,可通过 Obj_{3} 可获取中间表数据
/// </summary>
public List<{0}Info> Obj_{1}s => _obj_{1}s ?? (_obj_{1}s = BLL.{0}.Select.InnerJoin<BLL.{2}>(""b"", @""b.[{6}] = a.[{5}]"").Where(@""b.[{4}] = {{0}}"", {7}).ToList());", UFString(fk2[0].ReferencedTable.ClassName), addname_schema, UFString(t2.ClassName), t2.ClassName,
_f6, _f7, _f8, civ.Replace(".Value", ""));
}
string objs_key = string.Format("Obj_{0}s", addname);
if (dic_objs.ContainsKey(objs_key))
dic_objs[objs_key] = objs_value;
else
dic_objs.Add(objs_key, objs_value);
}
} else {
// Member name for the navigation: parent_id keeps the table name; otherwise the FK
// column prefix (minus <table>_<pk>) is prepended to the related table's name.
string f2 = fk.Columns[0].Name.CompareTo("parent_id") == 0 ? t2name : fk.Columns[0].Name.Replace(tablename + "_" + table.PrimaryKeys[0].Name, "") + t2name;
if (fk.Columns[0].IsPrimaryKey && fk.Table.PrimaryKeys.Count == 1) { // 1-to-1 relation: do not generate an obj_xxxs list
// NOTE(review): format arguments {2} (solutionName) and {3} are unused by this template.
string obj_value = string.Format(@"
		private {0}Info _obj_{1};
		public {0}Info Obj_{1} {{
			get {{ return _obj_{1} ?? (_{4} == null ? null : (_obj_{1} = BLL.{0}.GetItem(_{5}))); }}
			internal set {{ _obj_{1} = value; }}
		}}", UFString(t2.ClassName), t2.ClassName, solutionName, UFString(fk.Columns[0].Name), UFString(fk.ReferencedColumns[0].Name), string.Format(GetCSTypeValue(fk.ReferencedColumns[0].Type), UFString(fk.ReferencedColumns[0].Name)));
string objs_key = string.Format("Obj_{0}", f2);
if (!dic_objs.ContainsKey(objs_key))
dic_objs.Add(objs_key, obj_value);
} else {
// 1-to-many: lazily loaded list capped at 500 rows.
string objs_value = string.Format(@"
		private List<{0}Info> _obj_{1}s;
		public List<{0}Info> Obj_{1}s => _obj_{1}s ?? (_obj_{1}s = BLL.{0}.SelectBy{3}(_{4}).Limit(500).ToList());", UFString(t2.ClassName), f2, solutionName, UFString(fk.Columns[0].Name), UFString(table.PrimaryKeys[0].Name));
string objs_key = string.Format("Obj_{0}s", f2);
if (!dic_objs.ContainsKey(objs_key))
dic_objs.Add(objs_key, objs_value);
}
}
});
string[] dic_objs_values = new string[dic_objs.Count];
dic_objs.Values.CopyTo(dic_objs_values, 0);
sb9.Append(string.Join("", dic_objs_values));
string[] innerjoinObjs_values = new string[innerjoinObjs.Count];
innerjoinObjs.Values.CopyTo(innerjoinObjs_values, 0);
sb9.Append(string.Join("", innerjoinObjs_values));
string pkupdatediy = "";
if (table.PrimaryKeys.Count > 0) {
string newguid = "";
foreach (ColumnInfo guidpk in table.PrimaryKeys)
if (GetCSType(guidpk.Type) == "Guid?") newguid += string.Format(@"
this.{0} = BLL.SqlHelper.NewMongodbId();", UFString(guidpk.Name));
if (table.Columns.Count > table.PrimaryKeys.Count || !string.IsNullOrEmpty(newguid)) {
ColumnInfo colUpdateTime = table.Columns.Find(delegate (ColumnInfo fcc) { return fcc.Name.ToLower() == "update_time" && GetCSType(fcc.Type) == "DateTime?"; });
ColumnInfo colCreateTime = table.Columns.Find(delegate (ColumnInfo fcc) { return fcc.Name.ToLower() == "create_time" && GetCSType(fcc.Type) == "DateTime?"; });
sb6.Insert(0, string.Format(@"
public {1}Info Save() {{{2}
if (this.{4} != null) {{
if (BLL.{1}.Update(this) == 0) return BLL.{1}.Insert(this);
return this;
}}{5}{3}
return BLL.{1}.Insert(this);
}}", solutionName, uClass_Name, colUpdateTime != null ? @"
this." + UFString(colUpdateTime.Name) + " = DateTime.Now;" : "", colCreateTime != null ? @"
this." + UFString(colCreateTime.Name) + " = DateTime.Now;" : "", pkCsParamNoType.Replace(", ", " != null && this."), newguid));
sb16.Insert(0, string.Format(@"
async public Task<{1}Info> SaveAsync() {{{2}
if (this.{4} != null) {{
if (await BLL.{1}.UpdateAsync(this) == 0) return await BLL.{1}.InsertAsync(this);
return this;
}}{5}{3}
return await BLL.{1}.InsertAsync(this);
}}", solutionName, uClass_Name, colUpdateTime != null ? @"
this." + UFString(colUpdateTime.Name) + " = DateTime.Now;" : "", colCreateTime != null ? @"
this." + UFString(colCreateTime.Name) + " = DateTime.Now;" : "", pkCsParamNoType.Replace(", ", " != null && this."), newguid));
}
// Build a null-guard for UpdateDiy: any nullable PK component (rendered with ".Value"
// by GetCSTypeValue) contributes an "_pk == null ||" term so UpdateDiy yields null
// when the entity has no key yet.
string[] pkisnullfields = pkCsParamNoTypeByval.Split(new string[] { ", " }, StringSplitOptions.None);
string pkisnull = "";
foreach (string pkisnullfield in pkisnullfields) {
if (pkisnullfield.EndsWith(".Value")) pkisnull += string.Format("_{0} == null || ", pkisnullfield.Replace(".Value", ""));
}
string pkisnullf3 = "";
// Drop the trailing " || " (4 chars) and turn the guard into a ternary prefix.
if (!string.IsNullOrEmpty(pkisnull)) pkisnullf3 = string.Format("{0} ? null : ", pkisnull.Substring(0, pkisnull.Length - 4));
// NOTE(review): format argument {2} is unused by this template.
pkupdatediy = string.Format(@"
		public {0}.DAL.{1}.SqlUpdateBuild UpdateDiy => {3}BLL.{1}.UpdateDiy(new List<{1}Info> {{ this }});
", solutionName, uClass_Name, pkCsParamNoTypeByval.Replace(", ", ", _"), pkisnullf3);
}
sb1.AppendFormat(
@" #endregion
public {0}Info() {{ }}", uClass_Name);
sb1.AppendFormat(@"
{1}{2}
#region 序列化,反序列化
protected static readonly string StringifySplit = ""@<{0}(Info]?#>"";
public string Stringify() {{
return string.Concat({7});
}}
public static {0}Info Parse(string stringify) {{
if (string.IsNullOrEmpty(stringify) || stringify == ""null"") return null;
string[] ret = stringify.Split(new char[] {{ '|' }}, {6}, StringSplitOptions.None);
if (ret.Length != {6}) throw new Exception($""格式不正确,{0}Info:{{stringify}}"");
{0}Info item = new {0}Info();{8}
return item;
}}
#endregion
#region override
private static Lazy<Dictionary<string, bool>> __jsonIgnoreLazy = new Lazy<Dictionary<string, bool>>(() => {{
FieldInfo field = typeof({0}Info).GetField(""JsonIgnore"");
Dictionary<string, bool> ret = new Dictionary<string, bool>();
if (field != null) string.Concat(field.GetValue(null)).Split(',').ToList().ForEach(f => {{
if (!string.IsNullOrEmpty(f)) ret[f] = true;
}});
return ret;
}});
private static Dictionary<string, bool> __jsonIgnore => __jsonIgnoreLazy.Value;
public override string ToString() {{
string json = string.Concat({3}, "" }}"");
return string.Concat(""{{"", json.Substring(1));
}}
public IDictionary ToBson(bool allField = false) {{
IDictionary ht = new Hashtable();{10}
return ht;
}}
public object this[string key] {{
get {{ return this.GetType().GetProperty(key).GetValue(this); }}
set {{ this.GetType().GetProperty(key).SetValue(this, value); }}
}}
#endregion
#region properties
{4}{9}
#endregion
{13}
#region sync methods
{5}
#endregion
#region async methods
{11}
#endregion
}}
}}
", uClass_Name, "", "", sb5.ToString(), sb2.ToString(), sb6.ToString(), table.Columns.Count, sb7.ToString(), sb8.ToString(), sb9.ToString(), sb10.ToString(), sb16.ToString(), sb17.ToString(), pkupdatediy);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, solutionName, @".db\Model\", basicName, @"\", uClass_Name, "Info.cs"), Deflate.Compress(Is_System_ComponentModel ? sb1.ToString().Replace("using System.Reflection;", "using System.ComponentModel;\r\nusing System.Reflection;") : sb1.ToString())));
clearSb();
Model_Build_ExtensionMethods_cs.AppendFormat(@"
public static string ToJson(this {0}Info item) => string.Concat(item);
public static string ToJson(this {0}Info[] items) => GetJson(items);
public static string ToJson(this IEnumerable<{0}Info> items) => GetJson(items);
public static IDictionary[] ToBson(this {0}Info[] items, Func<{0}Info, object> func = null) => GetBson(items, func);
public static IDictionary[] ToBson(this IEnumerable<{0}Info> items, Func<{0}Info, object> func = null) => GetBson(items, func);", uClass_Name, solutionName);
if (table.PrimaryKeys.Count > 0)
Model_Build_ExtensionMethods_cs.AppendFormat(@"
public static {1}.DAL.{0}.SqlUpdateBuild UpdateDiy(this List<{0}Info> items) => {1}.BLL.{0}.UpdateDiy(items);", uClass_Name, solutionName);
Model_Build_ExtensionMethods_cs.AppendFormat(@"
");
#endregion
#region DAL *.cs
#region use t-sql
// Assemble the template SQL fragments embedded in the generated DAL class.
// NOTE(review): this is SQL Server T-SQL syntax (declare @table, OUTPUT ... INTO,
// row_number() over) even though the repository name suggests PostgreSQL — confirm
// this code path is the mssql template.
string sqlTable = "declare @table table(";
string sqlFields = "";
string sqlDelete = string.Format("DELETE FROM {0} ", nTable_Name);
string sqlUpdate = string.Format("UPDATE {0} SET ", nTable_Name);
string sqlInsert = string.Format("INSERT INTO {0}(", nTable_Name);
string sqlSelect = string.Format("SELECT <top> \" + GetFields(null) + \"");
string insertField = string.Empty;
string insertParms = string.Empty;
string temp1 = string.Empty;
string temp2 = string.Empty;
string temp3 = string.Empty;
string temp4 = string.Empty;
foreach (ColumnInfo columnInfo in table.Columns) {
// Identity columns are excluded from UPDATE SET / INSERT lists but kept in SELECT.
if (columnInfo.IsIdentity == false) {
temp1 += string.Format("[{0}] = @{0}, ", columnInfo.Name);
temp2 += string.Format("[{0}], ", columnInfo.Name);
temp3 += string.Format("@{0}, ", columnInfo.Name);
}
temp4 += string.Format("a.[{0}], ", columnInfo.Name);
sqlTable += string.Format("[{0}] {1},", columnInfo.Name, columnInfo.SqlType);
}
// Drop trailing ", " separators (and the trailing "," on the @table declaration).
temp1 = temp1.Substring(0, temp1.Length - 2);
temp2 = temp2.Substring(0, temp2.Length - 2);
temp3 = temp3.Substring(0, temp3.Length - 2);
temp4 = temp4.Substring(0, temp4.Length - 2);
sqlTable = sqlTable.Substring(0, sqlTable.Length - 1) + ")\\r\\n";
sqlFields = temp4;
sqlDelete += "WHERE ";
sqlUpdate += temp1 + string.Format(" WHERE {0}", pkSqlParam);
sqlInsert += string.Format("{0}) OUTPUT \" + Field.Replace(\"a.\", \"INSERTED.\") + \" INTO @table VALUES({1})\\r\\nselect * from @table", temp2, temp3);
sqlSelect += string.Format(", row_number() over(<order by>) AS rownum FROM {0}", nTable_Name);
insertField = temp2;
insertParms = temp3;
// Reset the scratch accumulators for later reuse.
temp1 = "";
temp2 = "";
temp3 = "";
temp4 = "";
sb1.AppendFormat(
@"using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Data;
using System.Data.SqlClient;
using System.Threading.Tasks;
using {0}.Model;
namespace {0}.DAL {{
public partial class {1} : IDAL {{
#region transact-sql define
public string Table {{ get {{ return TSQL.Table; }} }}
public string Field {{ get {{ return TSQL.Field; }} }}
public string Sort {{ get {{ return TSQL.Sort; }} }}
internal class TSQL {{
internal static readonly string Table = ""{3}"";
internal static readonly string Field = ""{5}"";
internal static readonly string Sort = ""{6}"";
internal static readonly string Delete = ""DELETE FROM {3} OUTPUT "" + Field.Replace(@""a.["", @""DELETED.["") + ""WHERE "";
internal static readonly string InsertField = ""{2}"";
internal static readonly string InsertValues = ""{4}"";
internal static readonly string InsertMultiFormat = ""INSERT INTO {3}("" + InsertField + "") OUTPUT "" + Field.Replace(@""a.["", @""INSERTED.["") + "" VALUES{{0}}"";
internal static readonly string Insert = string.Format(InsertMultiFormat, $""({{InsertValues}})"");
}}
#endregion
#region common call
protected static SqlParameter[] GetParameters({1}Info item) {{
return new SqlParameter[] {{
{7}
}};
}}", solutionName, uClass_Name, insertField, nTable_Name, insertParms, sqlFields, orderBy, AppendParameters(table, " "));
// Emit the synchronous GetItem reader pair: the ref-dataIndex overload reads the row
// column-by-column in table order; the simple overload starts at index -1.
sb1.AppendFormat(@"
		public {0}Info GetItem(SqlDataReader dr) {{
			int dataIndex = -1;
			return GetItem(dr, ref dataIndex) as {0}Info;
		}}
		public object GetItem(SqlDataReader dr, ref int dataIndex) {{
			{0}Info item = new {0}Info();", uClass_Name);
int getItemIndex = 0;
foreach (ColumnInfo columnInfo in table.Columns) {
++getItemIndex;
if (columnInfo.Type == SqlDbType.Image ||
columnInfo.Type == SqlDbType.Binary ||
columnInfo.Type == SqlDbType.VarBinary ||
columnInfo.Type == SqlDbType.Timestamp) {
// Binary columns are read through a chunked GetBytes helper; the helper itself is
// emitted into sb4 only once per class (guarded by sb4.Length == 0).
if (sb4.Length == 0) {
sb4.AppendFormat(@"
		public byte[] GetBytes(SqlDataReader dr, int dataIndex) {{
			if (dr.IsDBNull(dataIndex)) return null;
			var ms = new MemoryStream();
			byte[] bt = new byte[1048576 * 8];
			int size = 0;
			while ((size = (int)dr.GetBytes(dataIndex, ms.Position, bt, 0, bt.Length)) > 0) ms.Write(bt, 0, size);
			return ms.ToArray();
		}}");
}
sb1.AppendFormat(
@"
			if (!dr.IsDBNull(++dataIndex)) item.{0} = GetBytes(dr, dataIndex);", UFString(columnInfo.Name));
} else if (CodeBuild.GetDataReaderMethod(columnInfo.Type) == "GetValue") {
// GetValue-read columns need an explicit conversion prefix (GetDbToCsConvert).
sb1.AppendFormat(
@"
			if (!dr.IsDBNull(++dataIndex)) item.{0} = {2}dr.{1}(dataIndex);", UFString(columnInfo.Name), CodeBuild.GetDataReaderMethod(columnInfo.Type), CodeBuild.GetDbToCsConvert(columnInfo.Type));
} else {
sb1.AppendFormat(
@"
			if (!dr.IsDBNull(++dataIndex)) item.{0} = dr.{1}(dataIndex);", UFString(columnInfo.Name), CodeBuild.GetDataReaderMethod(columnInfo.Type));
}
// A null primary key marks an absent row (e.g. outer join): advance dataIndex past
// the remaining columns of this entity and return null.
if (columnInfo.IsPrimaryKey)
sb1.AppendFormat(@" if (item.{0} == null) {{ dataIndex += {1}; return null; }}", UFString(columnInfo.Name), table.Columns.Count - getItemIndex);
}
sb1.AppendFormat(@"
return item;
}}
private void CopyItemAllField({0}Info item, {0}Info newitem) {{{1}
}}", uClass_Name, csItemAllFieldCopy);
sb1.Append(sb4.ToString());
sb1.AppendFormat(@"
#endregion", uClass_Name, table.Columns.Count + 1);
string dal_async_code = string.Format(@"
async public Task<{0}Info> GetItemAsync(SqlDataReader dr) {{
var read = await GetItemAsync(dr, -1);
return read.result as {0}Info;
}}
async public Task<(object result, int dataIndex)> GetItemAsync(SqlDataReader dr, int dataIndex) {{
{0}Info item = new {0}Info();", uClass_Name);
getItemIndex = 0;
foreach (ColumnInfo columnInfo in table.Columns) {
++getItemIndex;
dal_async_code += string.Format(@"
if (!await dr.IsDBNullAsync(++dataIndex)) item.{0} = await dr.GetFieldValueAsync<{1}>(dataIndex);", UFString(columnInfo.Name), GetCSType(columnInfo.Type).Replace("?", ""));
if (columnInfo.IsPrimaryKey)
dal_async_code += string.Format(@" if (item.{0} == null) {{ dataIndex += {1}; return (null, dataIndex); }}", UFString(columnInfo.Name), table.Columns.Count - getItemIndex);
}
dal_async_code += string.Format(@"
return (item, dataIndex);
}}", uClass_Name);
// ---- DAL Delete generation -------------------------------------------------
// For every unique key (primary key included) emit Delete / DeleteBy...
// returning the deleted rows via the OUTPUT DELETED clause. del_exists keys
// on the C# parameter signature so duplicate overloads are not emitted.
Dictionary<string, bool> del_exists = new Dictionary<string, bool>();
foreach (List<ColumnInfo> cs in table.Uniques) {
string parms = string.Empty;
string parmsBy = "By";
string sqlParms = string.Empty;
string sqlParmsA = string.Empty;
string sqlParmsANoneType = string.Empty;
int sqlParmsAIndex = 0;
foreach (ColumnInfo columnInfo in cs) {
parms += CodeBuild.GetCSType(columnInfo.Type) + " " + CodeBuild.UFString(columnInfo.Name) + ", ";
parmsBy += CodeBuild.UFString(columnInfo.Name) + "And";
sqlParms += "[" + columnInfo.Name + "] = @" + columnInfo.Name + " AND ";
sqlParmsA += "a.[" + columnInfo.Name + "] = {" + sqlParmsAIndex++ + "} AND ";
sqlParmsANoneType += CodeBuild.UFString(columnInfo.Name) + ", ";
}
// Trim the trailing separators (", ", "And", " AND ") left by the loop above.
parms = parms.Substring(0, parms.Length - 2);
parmsBy = parmsBy.Substring(0, parmsBy.Length - 3);
sqlParms = sqlParms.Substring(0, sqlParms.Length - 5);
sqlParmsA = sqlParmsA.Substring(0, sqlParmsA.Length - 5);
sqlParmsANoneType = sqlParmsANoneType.Substring(0, sqlParmsANoneType.Length - 2);
if (del_exists.ContainsKey(parms)) continue;
del_exists.Add(parms, true);
sb2.AppendFormat(@"
public {0}Info Delete{3}({1}) {{
{0}Info item = null;
SqlHelper.ExecuteReader(dr => {{ item = BLL.{0}.dal.GetItem(dr); }}, string.Concat(TSQL.Delete, @""{2}""),
{4});
return item;
}}", uClass_Name, parms, sqlParms, cs[0].IsPrimaryKey ? string.Empty : parmsBy, AppendParameters(cs, " "));
dal_async_code += string.Format(@"
async public Task<{0}Info> Delete{3}Async({1}) {{
{0}Info item = null;
await SqlHelper.ExecuteReaderAsync(async dr => {{ item = await BLL.{0}.dal.GetItemAsync(dr); }}, string.Concat(TSQL.Delete, @""{2}""),
{4});
return item;
}}", uClass_Name, parms, sqlParms, cs[0].IsPrimaryKey ? string.Empty : parmsBy, AppendParameters(cs, " "));
}
// Foreign-key deletes return a List since several rows may match.
table.ForeignKeys.ForEach(delegate (ForeignKeyInfo fkk) {
string parms = string.Empty;
string parmsBy = "By";
string sqlParms = string.Empty;
foreach (ColumnInfo columnInfo in fkk.Columns) {
parms += CodeBuild.GetCSType(columnInfo.Type) + " " + CodeBuild.UFString(columnInfo.Name) + ", ";
parmsBy += CodeBuild.UFString(columnInfo.Name) + "And";
sqlParms += "[" + columnInfo.Name + "] = @" + columnInfo.Name + " AND ";
}
parms = parms.Substring(0, parms.Length - 2);
parmsBy = parmsBy.Substring(0, parmsBy.Length - 3);
sqlParms = sqlParms.Substring(0, sqlParms.Length - 5);
if (del_exists.ContainsKey(parms)) return;
del_exists.Add(parms, true);
sb2.AppendFormat(@"
public List<{0}Info> Delete{3}({1}) {{
var items = new List<{0}Info>();
SqlHelper.ExecuteReader(dr => {{ items.Add(BLL.{0}.dal.GetItem(dr)); }}, string.Concat(TSQL.Delete, @""{2}""),
{4});
return items;
}}", uClass_Name, parms, sqlParms, parmsBy, AppendParameters(fkk.Columns, " "));
dal_async_code += string.Format(@"
async public Task<List<{0}Info>> Delete{3}Async({1}) {{
var items = new List<{0}Info>();
await SqlHelper.ExecuteReaderAsync(async dr => {{ items.Add(await BLL.{0}.dal.GetItemAsync(dr)); }}, string.Concat(TSQL.Delete, @""{2}""),
{4});
return items;
}}", uClass_Name, parms, sqlParms, parmsBy, AppendParameters(fkk.Columns, " "));
});
// ---- SqlUpdateBuild Set{Column} generation (tables with a primary key) -----
// Skips identity columns and uniqueidentifier primary keys; for each other
// column emits a SetXxx builder method that also records a copy-back lambda
// (_setAs) used to refresh the in-memory dataSource after an update.
if (table.PrimaryKeys.Count > 0) {
#region 如果没有主键的处理UpdateBuild
foreach (ColumnInfo col in table.Columns) {
if (col.IsIdentity ||
col.IsPrimaryKey && col.Type == SqlDbType.UniqueIdentifier ||
table.PrimaryKeys.FindIndex(delegate (ColumnInfo pkf) { return pkf.Name == col.Name && pkf.Type == SqlDbType.UniqueIdentifier; }) != -1) continue;
string lname = LFString(col.Name);
string valueParm = CodeBuild.AppendParameters(col, "");
valueParm = valueParm.Remove(valueParm.LastIndexOf(", ") + 2);
sb5.AppendFormat(@"
public SqlUpdateBuild Set{0}({2} value) {{
if (_dataSource != null && _setAs.ContainsKey(""{0}"") == false) _setAs.Add(""{0}"", (olditem, newitem) => olditem.{0} = newitem.{0});
return this.Set(""[{1}]"", string.Concat(""@{1}_"", _parameters.Count),
{3}Value = value }});
}}", CodeBuild.UFString(col.Name), col.Name, CodeBuild.GetCSType(col.Type), valueParm.Replace("\"@" + col.Name + "\"", "string.Concat(\"@" + col.Name + "_\", _parameters.Count)"));
// Numeric non-FK columns additionally get SetXxxIncrement; int/bigint
// columns following the status naming convention get bit-flag setters.
if ((col.Type == SqlDbType.BigInt ||
col.Type == SqlDbType.Decimal ||
col.Type == SqlDbType.Float ||
col.Type == SqlDbType.Int ||
col.Type == SqlDbType.Money ||
col.Type == SqlDbType.Real ||
col.Type == SqlDbType.SmallInt ||
col.Type == SqlDbType.SmallMoney ||
col.Type == SqlDbType.TinyInt) &&
table.ForeignKeys.FindIndex(delegate (ForeignKeyInfo fkf) { return fkf.Columns.FindIndex(delegate (ColumnInfo fkfpkf) { return fkfpkf.Name == col.Name; }) != -1; }) == -1) {
if ((col.Type == SqlDbType.Int || col.Type == SqlDbType.BigInt) && (lname == "status" || lname.StartsWith("status_") || lname.EndsWith("_status"))) {
sb5.AppendFormat(@"
public SqlUpdateBuild Set{0}Flag(int _0_16, bool isUnFlag = false) {{
if (_dataSource != null && _setAs.ContainsKey(""{0}"") == false) _setAs.Add(""{0}"", (olditem, newitem) => olditem.{0} = newitem.{0});
{2} tmp1 = ({2})Math.Pow(2, _0_16);
return this.Set(@""[{1}]"", $@""COALESCE([{1}],0) {{(isUnFlag ? '^' : '|')}} @{1}_{{_parameters.Count}}"",
{3}Value = tmp1 }});
}}
public SqlUpdateBuild Set{0}UnFlag(int _0_16) {{
return this.Set{0}Flag(_0_16, true);
}}", UFString(col.Name), col.Name, CodeBuild.GetCSType(col.Type), valueParm.Replace("\"@" + col.Name + "\"", "string.Concat(\"@" + col.Name + "_\", _parameters.Count)"));
} else {
sb5.AppendFormat(@"
public SqlUpdateBuild Set{0}Increment({2} value) {{
if (_dataSource != null && _setAs.ContainsKey(""{0}"") == false) _setAs.Add(""{0}"", (olditem, newitem) => olditem.{0} = newitem.{0});
return this.Set(""[{1}]"", string.Concat(""[{1}] + @{1}_"", _parameters.Count),
{3}Value = value }});
}}", CodeBuild.UFString(col.Name), col.Name, CodeBuild.GetCSType(col.Type), valueParm.Replace("\"@" + col.Name + "\"", "string.Concat(\"@" + col.Name + "_\", _parameters.Count)"));
}
}
sb6.AppendFormat(@"
if (ignore.ContainsKey(""{1}"") == false) sub.Set{0}(item.{0});", CodeBuild.UFString(col.Name), col.Name);
}
// ---- DAL Insert generation -------------------------------------------------
// Single-row Insert re-reads the inserted row (OUTPUT INSERTED) and copies it
// back onto the caller's item; multi-row Insert builds one parameterized
// VALUES list via InsertMakeParam, suffixing each parameter name with the row
// index. {1} is the parameter count per row, derived by splitting insertParms
// on ", " — assumes insertParms is a comma-joined list; TODO confirm upstream.
string dal_insert_code = string.Format(@"
public {0}Info Insert({0}Info item) {{
{0}Info newitem = null;
SqlHelper.ExecuteReader(dr => {{ newitem = GetItem(dr); }}, TSQL.Insert, GetParameters(item));
if (newitem == null) return null;
this.CopyItemAllField(item, newitem);
return item;
}}
public List<{0}Info> Insert(IEnumerable<{0}Info> items) {{
var mp = InsertMakeParam(items);
if (string.IsNullOrEmpty(mp.sql)) return new List<{0}Info>();
List<{0}Info> newitems = new List<{0}Info>();
SqlHelper.ExecuteReader(dr => {{ newitems.Add(BLL.{0}.dal.GetItem(dr)); }}, mp.sql, mp.parms);
return newitems;
}}
public (string sql, SqlParameter[] parms) InsertMakeParam(IEnumerable<{0}Info> items) {{
var itemsArr = items?.Where(a => a != null).ToArray();
if (itemsArr == null || itemsArr.Any() == false) return (null, null);
var values = """";
var parms = new SqlParameter[itemsArr.Length * {1}];
for (var a = 0; a < itemsArr.Length; a++) {{
var item = itemsArr[a];
values += $"",({{TSQL.InsertValues.Replace("", "", a + "", "")}}{{a}})"";
var tmparms = GetParameters(item);
for (var b = 0; b < tmparms.Length; b++) {{
tmparms[b].ParameterName += a;
parms[a * {1} + b] = tmparms[b];
}}
}}
return (string.Format(TSQL.InsertMultiFormat, values.Substring(1)), parms);
}}", uClass_Name, insertParms.Split(new[] { ", " }, StringSplitOptions.None).Length);
dal_async_code += string.Format(@"
async public Task<{0}Info> InsertAsync({0}Info item) {{
{0}Info newitem = null;
await SqlHelper.ExecuteReaderAsync(async dr => {{ newitem = await GetItemAsync(dr); }}, TSQL.Insert, GetParameters(item));
if (newitem == null) return null;
this.CopyItemAllField(item, newitem);
return item;
}}
async public Task<List<{0}Info>> InsertAsync(IEnumerable<{0}Info> items) {{
var mp = InsertMakeParam(items);
if (string.IsNullOrEmpty(mp.sql)) return new List<{0}Info>();
List<{0}Info> newitems = new List<{0}Info>();
await SqlHelper.ExecuteReaderAsync(async dr => {{ newitems.Add(await BLL.{0}.dal.GetItemAsync(dr)); }}, mp.sql, mp.parms);
return newitems;
}}", uClass_Name);
// Builds the "[pk] IN ({0})" Where chain used by SqlUpdateBuild's constructor
// to scope updates to the dataSource's primary-key values.
string strdalpkwherein = "";
foreach (ColumnInfo dalpkcol001 in table.PrimaryKeys) strdalpkwherein += string.Format(@"
.Where(@""[{0}] IN ({{0}})"", _dataSource.Select(a => a.{1}).Distinct())", dalpkcol001.Name, UFString(dalpkcol001.Name));
if (!string.IsNullOrEmpty(strdalpkwherein)) strdalpkwherein = strdalpkwherein.Substring(strdalpkwherein.IndexOf(" ") + 6);
// ---- Emit the SqlUpdateBuild nested class and close the DAL class ----------
// Generates Update(item, ignoreFields), the SqlUpdateBuild fluent builder
// (Set/Where/ExecuteNonQuery[Async]), and splices in the insert/delete/async
// code built above. NOTE(review): the generated ExecuteNonQueryAsync calls
// RemoveCacheAsync(_dataSource) without .Concat(newitems), unlike the sync
// ExecuteNonQuery which concats the re-read rows — looks unintended; verify.
sb1.AppendFormat(@"
{1}
public SqlUpdateBuild Update({0}Info item, string[] ignoreFields) {{
var sub = new SqlUpdateBuild(new List<{0}Info> {{ item }}, false);
var ignore = ignoreFields?.ToDictionary(a => a, StringComparer.CurrentCultureIgnoreCase) ?? new Dictionary<string, string>();{8}
return sub;
}}
#region class SqlUpdateBuild
public partial class SqlUpdateBuild {{
protected List<{0}Info> _dataSource;
protected bool _isRefershDataSource;
protected Dictionary<string, {0}Info> _itemsDic;
protected string _fields;
protected string _where;
protected List<SqlParameter> _parameters = new List<SqlParameter>();
protected Dictionary<string, Action<{0}Info, {0}Info>> _setAs = new Dictionary<string, Action<{0}Info, {0}Info>>();
public SqlUpdateBuild(List<{0}Info> dataSource, bool isRefershDataSource) {{
_dataSource = dataSource;
_isRefershDataSource = isRefershDataSource;
_itemsDic = _dataSource == null ? null : _dataSource.ToDictionary(a => $""{{a.{12}}}"");
if (_dataSource != null && _dataSource.Any())
this{13};
}}
public SqlUpdateBuild() {{ }}
public override string ToString() {{
if (string.IsNullOrEmpty(_fields)) return string.Empty;
if (string.IsNullOrEmpty(_where)) throw new Exception(""防止 {9}.DAL.{0}.SqlUpdateBuild 误修改,请必须设置 where 条件。"");
return string.Concat(""UPDATE "", TSQL.Table, "" SET "", _fields.Substring(1), "" OUTPUT "", TSQL.Field.Replace(@""a.["", @""INSERTED.[""), "" WHERE "", _where);
}}
public int ExecuteNonQuery() {{
string sql = this.ToString();
if (string.IsNullOrEmpty(sql)) return 0;
if (_dataSource == null || _dataSource.Any() == false || _isRefershDataSource == false) {{
var affrows = SqlHelper.ExecuteNonQuery(sql, _parameters.ToArray());
BLL.{0}.RemoveCache(_dataSource);
return affrows;
}}
var newitems = new List<{0}Info>();
SqlHelper.ExecuteReader(dr => {{ newitems.Add(BLL.{0}.dal.GetItem(dr)); }}, sql, _parameters.ToArray());
BLL.{0}.RemoveCache(_dataSource.Concat(newitems));
foreach (var newitem in newitems) {{
if (_itemsDic.TryGetValue($""{{newitem.{14}}}"", out var olditem)) foreach (var a in _setAs.Values) a(olditem, newitem);
else {{
_dataSource.Add(newitem);
_itemsDic.Add($""{{newitem.{14}}}"", newitem);
}}
}}
return newitems.Count;
}}
async public Task<int> ExecuteNonQueryAsync() {{
string sql = this.ToString();
if (string.IsNullOrEmpty(sql)) return 0;
if (_dataSource == null || _dataSource.Any() == false || _isRefershDataSource == false) {{
var affrows = await SqlHelper.ExecuteNonQueryAsync(sql, _parameters.ToArray());
await BLL.{0}.RemoveCacheAsync(_dataSource);
return affrows;
}}
var newitems = new List<{0}Info>();
await SqlHelper.ExecuteReaderAsync(async dr => {{ newitems.Add(await BLL.{0}.dal.GetItemAsync(dr)); }}, sql, _parameters.ToArray());
await BLL.{0}.RemoveCacheAsync(_dataSource);
foreach (var newitem in newitems) {{
if (_itemsDic.TryGetValue($""{{newitem.{14}}}"", out var olditem)) foreach (var a in _setAs.Values) a(olditem, newitem);
else {{
_dataSource.Add(newitem);
_itemsDic.Add($""{{newitem.{14}}}"", newitem);
}}
}}
return newitems.Count;
}}
public SqlUpdateBuild Where(string filterFormat, params object[] values) {{
if (!string.IsNullOrEmpty(_where)) _where = string.Concat(_where, "" AND "");
_where = string.Concat(_where, ""("", SqlHelper.Addslashes(filterFormat, values), "")"");
return this;
}}
public SqlUpdateBuild WhereExists<T>(SelectBuild<T> select) {{
return this.Where($""EXISTS({{select.ToString(""1"")}})"");
}}
public SqlUpdateBuild WhereNotExists<T>(SelectBuild<T> select) {{
return this.Where($""NOT EXISTS({{select.ToString(""1"")}})"");
}}
public SqlUpdateBuild Set(string field, string value, params SqlParameter[] parms) {{
if (value.IndexOf('\'') != -1) throw new Exception(""{9}.DAL.{0}.SqlUpdateBuild 可能存在注入漏洞,不允许传递 ' 给参数 value,若使用正常字符串,请使用参数化传递。"");
_fields = string.Concat(_fields, "", "", field, "" = "", value);
if (parms != null && parms.Length > 0) _parameters.AddRange(parms);
return this;
}}{6}
}}
#endregion
{10}
{2}
#region async{11}
#endregion
}}
}}", uClass_Name, sb2.ToString(), sb3.ToString(), pkCsParam.Replace("?", ""), pkSqlParamFormat, pkCsParamNoType, sb5.ToString(),
pkCsParamNoTypeByval.Replace(", ", ", item."), sb6.ToString(), solutionName, dal_insert_code, dal_async_code, pkCsParamNoType.Replace(", ", "}_{a."), strdalpkwherein, pkCsParamNoType.Replace(", ", "}_{newitem."));
#endregion
} else {
// No primary key: emit only the async region and close the DAL class.
sb1.AppendFormat(@"
#region async{1}
#endregion
}}
}}", uClass_Name, dal_async_code);
}
#endregion
// Write the finished DAL source (Deflate-compressed) and reset the builders.
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, solutionName, @".db\DAL\", basicName, @"\", uClass_Name, ".cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region BLL *.cs
// BLL class header: static dal instance plus a per-type cache timeout read
// from SqlHelper.CacheStrategy ("Timeout_{class}" falling back to "Timeout").
sb1.AppendFormat(
@"using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
using System.Data.SqlClient;
using Microsoft.Extensions.Logging;
using {0}.Model;
namespace {0}.BLL {{
public partial class {1} {{
internal static readonly {0}.DAL.{1} dal = new {0}.DAL.{1}();
internal static readonly int itemCacheTimeout;
static {1}() {{
if (!int.TryParse(SqlHelper.CacheStrategy[""Timeout_{1}""], out itemCacheTimeout))
int.TryParse(SqlHelper.CacheStrategy[""Timeout""], out itemCacheTimeout);
}}", solutionName, uClass_Name);
// Collect the distinct unique-key parameter signatures; only the resulting
// count (uniques_dic.Count) is consulted later to pick Update/UpdateDiy forms.
Dictionary<string, bool> uniques_dic = new Dictionary<string, bool>();
foreach (List<ColumnInfo> cs in table.Uniques) {
string parms = string.Empty;
foreach (ColumnInfo columnInfo in cs) {
parms += GetCSType(columnInfo.Type).Replace("?", "") + " " + UFString(columnInfo.Name) + ", ";
}
parms = parms.Substring(0, parms.Length - 2);
if (uniques_dic.ContainsKey(parms)) continue;
uniques_dic.Add(parms, true);
}
// Soft-delete support is enabled when the table has a bool? is_deleted column.
bool is_deleted_column = table.Columns.Find(delegate (ColumnInfo findIsDeleted) {
string getcstype = GetCSType(findIsDeleted.Type);
return findIsDeleted.Name.ToLower() == "is_deleted" && getcstype == "bool?";
}) != null;
string bll_async_code = "";
Dictionary<string, bool> del_exists2 = new Dictionary<string, bool>();
// ---- BLL per-unique-key generation -----------------------------------------
// For each unique key emit: a Delete wrapper that invalidates the cache, a
// cache-backed GetItem (SqlHelper.CacheShell keyed "{solution}_BLL:{class}..."),
// and the matching cache-key line collected into sb4 for RemoveCache.
foreach (List<ColumnInfo> cs in table.Uniques) {
string parms = string.Empty;
string parmsNewItem = string.Empty;
string parmsBy = "By";
string parmsNoneType = string.Empty;
string parmsNodeTypeUpdateCacheRemove = string.Empty;
string cacheCond = string.Empty;
string cacheRemoveCode = string.Empty;
string whereCondi = string.Empty;
foreach (ColumnInfo columnInfo in cs) {
parms += GetCSType(columnInfo.Type).Replace("?", "") + " " + UFString(columnInfo.Name) + ", ";
parmsNewItem += UFString(columnInfo.Name) + " = " + UFString(columnInfo.Name) + ", ";
parmsBy += UFString(columnInfo.Name) + "And";
parmsNoneType += UFString(columnInfo.Name) + ", ";
parmsNodeTypeUpdateCacheRemove += "item." + UFString(columnInfo.Name) + ", \"_,_\", ";
cacheCond += UFString(columnInfo.Name) + " == null || ";
whereCondi += string.Format(".Where{0}({1})", UFString(columnInfo.Name),
//GetCSType(columnInfo.Type).Contains("?") && !cs[0].IsPrimaryKey ? string.Concat("new ", GetCSType(columnInfo.Type), "(", UFString(columnInfo.Name), ")") :
UFString(columnInfo.Name));
}
// Trim trailing separators; the 9-char trim drops the final `, "_,_", `.
parms = parms.Substring(0, parms.Length - 2);
parmsNewItem = parmsNewItem.Substring(0, parmsNewItem.Length - 2);
parmsBy = parmsBy.Substring(0, parmsBy.Length - 3);
parmsNoneType = parmsNoneType.Substring(0, parmsNoneType.Length - 2);
parmsNodeTypeUpdateCacheRemove = parmsNodeTypeUpdateCacheRemove.Substring(0, parmsNodeTypeUpdateCacheRemove.Length - 9);
cacheCond = cacheCond.Substring(0, cacheCond.Length - 4);
if (del_exists2.ContainsKey(parms)) continue;
del_exists2.Add(parms, true);
sb2.AppendFormat(@"
public static {0}Info Delete{3}({1}) {{
var item = dal.Delete{3}({2});
if (itemCacheTimeout > 0) RemoveCache(item);
return item;
}}", uClass_Name, parms, parmsNoneType, cs[0].IsPrimaryKey ? string.Empty : parmsBy);
// Legacy affected-rows variants kept for reference (intentionally disabled).
/*
if (uniques_dic.Count > 1)
sb2.AppendFormat(@"
public static int Delete{2}({0}) {{
var affrows = dal.Delete{2}({1});
if (itemCacheTimeout > 0) RemoveCache(GetItem{2}({1}));
return affrows;
}}", parms, parmsNoneType, cs[0].IsPrimaryKey ? string.Empty : parmsBy);
else
sb2.AppendFormat(@"
public static int Delete{2}({0}) {{
var affrows = dal.Delete{2}({1});
if (itemCacheTimeout > 0) RemoveCache(new {3}Info {{ {4} }});
return affrows;
}}", parms, parmsNoneType, cs[0].IsPrimaryKey ? string.Empty : parmsBy, uClass_Name, parmsNewItem);
*/
bll_async_code += string.Format(@"
async public static Task<{0}Info> Delete{3}Async({1}) {{
var item = await dal.Delete{3}Async({2});
if (itemCacheTimeout > 0) await RemoveCacheAsync(item);
return item;
}}", uClass_Name, parms, parmsNoneType, cs[0].IsPrimaryKey ? string.Empty : parmsBy);
/*
if (uniques_dic.Count > 1)
bll_async_code += string.Format(@"
async public static Task<int> Delete{2}Async({0}) {{
var affrows = await dal.Delete{2}Async({1});
if (itemCacheTimeout > 0) await RemoveCacheAsync(GetItem{2}({1}));
return affrows;
}}", parms, parmsNoneType, cs[0].IsPrimaryKey ? string.Empty : parmsBy);
else
bll_async_code += string.Format(@"
async public static Task<int> Delete{2}Async({0}) {{
var affrows = await dal.Delete{2}Async({1});
if (itemCacheTimeout > 0) await RemoveCacheAsync(new {3}Info {{ {4} }});
return affrows;
}}", parms, parmsNoneType, cs[0].IsPrimaryKey ? string.Empty : parmsBy, uClass_Name, parmsNewItem);
*/
sb3.AppendFormat(@"
public static {1}Info GetItem{2}({4}) => SqlHelper.CacheShell(string.Concat(""{0}_BLL:{1}{2}:"", {3}), itemCacheTimeout, () => Select{7}.ToOne());", solutionName, uClass_Name, cs[0].IsPrimaryKey ? string.Empty : parmsBy, parmsNodeTypeUpdateCacheRemove.Replace("item.", ""),
parms, parmsNoneType, cacheCond, whereCondi);
bll_async_code += string.Format(@"
async public static Task<{1}Info> GetItem{2}Async({4}) => await SqlHelper.CacheShellAsync(string.Concat(""{0}_BLL:{1}{2}:"", {3}), itemCacheTimeout, () => Select{7}.ToOneAsync());", solutionName, uClass_Name, cs[0].IsPrimaryKey ? string.Empty : parmsBy, parmsNodeTypeUpdateCacheRemove.Replace("item.", ""),
parms, parmsNoneType, cacheCond, whereCondi);
// One cache key per unique key, consumed by the generated RemoveCache loop.
sb4.AppendFormat(@"
keys[keysIdx++] = string.Concat(""{0}_BLL:{1}{2}:"", {3});", solutionName, uClass_Name, cs[0].IsPrimaryKey ? string.Empty : parmsBy, parmsNodeTypeUpdateCacheRemove);
}
// ---- BLL primary-key section: enum _, Update, Insert, RemoveCache ----------
if (table.PrimaryKeys.Count > 0) {
#region 如果没有主键的处理
// "|deleteby_fk|" is a placeholder token inside the generated text —
// presumably replaced with the FK delete methods later; TODO confirm.
sb2.AppendFormat(@"|deleteby_fk|");
var bllfields_ = "";
// Build the members of the generated enum _ (one per column, first = 1),
// each carrying the column's DB comment as an XML doc comment when present.
foreach (var col33 in table.Columns) {
string comment = _column_coments.ContainsKey(table.FullName) && _column_coments[table.FullName].ContainsKey(col33.Name) ? _column_coments[table.FullName][col33.Name] : col33.Name;
string prototype_comment = comment == col33.Name ? @"
" : string.Format(@"
/// <summary>
/// {0}
/// </summary>
", comment.Replace("\r\n", "\n").Replace("\n", "\r\n /// "));
bllfields_ += prototype_comment + UFString(col33.Name) + (bllfields_.Length == 0 ? " = 1, " : ", ");
}
if (bllfields_.Length > 0) bllfields_ = bllfields_.Substring(0, bllfields_.Length - 2);
sb1.AppendFormat(@"
#region delete, update, insert
{0}
#region enum _
public enum _ {{{1}
}}
#endregion
", sb2.ToString(), bllfields_);
// Update/UpdateDiy: with multiple unique keys UpdateDiy must fetch the old
// item when caching is off; with a single unique key the PK fields suffice.
if (uniques_dic.Count > 1)
sb1.AppendFormat(@"
public static int Update({1}Info item, _ ignore1 = 0, _ ignore2 = 0, _ ignore3 = 0) => Update(item, new[] {{ ignore1, ignore2, ignore3 }});
public static int Update({1}Info item, _[] ignore) => dal.Update(item, ignore?.Where(a => a > 0).Select(a => Enum.GetName(typeof(_), a)).ToArray()).ExecuteNonQuery();
public static {0}.DAL.{1}.SqlUpdateBuild UpdateDiy({2}) => new {0}.DAL.{1}.SqlUpdateBuild(new List<{1}Info> {{ itemCacheTimeout > 0 ? new {1}Info {{ {4} }} : GetItem({3}) }}, false);
public static {0}.DAL.{1}.SqlUpdateBuild UpdateDiy(List<{1}Info> dataSource) => new {0}.DAL.{1}.SqlUpdateBuild(dataSource, true);
public static {0}.DAL.{1}.SqlUpdateBuild UpdateDiyDangerous => new {0}.DAL.{1}.SqlUpdateBuild();
", solutionName, uClass_Name, pkCsParam.Replace("?", ""), pkCsParamNoType, pkCsParamNoTypeFieldInit);
else {
var xxxxtempskdf = "";
foreach (var xxxxtempskdfstr in pkCsParamNoType.Split(new string[] { ", " }, StringSplitOptions.None)) {
xxxxtempskdf += xxxxtempskdfstr + " = " + xxxxtempskdfstr + ", ";
}
sb1.AppendFormat(@"
public static int Update({1}Info item, _ ignore1 = 0, _ ignore2 = 0, _ ignore3 = 0) => Update(item, new[] {{ ignore1, ignore2, ignore3 }});
public static int Update({1}Info item, _[] ignore) => dal.Update(item, ignore?.Where(a => a > 0).Select(a => Enum.GetName(typeof(_), a)).ToArray()).ExecuteNonQuery();
public static {0}.DAL.{1}.SqlUpdateBuild UpdateDiy({2}) => new {0}.DAL.{1}.SqlUpdateBuild(new List<{1}Info> {{ new {1}Info {{ {4} }} }}, false);
public static {0}.DAL.{1}.SqlUpdateBuild UpdateDiy(List<{1}Info> dataSource) => new {0}.DAL.{1}.SqlUpdateBuild(dataSource, true);
public static {0}.DAL.{1}.SqlUpdateBuild UpdateDiyDangerous => new {0}.DAL.{1}.SqlUpdateBuild();
", solutionName, uClass_Name, pkCsParam.Replace("?", ""), pkCsParamNoType, xxxxtempskdf.Substring(0, xxxxtempskdf.Length - 2));
}
bll_async_code += string.Format(@"
public static Task<int> UpdateAsync({1}Info item, _ ignore1 = 0, _ ignore2 = 0, _ ignore3 = 0) => UpdateAsync(item, new[] {{ ignore1, ignore2, ignore3 }});
public static Task<int> UpdateAsync({1}Info item, _[] ignore) => dal.Update(item, ignore?.Where(a => a > 0).Select(a => Enum.GetName(typeof(_), a)).ToArray()).ExecuteNonQueryAsync();
", solutionName, uClass_Name);
// Wide tables get the positional Insert overload marked [Obsolete].
if (table.Columns.Count > 5)
sb1.AppendFormat(@"
/// <summary>
/// 适用字段较少的表;避规后续改表风险,字段数较大请改用 {0}.Insert({0}Info item)
/// </summary>
[Obsolete]", uClass_Name);
sb1.AppendFormat(@"
public static {0}Info Insert({1}) {{
return Insert(new {0}Info {{{2}}});
}}", uClass_Name, CsParam3, CsParamNoType3);
if (table.Columns.Count > 5)
bll_async_code += string.Format(@"
/// <summary>
/// 适用字段较少的表;避规后续改表风险,字段数较大请改用 {0}.Insert({0}Info item)
/// </summary>
[Obsolete]", uClass_Name);
bll_async_code += string.Format(@"
public static Task<{0}Info> InsertAsync({1}) {{
return InsertAsync(new {0}Info {{{2}}});
}}", uClass_Name, CsParam3, CsParamNoType3);
var redisRemove = sb4.ToString();
string cspk2GuidSetValue = "";
string cspk2GuidSetValuesss = "";
// Auto-fill conventions on insert: Guid? PKs get NewMongodbId(),
// create_time/update_time default to DateTime.Now, is_deleted to false.
foreach (ColumnInfo cspk2 in table.Columns) {
string getcstype = GetCSType(cspk2.Type);
if (getcstype == "Guid?" && cspk2.IsPrimaryKey) {
cspk2GuidSetValue += string.Format("\r\n if (item.{0} == null) item.{0} = SqlHelper.NewMongodbId();", UFString(cspk2.Name));
cspk2GuidSetValuesss += string.Format("\r\n foreach (var item in items) if (item != null && item.{0} == null) item.{0} = SqlHelper.NewMongodbId();", UFString(cspk2.Name));
}
if (getcstype == "DateTime?" && cspk2.Name.ToLower() == "create_time" ||
getcstype == "DateTime?" && cspk2.Name.ToLower() == "update_time") {
cspk2GuidSetValue += string.Format("\r\n if (item.{0} == null) item.{0} = DateTime.Now;", UFString(cspk2.Name));
cspk2GuidSetValuesss += string.Format("\r\n foreach (var item in items) if (item != null && item.{0} == null) item.{0} = DateTime.Now;", UFString(cspk2.Name));
}
if (getcstype == "bool?" && cspk2.Name.ToLower() == "is_deleted") {
cspk2GuidSetValue += string.Format("\r\n if (item.{0} == null) item.{0} = false;", UFString(cspk2.Name));
cspk2GuidSetValuesss += string.Format("\r\n foreach (var item in items) if (item != null && item.{0} == null) item.{0} = false;", UFString(cspk2.Name));
}
}
// Emit Insert wrappers plus RemoveCache; keys array is sized
// items.Count() * Uniques.Count since sb4 holds one key line per unique key.
sb1.AppendFormat(@"
public static {0}Info Insert({0}Info item) {{{3}
item = dal.Insert(item);
if (itemCacheTimeout > 0) RemoveCache(item);
return item;
}}
public static List<{0}Info> Insert(IEnumerable<{0}Info> items) {{{4}
var newitems = dal.Insert(items);
if (itemCacheTimeout > 0) RemoveCache(newitems);
return newitems;
}}
internal static void RemoveCache({0}Info item) => RemoveCache(item == null ? null : new [] {{ item }});
internal static void RemoveCache(IEnumerable<{0}Info> items) {{
if (itemCacheTimeout <= 0 || items == null || items.Any() == false) return;
var keys = new string[items.Count() * {5}];
var keysIdx = 0;
foreach (var item in items) {{{2}
}}
if (SqlHelper.Instance.CurrentThreadTransaction != null) SqlHelper.Instance.PreRemove(keys);
else SqlHelper.CacheRemove(keys);
}}
#endregion
{1}
", uClass_Name, sb3.ToString(), redisRemove, cspk2GuidSetValue, cspk2GuidSetValuesss, table.Uniques.Count);
// Async variants; argument {1} is an intentionally empty placeholder here.
bll_async_code += string.Format(@"
async public static Task<{0}Info> InsertAsync({0}Info item) {{{3}
item = await dal.InsertAsync(item);
if (itemCacheTimeout > 0) await RemoveCacheAsync(item);
return item;
}}
async public static Task<List<{0}Info>> InsertAsync(IEnumerable<{0}Info> items) {{{4}
var newitems = await dal.InsertAsync(items);
if (itemCacheTimeout > 0) await RemoveCacheAsync(newitems);
return newitems;
}}
internal static Task RemoveCacheAsync({0}Info item) => RemoveCacheAsync(item == null ? null : new [] {{ item }});
async internal static Task RemoveCacheAsync(IEnumerable<{0}Info> items) {{
if (itemCacheTimeout <= 0 || items == null || items.Any() == false) return;
var keys = new string[items.Count() * {5}];
var keysIdx = 0;
foreach (var item in items) {{{2}
}}
await SqlHelper.CacheRemoveAsync(keys);
}}
", uClass_Name, "", redisRemove, cspk2GuidSetValue, cspk2GuidSetValuesss, table.Uniques.Count);
#endregion
}
// ---- Select helpers --------------------------------------------------------
// With soft delete enabled, the default Select filters is_deleted = false and
// SelectRaw exposes the unfiltered query; otherwise Select is the plain build.
sb1.AppendFormat(@"
public static List<{0}Info> GetItems() => Select.ToList();", uClass_Name, solutionName);
if (is_deleted_column)
sb1.AppendFormat(@"
public static SelectBuild SelectRaw => new SelectBuild(dal);
/// <summary>
/// 开启软删除功能,默认查询 is_deleted = false 的数据,查询所有使用 SelectRaw,软删除数据使用 Update is_deleted = true,物理删除数据使用 Delete 方法
/// </summary>
public static SelectBuild Select => SelectRaw.WhereIs_deleted(false);", uClass_Name, solutionName);
else
sb1.AppendFormat(@"
public static SelectBuild Select => new SelectBuild(dal);", uClass_Name, solutionName);
sb1.AppendFormat(@"
public static SelectBuild SelectAs(string alias = ""a"") => Select.As(alias);", uClass_Name, solutionName);
bll_async_code += string.Format(@"
public static Task<List<{0}Info>> GetItemsAsync() => Select.ToListAsync();", uClass_Name, solutionName);
Dictionary<string, bool> byItems = new Dictionary<string, bool>();
// ---- Foreign-key query generation ------------------------------------------
// For each FK emit GetItemsBy/SelectBy helpers (and Where filters on the
// SelectBuild); byItems de-duplicates by the generated "By..." suffix.
foreach (ForeignKeyInfo fk in table.ForeignKeys) {
string fkcsBy = string.Empty;
string fkcsParms = string.Empty;
string fkcsTypeParms = string.Empty;
string fkcsFilter = string.Empty;
int fkcsFilterIdx = 0;
foreach (ColumnInfo c1 in fk.Columns) {
fkcsBy += UFString(c1.Name) + "And";
fkcsParms += UFString(c1.Name) + ", ";
fkcsTypeParms += GetCSType(c1.Type).Replace("?", "") + " " + UFString(c1.Name) + ", ";
fkcsFilter += @"a.[" + c1.Name + @"] = {" + fkcsFilterIdx++ + "} and ";
}
fkcsBy = fkcsBy.Remove(fkcsBy.Length - 3);
fkcsParms = fkcsParms.Remove(fkcsParms.Length - 2);
fkcsTypeParms = fkcsTypeParms.Remove(fkcsTypeParms.Length - 2);
fkcsFilter = fkcsFilter.Remove(fkcsFilter.Length - 5);
if (byItems.ContainsKey(fkcsBy)) continue;
byItems.Add(fkcsBy, true);
// Cache-invalidating DeleteBy wrappers; the async one is PREPENDED to
// bll_async_code (added to the front rather than appended).
if (!del_exists2.ContainsKey(fkcsTypeParms)) {
sb5.AppendFormat(@"
public static List<{0}Info> DeleteBy{3}({1}) {{
var items = dal.DeleteBy{3}({2});
if (itemCacheTimeout > 0) RemoveCache(items);
return items;
}}", uClass_Name, fkcsTypeParms, fkcsParms, fkcsBy);
bll_async_code = string.Format(@"
async public static Task<List<{0}Info>> DeleteBy{3}Async({1}) {{
var items = await dal.DeleteBy{3}Async({2});
if (itemCacheTimeout > 0) await RemoveCacheAsync(items);
return items;
}}", uClass_Name, fkcsTypeParms, fkcsParms, fkcsBy) + bll_async_code;
del_exists2.Add(fkcsTypeParms, true);
}
// Composite FKs take each column as a parameter; single-column FKs take a
// params array plus an IN/NOT IN sub-select filter.
if (fk.Columns.Count > 1) {
sb1.AppendFormat(
@"
public static List<{0}Info> GetItemsBy{1}({2}) => Select.Where{1}({3}).ToList();
public static List<{0}Info> GetItemsBy{1}({2}, int limit) => Select.Where{1}({3}).Limit(limit).ToList();
public static SelectBuild SelectBy{1}({2}) => Select.Where{1}({3});", uClass_Name, fkcsBy, fkcsTypeParms, fkcsParms);
bll_async_code += string.Format(
@"
public static Task<List<{0}Info>> GetItemsBy{1}Async({2}) => Select.Where{1}({3}).ToListAsync();
public static Task<List<{0}Info>> GetItemsBy{1}Async({2}, int limit) => Select.Where{1}({3}).Limit(limit).ToListAsync();", uClass_Name, fkcsBy, fkcsTypeParms, fkcsParms);
sb6.AppendFormat(@"
public SelectBuild Where{1}({2}) => base.Where(@""{4}"", {3});", uClass_Name, fkcsBy, fkcsTypeParms, fkcsParms, fkcsFilter, solutionName);
} else if (fk.Columns.Count == 1/* && fk.Columns[0].IsPrimaryKey == false*/) {
string csType = GetCSType(fk.Columns[0].Type);
sb1.AppendFormat(
@"
public static List<{0}Info> GetItemsBy{1}(params {2}[] {1}) => Select.Where{1}({1}).ToList();
public static List<{0}Info> GetItemsBy{1}({2}[] {1}, int limit) => Select.Where{1}({1}).Limit(limit).ToList();
public static SelectBuild SelectBy{1}(params {2}[] {1}) => Select.Where{1}({1});", uClass_Name, fkcsBy, csType);
bll_async_code += string.Format(
@"
public static Task<List<{0}Info>> GetItemsBy{1}Async(params {2}[] {1}) => Select.Where{1}({1}).ToListAsync();
public static Task<List<{0}Info>> GetItemsBy{1}Async({2}[] {1}, int limit) => Select.Where{1}({1}).Limit(limit).ToListAsync();", uClass_Name, fkcsBy, csType);
sb6.AppendFormat(@"
public SelectBuild Where{1}(params {2}[] {1}) => this.Where1Or(@""a.[{3}] = {{0}}"", {1});
public SelectBuild Where{1}({4}.SelectBuild select, bool isNotIn = false) => this.Where($@""a.[{3}] {{(isNotIn ? ""NOT IN"" : ""IN"")}} ({{select.ToString(@""[{5}]"")}})"");", uClass_Name, fkcsBy, csType, fk.Columns[0].Name, UFString(fk.ReferencedTable.ClassName), fk.ReferencedColumns[0].Name);
}
}
// m -> n: for every table t2 referencing this table, detect join tables (two
// primary-key foreign keys) and emit SelectBy helpers for the "other side".
_tables.ForEach(delegate (TableInfo t2) {
List<ForeignKeyInfo> fks = t2.ForeignKeys.FindAll(delegate (ForeignKeyInfo ffk) {
if (ffk.ReferencedTable.FullName == table.FullName) {
return true;
}
return false;
});
if (fks.Count == 0) return;
ForeignKeyInfo fk = fks.Count > 1 ? fks.Find(delegate (ForeignKeyInfo ffk) {
return string.Compare(table.Name + "_" + table.PrimaryKeys[0].Name, ffk.Columns[0].Name, true) == 0;
}) : fks[0];
if (fk == null) fk = fks[0];
//if (fk.Table.FullName == table.FullName) return;
List<ForeignKeyInfo> fk2 = t2.ForeignKeys.FindAll(delegate (ForeignKeyInfo ffk2) {
return ffk2.Columns[0].IsPrimaryKey && ffk2 != fk;
});
if (fk2.Count != 1) return;
if (fk.Columns[0].IsPrimaryKey == false) return; // join-table relation key must be a primary key
//t2.Columns
string t2name = t2.Name;
string tablename = table.Name;
string addname = t2name;
// Derive the helper-name suffix from the join-table name.
// NOTE(review): the second branch uses `addname.Length` where `t2name.Length`
// reads as intended (equal at this point, so behavior is the same); the two
// trailing `else if` branches are unreachable or assign addname its current
// value — verify before relying on them.
if (t2name.StartsWith(tablename + "_")) {
addname = t2name.Substring(tablename.Length + 1);
} else if (t2name.EndsWith("_" + tablename)) {
addname = t2name.Remove(addname.Length - tablename.Length - 1);
} else if (fk2.Count == 1 && t2name.EndsWith("_" + tablename)) {
addname = t2name.Remove(t2name.Length - tablename.Length - 1);
} else if (fk2.Count == 1 && t2name.EndsWith("_" + fk2[0].ReferencedTable.Name)) {
addname = t2name;
}
string addname_schema = addname == t2.Name && t2.Owner != table.Owner ? t2.ClassName : addname;
string orgInfo = UFString(fk2[0].ReferencedTable.ClassName);
string fkcsBy = UFString(addname_schema);
if (byItems.ContainsKey(fkcsBy)) return;
byItems.Add(fkcsBy, true);
string civ = string.Format(GetCSTypeValue(fk2[0].ReferencedTable.PrimaryKeys[0].Type), UFString(fk2[0].ReferencedTable.PrimaryKeys[0].Name));
sb1.AppendFormat(@"
public static SelectBuild SelectBy{1}(params {2}Info[] {5}s) => Select.Where{1}({5}s);
public static SelectBuild SelectBy{1}_{4}(params {3}[] {5}_ids) => Select.Where{1}_{4}({5}_ids);", uClass_Name, fkcsBy, orgInfo, GetCSType(fk2[0].ReferencedTable.PrimaryKeys[0].Type).Replace("?", ""), table.PrimaryKeys[0].Name, LFString(orgInfo));
string _f6 = fk.Columns[0].Name;
string _f7 = fk.ReferencedTable.PrimaryKeys[0].Name;
string _f8 = fk2[0].Columns[0].Name;
string _f9 = GetCSType(fk2[0].ReferencedTable.PrimaryKeys[0].Type).Replace("?", "");
// If both FKs of the join table point at the same table, the field named
// tableName_pkName is meant to be chosen as the main reference field.
// NOTE(review): the comparison below is always true because main_column was
// just assigned fk.Columns[0].Name — it likely should compare against
// table.Name + "_" + table.PrimaryKeys[0].Name; verify the intent.
string main_column = fk.Columns[0].Name;
if (fk.ReferencedTable.ClassName == fk2[0].ReferencedTable.ClassName &&
string.Compare(main_column, fk.Columns[0].Name, true) == 0) {
_f6 = fk2[0].Columns[0].Name;
_f7 = fk2[0].ReferencedTable.PrimaryKeys[0].Name;
_f8 = fk.Columns[0].Name;
_f9 = GetCSType(fk2[0].Table.PrimaryKeys[0].Type).Replace("?", "");
}
sb6.AppendFormat(@"
public SelectBuild Where{1}(params {2}Info[] {10}s) => Where{1}({10}s?.ToArray(), null);
public SelectBuild Where{1}_{7}(params {9}[] {10}_ids) => Where{1}_{7}({10}_ids?.ToArray(), null);
public SelectBuild Where{1}({2}Info[] {10}s, Action<{5}.SelectBuild> subCondition) => Where{1}_{7}({10}s?.Where<{2}Info>(a => a != null).Select<{2}Info, {9}>(a => a.{3}).ToArray(), subCondition);
public SelectBuild Where{1}_{7}({9}[] {10}_ids, Action<{5}.SelectBuild> subCondition) {{
if ({10}_ids == null || {10}_ids.Length == 0) return this;
{5}.SelectBuild subConditionSelect = {5}.Select.Where(string.Format(@""[{6}] = a . [{7}] AND [{8}] IN ('{{0}}')"", string.Join(""','"", {10}_ids.Select(a => string.Concat(a).Replace(""'"", ""''"")))));
subCondition?.Invoke(subConditionSelect);
var subConditionSql = subConditionSelect.ToString(@""[{6}]"").Replace(""] a \r\nWHERE ("", ""] WHERE ("");
if (subCondition != null) subConditionSql = subConditionSql.Replace(""a.["", ""[{11}].[{12}].["");
return base.Where($""EXISTS({{subConditionSql}})"");
}}", uClass_Name, fkcsBy, orgInfo, civ, string.Empty, UFString(t2.ClassName), _f6, _f7, _f8, _f9, LFString(orgInfo), t2.Owner, t2.Name);
});
					// Per-column Where helpers: equality, range, bitmask-status and LIKE filters are
					// generated depending on the column's mapped CLR type; columns whose name was
					// already emitted (byItems) are skipped.
					table.Columns.ForEach(delegate (ColumnInfo col) {
						string csType = GetCSType(col.Type);
						string lname = col.Name.ToLower();
						//if (col.IsPrimaryKey) return;
						//if (lname == "create_time" ||
						//	lname == "update_time") return;
						string fkcsBy = UFString(col.Name);
						if (byItems.ContainsKey(fkcsBy)) return;
						byItems.Add(fkcsBy, true);
						// XML-doc header for the generated method, built from the DB column comment (if any).
						string comment = _column_coments.ContainsKey(table.FullName) && _column_coments[table.FullName].ContainsKey(col.Name) ? _column_coments[table.FullName][col.Name] : col.Name;
						string prototype_comment = comment == col.Name ? "" : string.Format(@"/// <summary>
	/// {0},多个参数等于 OR 查询
	/// </summary>
	", comment.Replace("\r\n", "\n").Replace("\n", "\r\n	/// "));
						// bool / Guid columns: equality only.
						if (csType == "bool?" || csType == "Guid?") {
							sb6.AppendFormat(@"
	{4}public SelectBuild Where{1}(params {2}[] {1}) => this.Where1Or(@""a.[{3}] = {{0}}"", {1});", uClass_Name, fkcsBy, col.IsPrimaryKey ? csType.Replace("?", "") : csType, col.Name, prototype_comment);
							return;
						}
						// numeric columns: equality plus open/closed range helpers.
						if (col.Type == SqlDbType.Int || col.Type == SqlDbType.BigInt || col.Type == SqlDbType.SmallInt || col.Type == SqlDbType.TinyInt ||
							col.Type == SqlDbType.Real || col.Type == SqlDbType.Float || col.Type == SqlDbType.Decimal || col.Type == SqlDbType.Money) {
							sb6.AppendFormat(@"
	{4}public SelectBuild Where{1}(params {2}[] {1}) => this.Where1Or(@""a.[{3}] = {{0}}"", {1});", uClass_Name, fkcsBy, col.IsPrimaryKey ? csType.Replace("?", "") : csType, col.Name, prototype_comment);
							sb6.AppendFormat(@"
	public SelectBuild Where{1}Range({2} begin) => base.Where(@""a.[{3}] >= {{0}}"", begin);
	public SelectBuild Where{1}Range({2} begin, {2} end) => end == null ? this.Where{1}Range(begin) : base.Where(@""a.[{3}] between {{0}} and {{1}}"", begin, end);", uClass_Name, fkcsBy, csType, col.Name);
							return;
						}
						// date/time columns: range helpers; equality only when the column is a primary key.
						if (col.Type == SqlDbType.Date || col.Type == SqlDbType.DateTime || col.Type == SqlDbType.DateTime2 ||
							col.Type == SqlDbType.DateTimeOffset || col.Type == SqlDbType.SmallDateTime || col.Type == SqlDbType.Time) {
							if (col.IsPrimaryKey)
								sb6.AppendFormat(@"
	{4}public SelectBuild Where{1}({2} {1}) => base.Where(@""a.[{3}] = {{0}}"", {1});", uClass_Name, fkcsBy, csType, col.Name, prototype_comment);
							sb6.AppendFormat(@"
	public SelectBuild Where{1}Range({2} begin) => base.Where(@""a.[{3}] >= {{0}}"", begin);
	public SelectBuild Where{1}Range({2} begin, {2} end) => end == null ? this.Where{1}Range(begin) : base.Where(@""a.[{3}] between {{0}} and {{1}}"", begin, end);", uClass_Name, fkcsBy, csType, col.Name);
							return;
						}
						// "status" columns: bit-flag test mapping each input n to 2^n.
						// NOTE(review): this branch is unreachable — the numeric branch above already
						// returned for Int/BigInt; confirm whether it should be checked first.
						if ((col.Type == SqlDbType.Int || col.Type == SqlDbType.BigInt) && (lname == "status" || lname.StartsWith("status_") || lname.EndsWith("_status"))) {
							sb6.AppendFormat(@"
	public SelectBuild Where{1}(params int[] _0_16) {{
		if (_0_16 == null || _0_16.Length == 0) return this;
		{2}[] copy = new {2}[_0_16.Length];
		for (int a = 0; a < _0_16.Length; a++) copy[a] = ({2})Math.Pow(2, _0_16[a]);
		return this.Where1Or(@""(a.[{3}] & {{0}}) = {{0}}"", copy);
	}}", uClass_Name, fkcsBy, csType.Replace("?", ""), col.Name);
							return;
						}
						// string columns: equality only for short columns, plus a LIKE/NOT LIKE helper.
						if (csType == "string") {
							if (col.Length > 0 && col.Length < 301)
								sb6.AppendFormat(@"
	{4}public SelectBuild Where{1}(params {2}[] {1}) => this.Where1Or(@""a.[{3}] = {{0}}"", {1});", uClass_Name, fkcsBy, csType, col.Name, prototype_comment);
							sb6.AppendFormat(@"
	public SelectBuild Where{1}Like(string pattern, bool isNotLike = false) => this.Where($@""a.[{3}] {{(isNotLike ? ""NOT LIKE"" : ""LIKE"")}} {{{{0}}}}"", pattern);", uClass_Name, fkcsBy, csType, col.Name);
							return;
						}
					});
					// Emit the tail of the BLL file: the async region, the SelectBuild subclass with
					// the accumulated Where helpers (sb6), then close the class and namespace.
					sb1.AppendFormat(@"
	#region async{3}
	#endregion
	public partial class SelectBuild : SelectBuild<{0}Info, SelectBuild> {{{2}
		public SelectBuild(IDAL dal) : base(dal, SqlHelper.Instance) {{ }}
	}}
}}
}}", uClass_Name, solutionName, sb6.ToString(), bll_async_code);
					// Write the compressed BLL source; the "|deleteby_fk|" placeholder is replaced by sb5.
					loc1.Add(new BuildInfo(string.Concat(CONST.corePath, solutionName, @".db\BLL\", basicName, @"\", uClass_Name, ".cs"), Deflate.Compress(sb1.ToString().Replace("|deleteby_fk|", sb5.ToString()))));
					clearSb();
					#endregion
					// Admin UI generation below requires at least one primary key.
					if (table.PrimaryKeys.Count == 0) continue;
					#region admin
					if (isMakeAdmin) {
						#region common define
						// Primary-key helper strings: "Pk1,Pk2", "Pk1=@item.Pk1&...", "@item.Pk1,...".
						string pkNames = string.Empty;
						string pkUrlQuerys = string.Empty;
						string pkHiddens = string.Empty;
						for (int a = 0; a < table.PrimaryKeys.Count; a++) {
							ColumnInfo col88 = table.PrimaryKeys[a];
							pkNames += UFString(col88.Name) + ",";
							pkUrlQuerys += string.Format(@"{0}=@item.{0}&", UFString(col88.Name));
							pkHiddens += string.Format(@"@item.{0},", UFString(col88.Name));
						}
						// Trim the trailing separator from each accumulator.
						if (pkNames.Length > 0) pkNames = pkNames.Remove(pkNames.Length - 1);
						if (pkUrlQuerys.Length > 0) pkUrlQuerys = pkUrlQuerys.Remove(pkUrlQuerys.Length - 1);
						if (pkHiddens.Length > 0) pkHiddens = pkHiddens.Remove(pkHiddens.Length - 1);
						// Self-referencing FK (tree table, e.g. parent_id) — selects the tree list view later.
						ForeignKeyInfo ttfk = table.ForeignKeys.Find(delegate (ForeignKeyInfo fkk) {
							return fkk.ReferencedTable != null && fkk.ReferencedTable.FullName == table.FullName;
						});
						#endregion
						#region wwwroot_sitemap
						wwwroot_sitemap += string.Format(@"
						<li><a href=""/{0}/""><i class=""fa fa-circle-o""></i>{0}</a></li>", uClass_Name);
						#endregion
						#region init_sysdir
						// Seed admin menu records (list/add/edit/delete) for this module.
						admin_controllers_syscontroller_init_sysdir.Add(string.Format(@"
				dir2 = Sysdir.Insert(dir1.Id, DateTime.Now, ""{0}"", {1}, ""/{0}/"");
				dir3 = Sysdir.Insert(dir2.Id, DateTime.Now, ""列表"", 1, ""/{0}/"");
				dir3 = Sysdir.Insert(dir2.Id, DateTime.Now, ""添加"", 2, ""/{0}/add"");
				dir3 = Sysdir.Insert(dir2.Id, DateTime.Now, ""编辑"", 3, ""/{0}/edit"");
				dir3 = Sysdir.Insert(dir2.Id, DateTime.Now, ""删除"", 4, ""/{0}/del"");", nClass_Name, admin_controllers_syscontroller_init_sysdir.Count + 1));
						#endregion
						#region Controller.cs
						// Accumulators for the generated controller and Razor views.
						string str_listTh = "";
						string str_listTd = "";
						string str_listTh1 = "";
						string str_listTd1 = "";
						string str_controller_list_join = "";
						// 97 == 'a': next SQL join alias character.
						byte str_controller_list_join_alias = 97;
						string str_listCms2FilterFK = "";
						string str_listCms2FilterFK_fkitems = "";
						string keyLikes = string.Empty;
						string getListParamQuery = "";
						bool ttfk_flag = false;
						string str_addhtml_mn = "";
						string str_controller_insert_mn = "";
						string str_controller_update_mn = "";
						string str_fk_getlist = "";
						string str_addjs_mn_initUI = "";
						// Build the list view column-by-column: headers/cells, LEFT JOINs and filter
						// dropdowns for FK columns, an ilike key-search over string columns, and the
						// [FromQuery] parameter list for the GetList action.
						foreach (ColumnInfo col in table.Columns) {
							List<ColumnInfo> us = table.Uniques.Find(delegate (List<ColumnInfo> cs) {
								return cs.Find(delegate (ColumnInfo col88) {
									return col88.Name == col.Name;
								}) != null;
							});
							if (us == null) us = new List<ColumnInfo>();
							List<ForeignKeyInfo> fks_comb = table.ForeignKeys.FindAll(delegate (ForeignKeyInfo fk2) {
								return fk2.Columns.Count == 1 && fk2.Columns[0].Name == col.Name;
							});
							string csType = GetCSType(col.Type);
							string csUName = UFString(col.Name);
							string comment = _column_coments.ContainsKey(table.FullName) && _column_coments[table.FullName].ContainsKey(col.Name) ? _column_coments[table.FullName][col.Name] : col.Name;
							if (csType == "string") {
								keyLikes += "a." + col.Name + " ilike {0} or ";
							}
							List<ForeignKeyInfo> fks = table.ForeignKeys.FindAll(delegate (ForeignKeyInfo fk88) {
								return fk88.Columns.Find(delegate (ColumnInfo col88) {
									return col88.Name == col.Name;
								}) != null;
							});
							// foreign key: resolve the navigation-member name plus a human-readable
							// name/title column on the referenced table for display.
							ForeignKeyInfo fk = null;
							string FK_uEntry_Name = string.Empty;
							string tableNamefe3 = string.Empty;
							string memberName = string.Empty;
							string strName = string.Empty;
							if (fks.Count > 0) {
								fk = fks[0];
								FK_uEntry_Name = fk.ReferencedTable != null ? GetCSName(fk.ReferencedTable.Name) :
									GetCSName(TableInfo.GetEntryName(fk.ReferencedTableName));
								tableNamefe3 = fk.ReferencedTable != null ? fk.ReferencedTable.Name : FK_uEntry_Name;
								memberName = fk.Columns[0].Name.IndexOf(tableNamefe3) == -1 ? tableNamefe3 :
									(fk.Columns[0].Name.Substring(0, fk.Columns[0].Name.IndexOf(tableNamefe3)) + tableNamefe3);
								if (fk.Columns[0].Name.IndexOf(tableNamefe3) == 0 && fk.ReferencedTable != null) memberName = fk.ReferencedTable.ClassName;
								ColumnInfo strNameCol = null;
								if (fk.ReferencedTable != null) {
									strNameCol = fk.ReferencedTable.Columns.Find(delegate (ColumnInfo col88) {
										return col88.Name.ToLower().IndexOf("name") != -1 || col88.Name.ToLower().IndexOf("title") != -1;
									});
									if (strNameCol == null) strNameCol = fk.ReferencedTable.Columns.Find(delegate (ColumnInfo col88) {
										return GetCSType(col88.Type) == "string" && col88.Length > 0 && col88.Length < 300;
									});
								}
								strName = strNameCol != null ? "." + UFString(strNameCol.Name) : string.Empty;
							}
							string Obj_name = string.Concat("Obj_", memberName, strName);
							if (!col.IsIdentity && fks.Count == 1) {
								ForeignKeyInfo fkcb = fks[0];
								string FK_uClass_Name = fkcb.ReferencedTable != null ? UFString(fkcb.ReferencedTable.ClassName) :
									UFString(TableInfo.GetClassName(fkcb.ReferencedTableName));
								getListParamQuery += string.Format(@"[FromQuery] {0}[] {1}, ", csType, csUName);
								sb3.AppendFormat(@"
			if ({0}.Length > 0) select.Where{0}({0});", csUName);
							} else if (!col.IsIdentity && us.Count == 1 || col.IsPrimaryKey && table.PrimaryKeys.Count == 1) {
								// primary key or unique key, not auto-increment (no filter generated)
							}
							// front-end js or template fragments
							// NOTE(review): the condition below dereferences fks[0].ReferencedTable
							// without a null check, unlike the guarded lookups above — confirm it
							// cannot be null when fks.Count == 1.
							if (!col.IsIdentity && fks.Count == 1 && fks[0].Table.FullName != fks[0].ReferencedTable.FullName) {
								str_listTh += string.Format(@"<th scope=""col"">{0}</th>
				", comment);
								str_listTd += string.Format(@"<td>[@item.{0}] @item.Obj_{1}{2}</td>
				", csUName, memberName, string.IsNullOrEmpty(strName) ? "" : ("?" + strName));
								//	str_controller_list_join += string.Format(@"
							//.LeftJoin<{0}>(""{3}"", ""{3}.{1} = a.{2}"")", UFString(fks[0].ReferencedTable.ClassName), fks[0].ReferencedColumns[0].Name, fks[0].Columns[0].Name, (char)++str_controller_list_join_alias);
								// NOTE(review): the 4th argument below is not referenced by the format
								// string; only its ++ side effect remains (see the commented variant above).
								str_controller_list_join += string.Format(@"
					.LeftJoin(a => a.Obj_{0}.{1} == a.{2})", fks[0].ReferencedTable.ClassName, UFString(fks[0].ReferencedColumns[0].Name), UFString(fks[0].Columns[0].Name), (char)++str_controller_list_join_alias);
								if (str_listCms2FilterFK_fkitems.Contains("	var fk_" + LFString(fks[0].ReferencedTable.ClassName) + "s = ") == false)
									str_listCms2FilterFK_fkitems += string.Format(@"
	var fk_{1}s = {2}{0}.Select.ToList();", UFString(fks[0].ReferencedTable.ClassName), LFString(fks[0].ReferencedTable.ClassName), UFString(fks[0].ReferencedTable.ClassName) == "User" ? solutionName + ".BLL." : "");
								str_listCms2FilterFK += string.Format(@"
		{{ name: '{0}', field: '{4}', text: @Html.Json(fk_{1}s.Select(a => a.{2})), value: @Html.Json(fk_{1}s.Select(a => a.{3})) }},",
									UFString(fks[0].ReferencedTable.ClassName), LFString(fks[0].ReferencedTable.ClassName),
									string.IsNullOrEmpty(strName) ? "ToString()" : strName.TrimStart('.'), UFString(fks[0].ReferencedColumns[0].Name), UFString(fks[0].Columns[0].Name));
							} else if (csType == "string" && !ttfk_flag) {
								// First string column doubles as the tree view's display column.
								ttfk_flag = true;
								string t1 = string.Format(@"<th scope=""col"">{0}</th>
				", comment);
								string t2 = string.Format(@"<td>@item.{0}</td>
				", csUName);
								str_listTh1 += t1;
								str_listTd1 += t2;
								if (ttfk == null || ttfk.Columns[0].Name.ToLower() != "parent_id") {
									str_listTh += t1;
									str_listTd += t2;
								}
							} else {
								str_listTh += string.Format(@"<th scope=""col"">{0}</th>
				", comment);
								str_listTd += string.Format(@"<td>@item.{0}</td>
				", csUName);
							}
						}
						// Assemble the free-text "key" search over all string columns (strip the
						// trailing " or ", 4 chars) and expose it as a [FromQuery] parameter.
						if (keyLikes.Length > 0) {
							keyLikes = keyLikes.Remove(keyLikes.Length - 4);
							getListParamQuery = "[FromQuery] string key, " + getListParamQuery;
							sb2.AppendFormat(@"
				.Where(!string.IsNullOrEmpty(key), ""{0}"", string.Concat(""%"", key, ""%""))", keyLikes);
						}
						// Fragments for the controller's Add/Edit actions, filled in below.
						string itemSetValuePK = "";
						string itemSetValuePKInsert = "";
						string itemSetValueNotPK = "";
						string itemCsParamInsertForm = "";
						string itemCsParamUpdateForm = "";
						// Map each column to Insert/Update form parameters and item assignments:
						// PKs (guid auto-generated, identity skipped), image/path upload columns,
						// create/update timestamps, and plain form fields.
						table.Columns.ForEach(delegate (ColumnInfo col88) {
							string csLName = LFString(col88.Name);
							string csUName = UFString(col88.Name);
							string csType = GetCSType(col88.Type);
							if (col88.IsPrimaryKey) {
								//	itemSetValuePK += string.Format(@"
							//item.{0} = {0};", csUName);
								// identity PK: database-generated, no form parameter (deliberate no-op)
								if (col88.IsIdentity) ;
								else if (csType == "Guid?") {
									itemSetValuePKInsert += string.Format(@"
			item.{0} = BLL.SqlHelper.NewMongodbId();", csUName);
								} else {
									itemSetValuePKInsert += string.Format(@"
			item.{0} = {0};", csUName);
									itemCsParamInsertForm += string.Format(", [FromForm] {0} {1}", csType, csUName);
								}
							} else if (col88.IsIdentity) {
							} else if ((csLName == "img" || csLName.StartsWith("img_") || csLName.EndsWith("_img") ||
								csLName == "path" || csLName.StartsWith("path_") || csLName.EndsWith("_path")) && (col88.Type == SqlDbType.VarChar || col88.Type == SqlDbType.Char)) {
								// image/path column: accept an optional uploaded file; stored under /upload,
								// the old file is deleted on update.
								itemCsParamInsertForm += string.Format(", [FromForm] {0} {1}, [FromForm] IFormFile {1}_file", csType, csUName);
								itemCsParamUpdateForm += string.Format(", [FromForm] {0} {1}, [FromForm] IFormFile {1}_file", csType, csUName);
								itemSetValuePKInsert += string.Format(@"
			if ({1}_file != null) {{
				item.{1} = $""/upload/{{Guid.NewGuid().ToString()}}.png"";
				using (FileStream fs = new FileStream(System.IO.Path.Combine(AppContext.BaseDirectory, item.{1}), FileMode.Create)) {1}_file.CopyTo(fs);
			}} else
				item.{1} = {1};", "", csUName);
							itemSetValuePK += string.Format(@"
			if (!string.IsNullOrEmpty(item.{1}) && (item.{1} != {1} || {1}_file != null)) {{
				string path = System.IO.Path.Combine(AppContext.BaseDirectory, item.{1});
				if (System.IO.File.Exists(path)) System.IO.File.Delete(path);
			}}
			if ({1}_file != null) {{
				item.{1} = $""/upload/{{Guid.NewGuid().ToString()}}.png"";
				using (FileStream fs = new FileStream(System.IO.Path.Combine(AppContext.BaseDirectory, item.{1}), FileMode.Create)) {1}_file.CopyTo(fs);
			}} else
				item.{1} = {1};", "", csUName);
							} else {
								string colvalue = "";
								// create_time / update_time are stamped server-side, not posted.
								if (csType == "DateTime?" && (
									string.Compare(csLName, "create_time", true) == 0 ||
									string.Compare(csLName, "update_time", true) == 0
									)) {
									colvalue = "DateTime.Now";
								} else {
									string csType2 = csType;
									if (csType2 == "bool?") csType2 = "bool";
									itemCsParamInsertForm += string.Format(", [FromForm] {0} {1}", csType2, csUName);
									itemCsParamUpdateForm += string.Format(", [FromForm] {0} {1}", csType2, csUName);
									colvalue = csUName;
								}
								itemSetValueNotPK += string.Format(@"
			item.{0} = {1};", csUName, colvalue);
							}
						});
						if (itemCsParamInsertForm.Length > 0) itemCsParamInsertForm = itemCsParamInsertForm.Substring(2);
						// m -> n
						// Junction-table handling for the admin controller: filter parameters, filter
						// dropdowns, multi-select form fields, and Flag/Unflag sync code on save.
						_tables.ForEach(delegate (TableInfo t2) {
							// NOTE(review): ffk.ReferencedTable is dereferenced without a null check,
							// unlike similar lookups elsewhere in this generator — confirm it can
							// never be null here.
							ForeignKeyInfo fk = t2.ForeignKeys.Find(delegate (ForeignKeyInfo ffk) {
								if (ffk.ReferencedTable.FullName == table.FullName) {
									return true;
								}
								return false;
							});
							if (fk == null) return;
							if (fk.Table.FullName == table.FullName) return;
							List<ForeignKeyInfo> fk2 = t2.ForeignKeys.FindAll(delegate (ForeignKeyInfo ffk2) {
								return ffk2 != fk;
							});
							if (fk2.Count != 1) return;
							if (fk.Columns[0].IsPrimaryKey == false) return; // the junction-table relation key must be a primary key
							if (t2.Columns.Count != 2) return; // skip unless the junction table has exactly two columns
							//t2.Columns
							// Derive the relation name by stripping this table's name from t2's name.
							string t2name = t2.Name;
							string tablename = table.Name;
							string addname = t2name;
							if (t2name.StartsWith(tablename + "_")) {
								addname = t2name.Substring(tablename.Length + 1);
							} else if (t2name.EndsWith("_" + tablename)) {
								addname = t2name.Remove(addname.Length - tablename.Length - 1);
								// NOTE(review): the next branch repeats this EndsWith condition and is unreachable.
							} else if (fk2.Count == 1 && t2name.EndsWith("_" + tablename)) {
								addname = t2name.Remove(t2name.Length - tablename.Length - 1);
							} else if (fk2.Count == 1 && t2name.EndsWith("_" + fk2[0].ReferencedTable.Name)) {
								addname = t2name;
							}
							// Pick a display column: name/title, else the first short string, else the PK.
							ColumnInfo strNameCol = fk2[0].ReferencedTable.Columns.Find(delegate (ColumnInfo col88) {
								return col88.Name.ToLower().IndexOf("name") != -1 || col88.Name.ToLower().IndexOf("title") != -1;
							});
							if (strNameCol == null) strNameCol = fk2[0].ReferencedTable.Columns.Find(delegate (ColumnInfo col88) {
								return GetCSType(col88.Type) == "string" && col88.Length > 0 && col88.Length < 300;
							});
							if (strNameCol == null) strNameCol = fk2[0].ReferencedTable.PrimaryKeys[0];
							string strName = UFString(strNameCol.Name);
							getListParamQuery += string.Format(@"[FromQuery] {0}[] {1}_{2}, ", GetCSType(fk2[0].ReferencedTable.PrimaryKeys[0].Type).Replace("?", ""), UFString(addname), table.PrimaryKeys[0].Name);
							sb3.AppendFormat(@"
			if ({0}_{1}.Length > 0) select.Where{0}_{1}({0}_{1});", UFString(addname), table.PrimaryKeys[0].Name);
							if (str_listCms2FilterFK_fkitems.Contains("	var fk_" + LFString(fk2[0].ReferencedTable.ClassName) + "s = ") == false)
								str_listCms2FilterFK_fkitems += string.Format(@"
	var fk_{1}s = {2}{0}.Select.ToList();", UFString(fk2[0].ReferencedTable.ClassName), LFString(fk2[0].ReferencedTable.ClassName), UFString(fk2[0].ReferencedTable.ClassName) == "User" ? solutionName + ".BLL." : "");
							str_listCms2FilterFK += string.Format(@"
		{{ name: '{0}', field: '{4}', text: @Html.Json(fk_{1}s.Select(a => a.{2})), value: @Html.Json(fk_{1}s.Select(a => a.{3})) }},",
								UFString(fk2[0].ReferencedTable.ClassName), LFString(fk2[0].ReferencedTable.ClassName),
								string.IsNullOrEmpty(strName) ? "ToString()" : strName.TrimStart('.'), UFString(fk2[0].ReferencedColumns[0].Name), UFString(fk2[0].Columns[0].Name));
							// add.html tag association (multi-select)
							itemCsParamInsertForm += string.Format(", [FromForm] {0}[] mn_{1}", GetCSType(fk2[0].ReferencedColumns[0].Type).Replace("?", ""), UFString(addname));
							itemCsParamUpdateForm += string.Format(", [FromForm] {0}[] mn_{1}", GetCSType(fk2[0].ReferencedColumns[0].Type).Replace("?", ""), UFString(addname));
							str_controller_insert_mn += string.Format(@"
			//关联 {1}
			foreach ({0} mn_{1}_in in mn_{1})
				item.Flag{1}(mn_{1}_in);", GetCSType(fk2[0].ReferencedColumns[0].Type).Replace("?", ""), UFString(addname));
							str_controller_update_mn += string.Format(@"
			//关联 {1}
			if (mn_{1}.Length == 0) {{
				item.Unflag{1}ALL();
			}} else {{
				List<{0}> mn_{1}_list = mn_{1}.ToList();
				foreach (var Obj_{2} in item.Obj_{2}s) {{
					int idx = mn_{1}_list.FindIndex(a => a == Obj_{2}.Id);
					if (idx == -1) item.Unflag{1}(Obj_{2}.Id);
					else mn_{1}_list.RemoveAt(idx);
				}}
				mn_{1}_list.ForEach(a => item.Flag{1}(a));
			}}", GetCSType(fk2[0].ReferencedColumns[0].Type).Replace("?", ""), UFString(addname), LFString(addname));
							str_addhtml_mn += string.Format(@"
			<tr>
				<td>{1}</td>
				<td>
					<select name=""mn_{2}"" data-placeholder=""Select a {3}"" class=""form-control select2"" multiple>
						@foreach ({0}Info fk in fk_{1}s) {{ <option value=""@fk.{4}"">@fk.{5}</option> }}
					</select>
				</td>
			</tr>", UFString(fk2[0].ReferencedTable.ClassName), LFString(fk2[0].ReferencedTable.ClassName),
							UFString(addname), LFString(addname), UFString(fk2[0].ReferencedColumns[0].Name), strName);
							if (str_fk_getlist.Contains("	var fk_" + LFString(fk2[0].ReferencedTable.ClassName) + "s") == false)
								str_fk_getlist += string.Format(@"
	var fk_{1}s = {2}{0}.Select.ToList();", UFString(fk2[0].ReferencedTable.ClassName), LFString(fk2[0].ReferencedTable.ClassName), UFString(fk2[0].ReferencedTable.ClassName) == "User" ? solutionName + ".BLL." : "");
							str_addjs_mn_initUI += string.Format(@"
	item.mn_{0} = @Html.Json(item.Obj_{2}s);
	for (var a = 0; a < item.mn_{0}.length; a++) $(form.mn_{0}).find('option[value=""{{0}}""]'.format(item.mn_{0}[a].{1})).attr('selected', 'selected');", UFString(addname), UFString(fk2[0].ReferencedColumns[0].Name), LFString(addname));
						});
						// Default _Del action: deletes by an array of single-column PK values.
						string str_mvcdel = string.Format(@"
		async public Task<APIReturn> _Del([FromForm] {2}[] id) {{
			var dels = new List<object>();
			foreach ({2} id2 in id)
				dels.Add(await {3}{1}.DeleteAsync(id2));
			if (dels.Count > 0) return APIReturn.成功.SetMessage($""删除成功,影响行数:{{dels.Count}}"").SetData(""dels"", dels);
			return APIReturn.失败;
		}}", solutionName, uClass_Name, GetCSType(table.PrimaryKeys[0].Type).Replace("?", ""), uClass_Name == "User" ? "BLL." : "");
						// Composite PK variant: ids arrive as comma-joined strings and are parsed
						// field-by-field with the per-type parse expression.
						if (table.PrimaryKeys.Count > 1) {
							string pkParses = "";
							int pk_idx = 0;
							foreach (ColumnInfo pk in table.PrimaryKeys) {
								pkParses += ", " + string.Format(GetStringifyParse(pk.Type).Replace(".Replace(StringifySplit, \"|\")", ""), "vs[" + pk_idx++ + "]");
							}
							pkParses = pkParses.Substring(2);
							str_mvcdel = string.Format(@"
		async public Task<APIReturn> _Del([FromForm] string[] id) {{
			var dels = new List<object>();
			foreach (string id2 in id) {{
				string[] vs = id2.Split(',');
				dels.Add(await {3}{1}.DeleteAsync({2}));
			}}
			if (dels.Count > 0) return APIReturn.成功.SetMessage($""删除成功,影响行数:{{dels.Count}}"").SetData(""dels"", dels);
			return APIReturn.失败;
		}}", solutionName, uClass_Name, pkParses, uClass_Name == "User" ? "BLL." : "");
						}
						// Assemble the controller from the module template and write it out compressed.
						sb1.AppendFormat(CONST.Module_Admin_Controller, solutionName, uClass_Name, nClass_Name, pkMvcRoute,
							"[FromQuery] " + pkCsParam.Replace("?", "").Replace(", ", ", [FromQuery] "), pkCsParamNoType, itemSetValuePK, itemSetValueNotPK,
							sb2.ToString(), sb3.ToString(), itemCsParamInsertForm, itemCsParamUpdateForm, getListParamQuery, itemSetValuePKInsert,
							str_controller_list_join, "",
							str_controller_insert_mn, str_controller_update_mn, str_mvcdel, uClass_Name == "User" ? "BLL." : "");
						loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"\Controllers\", uClass_Name, @"Controller.cs"), Deflate.Compress(sb1.ToString())));
						clearSb();
						#endregion
						// Flat list view when the table is not a parent_id tree; otherwise the
						// treetable variant in the else branch below.
						if (ttfk == null || ttfk.Columns[0].Name.ToLower() != "parent_id") {
							#region wwwroot/xxx/index.html
							// Append "related records" link columns for every other table whose FK
							// references one of this table's columns.
							foreach (ColumnInfo col in table.Columns) {
								List<ForeignKeyInfo> ffks = new List<ForeignKeyInfo>();
								foreach (TableInfo fti in _tables) {
									ffks.AddRange(fti.ForeignKeys.FindAll(delegate (ForeignKeyInfo ffk) {
										if (ffk.ReferencedTable != null && ffk.ReferencedTable.FullName == table.FullName) {
											return ffk.ReferencedColumns.Find(delegate (ColumnInfo col88) {
												return col88.Name == col.Name;
											}) != null;
										}
										return false;
									}));
								}
								foreach (ForeignKeyInfo ffk in ffks) {
									string FFK_uClass_Name = UFString(ffk.Table.ClassName);
									string FFK_nClass_Name = UFString(ffk.Table.ClassName);
									string urlQuerys = string.Empty;
									ffk.Columns.ForEach(delegate (ColumnInfo col88) {
										// NOTE(review): uses the outer loop's `col` rather than `col88`
										// for the value side — confirm this is intended for multi-column FKs.
										string FFK_csUName = UFString(col.Name);
										urlQuerys += string.Format("{0}=@item.{1}&", UFString(col88.Name), FFK_csUName);
									});
									if (urlQuerys.Length > 0) urlQuerys = urlQuerys.Remove(urlQuerys.Length - 1);
									str_listTh += string.Format(@"<th scope=""col""> </th>
				");
									str_listTd += string.Format(@"<td><a href=""../{0}/?{1}"">{0}</a></td>
				", FFK_nClass_Name, urlQuerys);
								}
							}
							// Flat List.cshtml: filter bar, paged grid, batch delete.
							sb1.AppendFormat(@"@{{
	Layout = """";
}}
<div class=""box"">
	<div class=""box-header with-border"">
		<h3 id=""box-title"" class=""box-title""></h3>
		<span class=""form-group mr15""></span><a href=""./add"" data-toggle=""modal"" class=""btn btn-success pull-right"">添加</a>
	</div>
	<div class=""box-body"">
		<div class=""table-responsive"">
			<form id=""form_search"">
				<div id=""div_filter""></div>
			</form>
			<form id=""form_list"" action=""./del"" method=""post"">
				@Html.AntiForgeryToken()
				<input type=""hidden"" name=""__callback"" value=""del_callback""/>
				<table id=""GridView1"" cellspacing=""0"" rules=""all"" border=""1"" style=""border-collapse:collapse;"" class=""table table-bordered table-hover"">
					<tr>
						<th scope=""col"" style=""width:2%;""><input type=""checkbox"" onclick=""$('#GridView1 tbody tr').each(function (idx, el) {{ var chk = $(el).find('td:first input[type=\'checkbox\']')[0]; if (chk) chk.checked = !chk.checked; }});"" /></th>
						{3}<th scope=""col"" style=""width:5%;""> </th>
					</tr>
					<tbody>
						@foreach({0}Info item in ViewBag.items) {{
						<tr>
							<td><input type=""checkbox"" id=""id"" name=""id"" value=""{2}"" /></td>
							{4}<td><a href=""./edit?{1}"">修改</a></td>
						</tr>
						}}
					</tbody>
				</table>
			</form>
			<a id=""btn_delete_sel"" href=""#"" class=""btn btn-danger pull-right"">删除选中项</a>
			<div id=""kkpager""></div>
		</div>
	</div>
</div>
@{{{6}
}}
<script type=""text/javascript"">
	(function () {{
		top.del_callback = function(rt) {{
			if (rt.success) return top.mainViewNav.goto('./?' + new Date().getTime());
			alert(rt.message);
		}};
		var qs = _clone(top.mainViewNav.query);
		var page = cint(qs.page, 1);
		delete qs.page;
		$('#kkpager').html(cms2Pager(@ViewBag.count, page, 20, qs, 'page'));
		var fqs = _clone(top.mainViewNav.query);
		delete fqs.page;
		var fsc = [{5}
			null
		];
		fsc.pop();
		cms2Filter(fsc, fqs);
		top.mainViewInit();
	}})();
</script>
", uClass_Name, pkUrlQuerys, pkHiddens, str_listTh, str_listTd, str_listCms2FilterFK, str_listCms2FilterFK_fkitems);
							loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Views\", uClass_Name, @"\List.cshtml"), Deflate.Compress(sb1.ToString())));
							clearSb();
							#endregion
						} else {
							#region wwwroot/xxx/index.html(递归关系)
							// Tree List.cshtml (jQuery treetable) for self-referencing parent_id tables.
							sb1.AppendFormat(@"@{{
	Layout = """";
}}
<div class=""box"">
	<div class=""box-header with-border"">
		<h3 id=""box-title"" class=""box-title""></h3>
		<span class=""form-group mr15""></span><a href=""./add"" data-toggle=""modal"" class=""btn btn-success pull-right"">添加</a>
	</div>
	<div class=""box-body"">
		<div class=""table-responsive"">
			<form id=""form_list"" action=""./del"" method=""post"">
				@Html.AntiForgeryToken()
				<input type=""hidden"" name=""__callback"" value=""del_callback""/>
				<table id=""GridView1"" cellspacing=""0"" rules=""all"" border=""1"" style=""border-collapse:collapse;"" class=""table table-bordered table-hover"">
					<tr>
						{8}{6}<th scope=""col"" style=""width:5%;""> </th>
						<th scope=""col"" style=""width:5%;"">删除</th>
					</tr>
					<tbody>
						@foreach({0}Info item in ViewBag.items) {{
						<tr data-tt-id=""@item.{1}"" data-tt-parent-id=""@item.{2}"">
							{9}{7}<td><a href=""./edit?{4}"">修改</a></td>
							<td><input id=""id"" name=""id"" type=""checkbox"" value=""{5}"" /></td>
						</tr>
						}}
					</tbody>
				</table>
			</form>
		</div>
	</div>
</div>
<div>
	<font color=""red"">*</font> 删除父项时,请先删除其所有子项。
	<a id=""btn_delete_sel"" href=""#"" class=""btn btn-danger pull-right"">删除选中项</a>
</div>
<script type=""text/javascript"">
	(function() {{
		top.del_callback = function(rt) {{
			if (rt.success) return top.mainViewNav.goto('./?' + new Date().getTime());
			alert(rt.message);
		}};
		$('table#GridView1').treetable({{ expandable: true }});
		$('table#GridView1').treetable('expandAll');
		top.mainViewInit();
	}})();
</script>", uClass_Name, UFString(table.PrimaryKeys[0].Name), UFString(ttfk.Columns[0].Name), "",
							pkUrlQuerys.Replace("a.", ""), pkHiddens.Replace("a.", ""), str_listTh.Replace("a.", ""), str_listTd.Replace("a.", ""), str_listTh1.Replace("a.", ""), str_listTd1.Replace("a.", ""));
							loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Views\", uClass_Name, @"\List.cshtml"), Deflate.Compress(sb1.ToString())));
							clearSb();
							#endregion
						}
						#region wwwroot/xxx/add.html
						// Build the Add/Edit form rows per column; the per-type dispatch chain
						// continues below this preamble.
						foreach (ColumnInfo col in table.Columns) {
							string csType = GetCSType(col.Type);
							string csUName = UFString(col.Name);
							string lname = col.Name.ToLower();
							string comment = _column_coments.ContainsKey(table.FullName) && _column_coments[table.FullName].ContainsKey(col.Name) ? _column_coments[table.FullName][col.Name] : col.Name;
							string rfvEmpty = string.Empty;
							// Unique-key group containing this column, if any.
							List<ColumnInfo> us = table.Uniques.Find(delegate (List<ColumnInfo> cs) {
								return cs.Find(delegate (ColumnInfo col88) {
									return col88.Name == col.Name;
								}) != null;
							});
							if (us == null) us = new List<ColumnInfo>();
							// Single-column FKs on this column drive the dropdown rendering below.
							List<ForeignKeyInfo> fks_comb = table.ForeignKeys.FindAll(delegate (ForeignKeyInfo fk) {
								return fk.Columns.Count == 1 && fk.Columns[0].Name == col.Name;
							});
if (csType == "bool?") {
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td id=""{0}_td""><input name=""{0}"" type=""checkbox"" value=""true"" /></td>
</tr>", csUName, comment);
} else if (csType == "DateTime?" && (
string.Compare(lname, "create_time", true) == 0 ||
string.Compare(lname, "update_time", true) == 0
)) {
sb14.AppendFormat(@"
<tr>
<td>{1}</td>
<td><input name=""{0}"" type=""text"" readonly class=""datepicker"" style=""width:20%;background-color:#ddd;"" /></td>
</tr>", csUName, comment);
} else if (col.IsPrimaryKey && col.IsIdentity) {
//主键自动增值
sb4.AppendFormat(@"
@if (item != null) {{
<tr>
<td>{1}</td>
<td><input name=""{0}"" type=""text"" readonly class=""datepicker"" style=""width:20%;background-color:#ddd;"" /></td>
</tr>
}}", csUName, comment);
} else if (col.IsPrimaryKey && csType == "Guid?") {
//uuid主键
sb4.AppendFormat(@"
@if (item != null) {{
<tr>
<td>{1}</td>
<td><input name=""{0}"" type=""text"" readonly class=""datepicker"" style=""width:60%;background-color:#ddd;"" /></td>
</tr>
}}", csUName, comment);
} else if (fks_comb.Count == 1) {
//外键下拉框
ForeignKeyInfo fkcb = fks_comb[0];
string FK_uClass_Name = fkcb.ReferencedTable != null ? UFString(fkcb.ReferencedTable.ClassName) :
UFString(TableInfo.GetClassName(fkcb.ReferencedTableName));
ForeignKeyInfo fkrr = fkcb.ReferencedTable != null ?
fkcb.ReferencedTable.ForeignKeys.Find(delegate (ForeignKeyInfo fkkk) {
return fkkk.ReferencedTable != null && fkcb.ReferencedTable.FullName == fkkk.ReferencedTable.FullName;
}) : null;
bool isParentSelect = fkcb.ReferencedTable != null && fkrr != null;
string FK_Column = fkcb.ReferencedTable != null ?
UFString(fkcb.ReferencedColumns[0].Name) : UFString(fkcb.ReferencedColumnNames[0]);
ColumnInfo strCol = fkcb.ReferencedTable.Columns.Find(delegate (ColumnInfo col99) {
return col99.Name.ToLower().IndexOf("name") != -1 || col99.Name.ToLower().IndexOf("title") != -1;
});
if (strCol == null) strCol = fkcb.ReferencedTable.Columns.Find(delegate (ColumnInfo col99) {
return GetCSType(col99.Type) == "string" && col99.Length > 0 && col99.Length < 300;
});
string FK_Column_Text = fkcb.ReferencedTable != null && strCol != null ? UFString(strCol.Name)
: FK_Column;
if (isParentSelect) {
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td id=""{0}_td""></td>
</tr>", csUName, comment);
sb5.AppendFormat(@"
$('#{3}_td').html(yieldTreeSelect(yieldTreeArray(@Html.Json(fk_{0}s), null, '{1}', '{2}'), '{{#{4}}}', '{1}')).find('select').attr('name', '{3}');",
FK_uClass_Name, UFString(fkcb.ReferencedColumns[0].Name), UFString(fkrr.Columns[0].Name), csUName, FK_Column_Text);
} else {
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td>
<select name=""{0}"">
<option value="""">------ 请选择 ------</option>
@foreach (var fk in fk_{4}s) {{ <option value=""@fk.{2}"">@fk.{3}</option> }}
</select>
</td>
</tr>", csUName, comment, UFString(fkcb.ReferencedColumns[0].Name), FK_Column_Text, FK_uClass_Name);
}
if (str_fk_getlist.Contains(" var fk_" + FK_uClass_Name + "s") == false)
str_fk_getlist += string.Format(@"
var fk_{0}s = {1}{0}.Select.ToList();", FK_uClass_Name, FK_uClass_Name == "User" ? solutionName + ".BLL." : "");
} else if ((col.Type == SqlDbType.Int || col.Type == SqlDbType.BigInt) && (lname == "status" || lname.StartsWith("status_") || lname.EndsWith("_status"))) {
//加载 multi 多状态字段
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td><input name=""{0}"" type=""hidden"" multi_status=""状态1,状态2,状态3,状态4,状态5"" /></td>
</tr>", csUName, comment);
} else if (col.Type == SqlDbType.SmallInt || col.Type == SqlDbType.Int || col.Type == SqlDbType.BigInt) {
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td><input name=""{0}"" type=""text"" class=""form-control"" data-inputmask=""'mask': '9', 'repeat': 6, 'greedy': false"" data-mask style=""width:200px;"" /></td>
</tr>", csUName, comment);
} else if (col.Type == SqlDbType.Float || col.Type == SqlDbType.Real || col.Type == SqlDbType.Decimal || col.Type == SqlDbType.Money || col.Type == SqlDbType.SmallMoney) {
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td>
<div class=""input-group"" style=""width:200px;"">
<span class=""input-group-addon"">¥</span>
<input name=""{0}"" type=""text"" class=""form-control"" data-inputmask=""'mask': '9', 'repeat': 10, 'greedy': false"" data-mask />
<span class=""input-group-addon"">.00</span>
</div>
</td>
</tr>", csUName, comment);
} else if (col.Type == SqlDbType.DateTime || col.Type == SqlDbType.DateTime2 || col.Type == SqlDbType.DateTimeOffset) {
//日期
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td><input name=""{0}"" type=""text"" class=""datepicker"" /></td>
</tr>", csUName, comment);
} else if (col.Type == SqlDbType.Date || col.Type == SqlDbType.SmallDateTime) {
//日期控件
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td>
<div class=""input-group date"" style=""width:200px;"">
<div class=""input-group-addon""><i class=""fa fa-calendar""></i></div>
<input name=""{0}"" type=""text"" data-provide=""datepicker"" class=""form-control pull-right"" readonly />
</div>
</td>
</tr>", csUName, comment);
} else if ((lname == "img" || lname.StartsWith("img_") || lname.EndsWith("_img") ||
lname == "path" || lname.StartsWith("path_") || lname.EndsWith("_path")) && (col.Type == SqlDbType.VarChar || col.Type == SqlDbType.Char)) {
//图片字段
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td>
<input name=""{0}"" type=""text"" class=""datepicker"" style=""width:60%;"" />
<input name=""{0}_file"" type=""file"">
</td>
</tr>", csUName, comment);
} else if (col.Type == SqlDbType.Text || (col.Type == SqlDbType.VarChar && col.Length == -1)) {
//加载百度编辑器
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td><textarea name=""{0}"" style=""width:100%;height:100px;"" editor=""ueditor""></textarea></td>
</tr>", csUName, comment);
} else {
sb4.AppendFormat(@"
<tr>
<td>{1}</td>
<td><input name=""{0}"" type=""text"" class=""datepicker"" style=""width:60%;"" /></td>
</tr>", csUName, comment);
}
}
sb4.Append(str_addhtml_mn);
if (sb14.ToString().Length > 0) {
sb14.Insert(0, @"
@if (item != null) {");
sb14.Append(@"
}");
}
sb1.AppendFormat(@"@{{
Layout = """";
{0}Info item = ViewBag.item;{3}
}}
<div class=""box"">
<div class=""box-header with-border"">
<h3 class=""box-title"" id=""box-title""></h3>
</div>
<div class=""box-body"">
<div class=""table-responsive"">
<form id=""form_add"" method=""post"">
@Html.AntiForgeryToken()
<input type=""hidden"" name=""__callback"" value=""edit_callback"" />
<div>
<table cellspacing=""0"" rules=""all"" class=""table table-bordered table-hover"" border=""1"" style=""border-collapse:collapse;"">{1}{5}
<tr>
<td width=""8%""> </td>
<td><input type=""submit"" value=""@(item == null ? ""添加"" : ""更新"")"" /> <input type=""button"" value=""取消"" /></td>
</tr>
</table>
</div>
</form>
</div>
</div>
</div>
<script type=""text/javascript"">
(function () {{
top.edit_callback = function (rt) {{
if (rt.success) return top.mainViewNav.goto('./?' + new Date().getTime());
alert(rt.message);
}};
{2}
var form = $('#form_add')[0];
var item = null;
@if (item != null) {{
<text>
item = @Html.Json(item);
fillForm(form, item);{4}
</text>
}}
top.mainViewInit();
}})();
</script>", uClass_Name, sb4.ToString(), sb5.ToString(), str_fk_getlist, str_addjs_mn_initUI, sb14.ToString());
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Views\", uClass_Name, @"\Edit.cshtml"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
}
#endregion
}
#region BLL StoreProcedure.cs
int spsssss = 0;
sb1.AppendFormat(@"using System;
using System.Configuration;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
namespace {0}.BLL {{
public partial class StoreProcedure {{
", solutionName);
foreach (TableInfo table in _tables) {
if (table.IsOutput == false) continue;
if (table.Type != "P") continue;
string uClass_Name = CodeBuild.UFString(table.ClassName);
string nClass_Name = table.ClassName;
string nTable_Name = "[" + table.Owner + "].[" + table.Name + "]";
List<string> csParms = new List<string>();
List<string> csParmsNoType = new List<string>();
List<string> setOutParmsNull = new List<string>();
List<string> dimParms = new List<string>();
List<string> dimOutParms = new List<string>();
List<string> dimOutParmsInput = new List<string>();
List<string> dimOutParmsReturn = new List<string>();
int idx = 0;
foreach (ColumnInfo column in table.Columns) {
string name = CodeBuild.GetCSName(column.Name);
string csType = CodeBuild.GetCSType(column.Type);
string nameOut = string.Empty;
string sqlParm = string.Empty;
if (column.IsIdentity) {
setOutParmsNull.Add(string.Format(@"{0} = null;", name));
dimOutParms.Add(string.Format(@"SqlParameter parmO{0} = null;", idx));
dimOutParmsInput.Add(string.Format(@"parmO{0}.Direction = ParameterDirection.Output;", idx));
dimOutParmsReturn.Add(string.Format(@"if (parmO{0}.Value != DBNull.Value) {1} = ({2})parmO{0}.Value;", idx, name, csType));
nameOut = "out ";
sqlParm = "parmO" + idx++ + " = ";
}
csParms.Add(nameOut + csType + " " + name);
csParmsNoType.Add(nameOut + name);
dimParms.Add(sqlParm +
string.Format(@"new SqlParameter {{ ParameterName = ""{0}"", SqlDbType = SqlDbType.{1}, Size = {2}, Value = {3} }}", column.Name, column.Type, column.Length, name));
}
if (table.Columns.Count == 0) {
sb1.AppendFormat(@"
public static void {0}() => SqlHelper.Instance.ExecuteNonQuery(CommandType.StoredProcedure, @""{1}"");
", uClass_Name, nTable_Name);
} else {
if (setOutParmsNull.Count > 0) setOutParmsNull.Add("");
if (dimOutParms.Count > 0) dimOutParms.Add("");
if (dimOutParmsInput.Count > 0) dimOutParmsInput.Add("");
if (dimOutParmsReturn.Count > 0) dimOutParmsReturn.AddRange(new string[] { "", "" });
sb1.AppendFormat(@"
#region {0}
public static void {0}({1}) => {0}({2}, false);
public static List<object[][]> {0}Return({1}) => {0}({2}, true);
private static List<object[][]> {0}({1}, bool isReturn) {{
{3}{5}var sqlParams = new[] {{
{4}
}};
{6}List<object[][]> ds = null;
if (isReturn) ds = ExecuteArrayAll(@""{7}"", sqlParams);
else SqlHelper.Instance.ExecuteNonQuery(CommandType.StoredProcedure, @""{7}"", sqlParams);
{8}return ds;
}}
#endregion
", uClass_Name,
string.Join(", ", csParms.ToArray()),
string.Join(", ", csParmsNoType.ToArray()),
string.Join("\r\n ", setOutParmsNull.ToArray()),
string.Join(",\r\n ", dimParms.ToArray()),
string.Join("\r\n ", dimOutParms.ToArray()),
string.Join("\r\n ", dimOutParmsInput.ToArray()),
nTable_Name,
string.Join("\r\n ", dimOutParmsReturn.ToArray()));
}
spsssss++;
}
sb1.AppendFormat(@"
/// <summary>
/// 执行存储过程,可能有多个结果集返回
/// </summary>
/// <param name=""procedure"">存储过程</param>
/// <param name=""sqlParams"">参数</param>
/// <returns></returns>
public static List<object[][]> ExecuteArrayAll(string procedure, params SqlParameter[] sqlParams) {{
var ds = new List<object[][]>();
SqlHelper.Instance.ExecuteReader(dr => {{
while (true) {{
var dt = new List<object[]>();
while (dr.Read()) {{
object[] values = new object[dr.FieldCount];
dr.GetValues(values);
dt.Add(values);
}}
ds.Add(dt.ToArray());
if (dr.NextResult() == false) break;
}}
}}, CommandType.StoredProcedure, procedure, sqlParams);
return ds;
}}
}}
}}");
if (spsssss > 0) {
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, solutionName, @".db\BLL\", basicName, @"\StoreProcedure.cs"), Deflate.Compress(sb1.ToString())));
}
clearSb();
#endregion
#region BLL ItemCache.cs
sb1.AppendFormat(CONST.BLL_Build_ItemCache_cs, solutionName);
//loc1.Add(new BuildInfo(string.Concat(CONST.corePath, solutionName, @".db\BLL\", basicName, @"\ItemCache.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Model ExtensionMethods.cs 扩展方法
sb1.AppendFormat(CONST.Model_Build_ExtensionMethods_cs, solutionName, Model_Build_ExtensionMethods_cs.ToString());
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, solutionName, @".db\Model\", basicName, @"\_ExtensionMethods.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region DBUtility/SqlHelper.cs
sb1.AppendFormat(CONST.DAL_DBUtility_SqlHelper_cs, solutionName, connectionStringName);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, solutionName, @".db\DAL\DBUtility\SqlHelper.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
if (isSolution) {
#region db.csproj
sb1.AppendFormat(CONST.Db_csproj, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, solutionName, @".db\", solutionName, ".db.csproj"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Module/Test
#region TestController.cs
sb1.AppendFormat(CONST.Module_Test_Controller, solutionName, "Test");
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Module\Test\Controllers\TestController.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Init.cs
sb1.AppendFormat(CONST.Module_Test_Init_cs, solutionName, "Test");
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Module\Test\Init.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region appsettings.json
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Module\Test\appsettings.json"), Deflate.Compress("{\r\n}")));
clearSb();
#endregion
#region Views\_ViewStart.cshtml
sb1.AppendFormat(@"@{{
Layout = ""_Layout"";
}}", solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Module\Test\Views\_ViewStart.cshtml"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Views\_ViewImports.cshtml
sb1.AppendFormat(@"@using Newtonsoft.Json;
@using {0}.BLL;
@using {0}.Model;
@addTagHelper *, Microsoft.AspNetCore.Mvc.TagHelpers
", solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Module\Test\Views\_ViewImports.cshtml"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Views\Shared\_Layout.cshtml
sb1.AppendFormat(@"<!DOCTYPE html>
<html>
<head>
<meta charset=""utf-8"">
<title>@ViewBag.title</title>
<link rel=""stylesheet"" href=""//cdn.bootcss.com/semantic-ui/2.1.8/semantic.min.css"">
<link rel=""stylesheet"" href=""/css/style.css"">
<script src=""//cdn.bootcss.com/jquery/1.11.3/jquery.min.js""></script>
<script src=""//cdn.bootcss.com/semantic-ui/2.1.8/semantic.min.js""></script>
</head>
<body>
@RenderBody()
</body>
</html>", solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Module\Test\Views\Shared\_Layout.cshtml"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Test.csproj
sb1.AppendFormat(CONST.Module_csproj, "Test");
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"Module\Test\Test.csproj"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#endregion
#region .gitattributes
sb1.Append(Server.Properties.Resources._gitattributes);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"..\.gitattributes"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region .gitignore
sb1.Append(Server.Properties.Resources._gitignore);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"..\.gitignore"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region build.bat
sb1.Append(Server.Properties.Resources._build_bat);
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"..\build.bat"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region readme.md
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"..\readme.md"), Deflate.Compress(string.Format(@"# {0}
.net core模块化开发框架
本项目由 [【dotnetGen_sqlserver】](https://github.com/2881099/dotnetGen_sqlserver) 工具生成
## Module
所有业务接口约定在 Module 划分并行开发,互不依赖
Module/Admin
生成的后台管理模块,http://localhost:5001/module/Admin 可访问
Module/Test
生成的测试模块
## WebHost
WebHost 编译的时候,会将 Module/* 编译结果复制到当前目录
WebHost 只当做主引擎运行时按需加载相应的 Module
WebHost 依赖 npm ,请安装 node,并在目录执行 npm install
WebHost 依赖 gulp-cli,请执行全局安装 npm install --global gulp-cli
运行步骤:
1、打开 vs 右击 Module 目录全部编译;
2、cd WebHost && npm install && dotnet build && dotnet run
## Infrastructure
Module 里面每个子模块的依赖所需
#### xx.db
包含一切数据库操作的封装
xx.Model(实体映射)
xx.BLL(静态方法封装)
xx.DAL(数据访问)
生成名特征取数据库名首字母大写(如: 表 test 对应 xx.Model.TestInfo、xx.BLL.Test、xx.DAL.Test)
数据库设计命名习惯:所有命名(username, stats_click)、外键字段(user_id)
仅支持主键作为外键,不支持组合字段,不支持唯一键作为外键
修改数据库后,双击“./GenMs只更新db.bat”可快速覆盖,所有类都使用 partial,方便扩展亦不会被二次生成覆盖
# 数据库相关方法
## 添加记录
```csharp
// 如有 create_time 字段并且类型为日期,内部会初始化
TestInfo newitem1 = Test.Insert(Title: ""添加的标题"", Content: ""这是一段添加的内容"");
TestInfo newitem2 = Test.Insert(new TestInfo {{ Title = ""添加的标题"", Content = ""这是一段添加的内容"" }});
```
## 添加记录(批量)
```csharp
List<TestInfo> newitems1 = Test.Insert(new [] {{
new TestInfo {{ Title = ""添加的标题1"", Content = ""这是一段添加的内容1"" }},
new TestInfo {{ Title = ""添加的标题2"", Content = ""这是一段添加的内容2"" }}
}});
```
## 更新记录
```csharp
// 更新 id = 1 所有字段
Test.Update(new TestInfo {{ Id: 1, Title = ""添加的标题"", Content = ""这是一段添加的内容"", Clicks = 1 }});
// 更新 id = 1 指定字段
Test.UpdateDiy(1).SetTitle(""修改后的标题"").SetContent(""修改后的内容"").SetClicks(1).ExecuteNonQuery();
// update 表名 set clicks = clicks + 1 where id = 1
Test.UpdateDiy(1).SetClicksIncrement(1).ExecuteNonQuery();
// 使用实体层修改
new TestInfo {{ Id = 1 }}.UpdateDiy.SetClicksIncrement(1).ExecuteNonQuery();
```
## 更新记录(批量)
```csharp
//先查找 clicks 在 0 - 100 的记录
List<TestInfo> newitems1 = Test.Select.WhereClicksRange(0, 100).ToList();
// update 表名 set clicks = clicks + 1 where id in (newitems1所有id)
newitems1.UpdateDiy().SetClicksIncrement(1).ExecuteNonQuery();
```
> 警告:批量更新的方法,在事务中使用会导致死锁
## 删除记录
```csharp
// 删除 id = 1 的记录
Test.Delete(1);
```
## 按主键/唯一键获取单条记录
> appsettings可配置缓存时间,以上所有增、改、删都会删除缓存保障同步
```csharp
//按主键获取
UserInfo user1 = User.GetItem(1);
//按唯一键
UserInfo user2 = User.GetItemByUsername(""2881099@qq.com"");
// 返回 null 或 UserInfo
```
## 查询(核心)
```csharp
//BLL.表名.Select 是一个链式查询对象,几乎支持所有查询,包括 group by、inner join等等,最终 ToList ToOne Aggregate 执行 sql
List<UserInfo> users1 = User.Select.WhereUsername(""2881099@qq.com"").WherePassword(""******"").WhereStatus(正常).ToList();
//返回 new List<UserInfo>() 或 有元素的 List,永不返回 null
//返回指定列,返回List<元组>
var users2 = User.Select.WhereStatus(正常).Aggregate<(int id, string title)>(""id,title"");
//多表查询,只返回 a 表字段
var users3 = User.Select.Where(a => a.Obj_user_group.Id == a.Group_id).ToList();
//join查询,返回 a, b 表字段 ,b 表结果填充至 a.Obj_user_group 对象,类似 ef.Include
var users4 = User.Select.InnerJoin(a => a.Obj_user_group.Id == a.Group_id).ToList();
//分组查询
var users5 = User.Select.GroupBy(""group_id"").Aggregate<(int groupId, int count)>(""group_id, count(1)"");
//等等...
```
## 事务
```csharp
//错误会回滚,事务内支持所有生成的同步方法(不支持生成对应的Async方法)
var user = User.GetItem(1);
SqlHelper.Transaction(() => {{
if (user.UpdateDiy.SetAmountIncrement(-num).Where(""amount > {{0}}"", num).ExecuteNonQuery() <= 0)
throw new Exception(""余额不足"");
var order = user.AddOrder(Amount: 1, Count: num, Count_off: num);
}});
```
## 缓存
1、根据主键、唯一键缓存
BLL GetItem、GetItemBy唯一键,使用了默认缓存策略180秒,用来缓存一条记录,db 层自动维护缓存同步,例如:
```csharp
//只有第一次查询了数据库,后面99次读取redis的缓存值
UserInfo u;
for (var a = 0; a < 100; a++)
u = User.GetItemByUsername(""2881099@qq.com"");
//执行类似以下的数据变动方法,会删除redis对应的缓存
u.UpdateDiy.SetLogin_time(DateTime.Now).ExecuteNonQuery();
```
2、缓存一个查询结果
BLL Select.ToList(10, ""cache_key""),将查询结果缓存10秒,需要手工删除redis对应的键
## 读写分离
内置实现读和写分离,一个【主库】多个【从库】,【从库】的查询策略为随机方式。
若某【从库】发生故障,将切换到其他可用【从库】,若已全部不可用则使用【主库】查询。
出现故障【从库】被隔离起来间隔性的检查可用状态,以待恢复。
```csharp
Topic.Select.WhereId(1).ToOne(); //读【从库】(默认)
Topic.Select.Master().WhereId(1).ToOne(); //读【主库】
```
# 生成规则
## 不会生成
* 没有主键,不会生成 增、改、删 方法
* 有自增字段,不会生成 批量 Insert 方法
## 特别规则
* 字段类型 string 相关并且长度 <= 300,会生成
> 表.Select.Where字段Like
* 95%的数据类型被支持
", solutionName))));
clearSb();
#endregion
#region GenMs只更新db.bat
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"..\GenMs只更新db.bat"), string.IsNullOrEmpty(_client.Username) ? Deflate.Compress(string.Format(@"
GenMs {0} -D {3} -N {4}", _client.Server, _client.Username, _client.Password, _client.Database, solutionName)) : Deflate.Compress(string.Format(@"
GenMs {0} -U {1} -P {2} -D {3} -N {4}", _client.Server, _client.Username, _client.Password, _client.Database, solutionName))));
clearSb();
#endregion
}
if (isMakeAdmin) {
#region WebHost
#region Extensions/StarupExtensions.cs
sb1.AppendFormat(CONST.WebHost_Extensions_StarupExtensions_cs, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"\Extensions\StarupExtensions.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Extensions/SwaggerExtensions.cs
sb1.AppendFormat(CONST.WebHost_Extensions_SwaggerExtensions_cs, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"\Extensions\SwaggerExtensions.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region .gitignore
sb1.Append(Server.Properties.Resources.WebHost_gitignore);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @".gitignore"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region appsettings.json
sb1.AppendFormat(CONST.WebHost_appsettings_json, solutionName, string.IsNullOrEmpty(_client.Username) ? $"Data Source={_client.Server};Integrated Security=True;Initial Catalog={_client.Database}" : $"Data Source={_client.Server};User ID={_client.Username};Password={_client.Password};Initial Catalog={_client.Database}");
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"appsettings.json"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region gulpfile.js
sb1.Append(Server.Properties.Resources.WebHost_gulpfile_js);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"gulpfile.js"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region nlog.config
sb1.AppendFormat(CONST.WebHost_nlog_config, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"nlog.config"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region package.json
sb1.Append(Server.Properties.Resources.WebHost_package_json);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"package.json"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Program.cs
sb1.AppendFormat(CONST.WebHost_Program_cs, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"Program.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Startup.cs
sb1.AppendFormat(CONST.WebHost_Startup_cs, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"Startup.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region web.config
sb1.Append(Server.Properties.Resources.WebHost_web_config);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"web.config"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region WebHost.csproj
sb1.AppendFormat(CONST.WebHost_csproj, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.webHostPath, @"WebHost.csproj"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#endregion
#region Module/Admin
#region SysController.cs
sb1.AppendFormat(CONST.Module_Admin_Controllers_SysController, solutionName, string.Join(string.Empty, admin_controllers_syscontroller_init_sysdir.ToArray()));
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Controllers\SysController.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region LoginController.cs
sb1.AppendFormat(CONST.Module_Admin_Controllers_LoginController, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Controllers\LoginController.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Views\Admin\Login\Index.cshtml
sb1.AppendFormat(CONST.Module_Admin_Views_Login_Index_cshtml, solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Views\Login\Index.cshtml"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region wwwroot\index.html
sb1.AppendFormat(CONST.Module_Admin_wwwroot_index_html, solutionName, wwwroot_sitemap);
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"wwwroot\index.html"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Init.cs
sb1.AppendFormat(CONST.Module_Test_Init_cs, solutionName, "Admin");
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Init.cs"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region appsettings.json
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"appsettings.json"), Deflate.Compress("{\r\n}")));
clearSb();
#endregion
#region Views\_ViewStart.cshtml
sb1.AppendFormat(@"@{{
Layout = ""_Layout"";
}}", solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Views\_ViewStart.cshtml"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Views\_ViewImports.cshtml
sb1.AppendFormat(@"@using Newtonsoft.Json;
@using {0}.BLL;
@using {0}.Model;
@addTagHelper *, Microsoft.AspNetCore.Mvc.TagHelpers
", solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Views\_ViewImports.cshtml"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Views\Shared\_Layout.cshtml
sb1.AppendFormat(@"<!DOCTYPE html>
<html>
<head>
<meta charset=""utf-8"">
<title>@ViewBag.title</title>
<link rel=""stylesheet"" href=""//cdn.bootcss.com/semantic-ui/2.1.8/semantic.min.css"">
<link rel=""stylesheet"" href=""/css/style.css"">
<script src=""//cdn.bootcss.com/jquery/1.11.3/jquery.min.js""></script>
<script src=""//cdn.bootcss.com/semantic-ui/2.1.8/semantic.min.js""></script>
</head>
<body>
@RenderBody()
</body>
</html>", solutionName);
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Views\Shared\_Layout.cshtml"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#region Admin.csproj
sb1.AppendFormat(CONST.Module_csproj, "Admin");
loc1.Add(new BuildInfo(string.Concat(CONST.moduleAdminPath, @"Admin.csproj"), Deflate.Compress(sb1.ToString())));
clearSb();
#endregion
#endregion
}
if (isDownloadRes) {
loc1.Add(new BuildInfo(string.Concat(CONST.corePath, @"..\htm.zip"), Server.Properties.Resources.htm_zip));
}
GC.Collect();
return loc1;
}
}
}
|
2881099/dotnetGen_sqlserver | 18,612 | Server/CodeBuild(Lib).cs | using System;
using System.Collections.Generic;
using System.Data;
using System.Text;
using System.Text.RegularExpressions;
using Model;
namespace Server {
internal partial class CodeBuild {
//protected static string GetEntryName(string name) {
//}
protected static SqlDbType GetDBType(string strType) {
switch (strType.ToLower()) {
case "bit": return SqlDbType.Bit;
case "tinyint": return SqlDbType.TinyInt;
case "smallint": return SqlDbType.SmallInt;
case "int": return SqlDbType.Int;
case "bigint": return SqlDbType.BigInt;
case "numeric":
case "decimal": return SqlDbType.Decimal;
case "smallmoney": return SqlDbType.SmallMoney;
case "money": return SqlDbType.Money;
case "float": return SqlDbType.Float;
case "real": return SqlDbType.Real;
case "date": return SqlDbType.Date;
case "datetime":
case "datetime2": return SqlDbType.DateTime;
case "datetimeoffset": return SqlDbType.DateTimeOffset;
case "smalldatetime": return SqlDbType.SmallDateTime;
case "time": return SqlDbType.Time;
case "char": return SqlDbType.Char;
case "varchar": return SqlDbType.VarChar;
case "text": return SqlDbType.Text;
case "nchar": return SqlDbType.NChar;
case "nvarchar": return SqlDbType.NVarChar;
case "ntext": return SqlDbType.NText;
case "binary": return SqlDbType.Binary;
case "varbinary": return SqlDbType.VarBinary;
case "image": return SqlDbType.Image;
case "timestamp": return SqlDbType.Timestamp;
case "uniqueidentifier": return SqlDbType.UniqueIdentifier;
case "xml": return SqlDbType.Xml;
default: return SqlDbType.Variant;
}
}
protected static string GetDbToCsConvert(SqlDbType type) {
switch (type) {
case SqlDbType.Bit: return "(bool?)";
case SqlDbType.TinyInt: return "(byte?)";
case SqlDbType.SmallInt: return "(short?)";
case SqlDbType.Int: return "(int?)";
case SqlDbType.BigInt: return "(long?)";
case SqlDbType.Decimal:
case SqlDbType.SmallMoney:
case SqlDbType.Money: return "(decimal?)";
case SqlDbType.Float: return "(double?)";
case SqlDbType.Real: return "(float?)";
case SqlDbType.Date:
case SqlDbType.DateTime:
case SqlDbType.DateTime2:
case SqlDbType.SmallDateTime: return "(DateTime?)";
case SqlDbType.DateTimeOffset: return "(DateTimeOffset?)";
case SqlDbType.Time: return "(TimeSpan?)";
case SqlDbType.Char:
case SqlDbType.VarChar:
case SqlDbType.Text:
case SqlDbType.NChar:
case SqlDbType.NVarChar:
case SqlDbType.NText: return "(string)";
case SqlDbType.Binary:
case SqlDbType.VarBinary:
case SqlDbType.Image: return "(byte[])";
case SqlDbType.Timestamp: return "(byte[])";
case SqlDbType.UniqueIdentifier: return "(Guid?)";
case SqlDbType.Xml: return "(string)";
default: return "";
}
}
protected static string GetCSTypeValue(SqlDbType type) {
switch (type) {
case SqlDbType.Bit:
case SqlDbType.TinyInt:
case SqlDbType.SmallInt:
case SqlDbType.Int:
case SqlDbType.BigInt:
case SqlDbType.Decimal:
case SqlDbType.SmallMoney:
case SqlDbType.Money:
case SqlDbType.Float:
case SqlDbType.Real:
case SqlDbType.Date:
case SqlDbType.DateTime:
case SqlDbType.DateTime2:
case SqlDbType.SmallDateTime: return "{0}.Value";
case SqlDbType.DateTimeOffset: return "{0}.Value";
case SqlDbType.Time: return "{0}.Value";
case SqlDbType.Char:
case SqlDbType.VarChar:
case SqlDbType.Text:
case SqlDbType.NChar:
case SqlDbType.NVarChar:
case SqlDbType.NText: return "{0}";
case SqlDbType.Binary:
case SqlDbType.VarBinary:
case SqlDbType.Image: return "string.Concat({0})";
case SqlDbType.Timestamp: return "string.Concat({0})";
case SqlDbType.UniqueIdentifier: return "{0}.Value";
case SqlDbType.Xml: return "{0}";
default: return "string.Concat({0})";
}
}
protected static string GetCSType(SqlDbType type) {
switch (type) {
case SqlDbType.Bit: return "bool?";
case SqlDbType.TinyInt: return "byte?";
case SqlDbType.SmallInt: return "short?";
case SqlDbType.Int: return "int?";
case SqlDbType.BigInt: return "long?";
case SqlDbType.Decimal:
case SqlDbType.SmallMoney:
case SqlDbType.Money: return "decimal?";
case SqlDbType.Float: return "double?";
case SqlDbType.Real: return "float?";
case SqlDbType.Date:
case SqlDbType.DateTime:
case SqlDbType.DateTime2:
case SqlDbType.SmallDateTime: return "DateTime?";
case SqlDbType.DateTimeOffset: return "DateTimeOffset?";
case SqlDbType.Time: return "TimeSpan?";
case SqlDbType.Char:
case SqlDbType.VarChar:
case SqlDbType.Text:
case SqlDbType.NChar:
case SqlDbType.NVarChar:
case SqlDbType.NText: return "string";
case SqlDbType.Binary:
case SqlDbType.VarBinary:
case SqlDbType.Image: return "byte[]";
case SqlDbType.Timestamp: return "byte[]";
case SqlDbType.UniqueIdentifier: return "Guid?";
case SqlDbType.Xml: return "string";
default: return "object";
}
}
protected static string GetDataReaderMethod(SqlDbType type) {
switch (type) {
case SqlDbType.Bit: return "GetBoolean";
case SqlDbType.TinyInt: return "GetByte";
case SqlDbType.SmallInt: return "GetInt16";
case SqlDbType.Int: return "GetInt32";
case SqlDbType.BigInt: return "GetInt64";
case SqlDbType.Decimal:
case SqlDbType.SmallMoney:
case SqlDbType.Money: return "GetDecimal";
case SqlDbType.Float: return "GetDouble";
case SqlDbType.Real: return "GetFloat";
case SqlDbType.Date:
case SqlDbType.DateTime:
case SqlDbType.DateTime2:
case SqlDbType.SmallDateTime: return "GetDateTime";
case SqlDbType.DateTimeOffset: return "GetDateTimeOffset";
case SqlDbType.Time: return "GetTimeSpan";
case SqlDbType.Char:
case SqlDbType.VarChar:
case SqlDbType.Text:
case SqlDbType.NChar:
case SqlDbType.NVarChar:
case SqlDbType.NText: return "GetString";
case SqlDbType.Binary:
case SqlDbType.VarBinary:
case SqlDbType.Image: return "GetBytes";
case SqlDbType.Timestamp: return "GetBytes";
case SqlDbType.UniqueIdentifier: return "GetGuid";
case SqlDbType.Xml: return "GetValue";
default: return "GetValue";
}
}
protected static string GetToStringFieldConcat(ColumnInfo columnInfo) {
switch (columnInfo.Type) {
case SqlDbType.Bit: return string.Format("{0} == null ? \"null\" : ({0} == true ? \"true\" : \"false\")", CodeBuild.UFString(columnInfo.Name));
case SqlDbType.TinyInt:
case SqlDbType.SmallInt:
case SqlDbType.Int:
case SqlDbType.BigInt:
case SqlDbType.Decimal:
case SqlDbType.SmallMoney:
case SqlDbType.Money:
case SqlDbType.Float:
//string.Format("", {0} : {{0}}"", {0} == null ? ""null"" : {0}.ToString())
case SqlDbType.Real: return string.Format("{0} == null ? \"null\" : {0}.ToString()", CodeBuild.UFString(columnInfo.Name));
// "'\" + _" + CodeBuild.UFString(columnInfo.Name) + " +\r\n \"'";
case SqlDbType.Date:
case SqlDbType.DateTime:
case SqlDbType.DateTime2:
//string.Format("\", {0} == null ? \"null\" : string.Concat(\"Date(\", {0}.Value.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds, \")\"), \r\n \"", CodeBuild.UFString(columnInfo.Name));
case SqlDbType.SmallDateTime: return string.Format("{0} == null ? \"null\" : string.Concat(\"\", {0}.Value.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds, \"\")", CodeBuild.UFString(columnInfo.Name));
case SqlDbType.DateTimeOffset: return string.Format("{0} == null ? \"null\" : string.Concat(\"\", {0}.Value.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds, \"\")", CodeBuild.UFString(columnInfo.Name));
case SqlDbType.Time: return string.Format("{0} == null ? \"null\" : string.Concat(\"\", {0}.Value.Ticks, \"\")", CodeBuild.UFString(columnInfo.Name));
case SqlDbType.Binary:
case SqlDbType.VarBinary:
// return "\" + (_" + CodeBuild.UFString(columnInfo.Name) + " == null ? null : Encoding.UTF8.GetString(_" + CodeBuild.UFString(columnInfo.Name) + ")) +\r\n \"";
case SqlDbType.Image: return string.Format("{0} == null ? \"null\" : Convert.ToBase64String({0})", CodeBuild.UFString(columnInfo.Name));
//return "'\" + _" + CodeBuild.UFString(columnInfo.Name) + " +\r\n \"'";
case SqlDbType.UniqueIdentifier: return string.Format("{0} == null ? \"null\" : {0}.ToString()", CodeBuild.UFString(columnInfo.Name));
case SqlDbType.Xml:
case SqlDbType.Char:
case SqlDbType.VarChar:
case SqlDbType.Text:
case SqlDbType.NChar:
case SqlDbType.NVarChar:
// return "'\" + (_" + CodeBuild.UFString(columnInfo.Name) + " == null ? string.Empty : _" + CodeBuild.UFString(columnInfo.Name) + ".Replace(\"\\\\\", \"\\\\\\\\\").Replace(\"\\r\\n\", \"\\\\r\\\\n\").Replace(\"'\", \"\\\\'\")) + \r\n \"'";
case SqlDbType.NText: return string.Format("{0} == null ? \"null\" : string.Format(\"'{{0}}'\", {0}.Replace(\"\\\\\", \"\\\\\\\\\").Replace(\"\\r\\n\", \"\\\\r\\\\n\").Replace(\"'\", \"\\\\'\"))", CodeBuild.UFString(columnInfo.Name));
case SqlDbType.Timestamp: return string.Format("{0} == null ? \"null\" : Convert.ToBase64String({0})", CodeBuild.UFString(columnInfo.Name));
default: return string.Format("{0} == null ? \"null\" : {0}.ToString()", CodeBuild.UFString(columnInfo.Name));
}
}
/// <summary>
/// Emits the C# expression fragment used when packing a column's value into a
/// Hashtable in generated code. Dates become epoch milliseconds, times become
/// ticks, binary blobs become Base64; everything else is the bare member
/// reference (upper-first form of the column name).
/// </summary>
/// <param name="columnInfo">Column whose SqlDbType decides the conversion.</param>
/// <returns>A C# source-code snippet (a string to be written into generated code).</returns>
protected static string GetToHashtableFieldConcat(ColumnInfo columnInfo) {
	// Upper-first member name, e.g. "UserName". The original switch wrapped this
	// in no-op string.Format("{0}", ...) calls for most branches; the value is
	// identical without them.
	string name = CodeBuild.UFString(columnInfo.Name);
	switch (columnInfo.Type) {
		case SqlDbType.Date:
		case SqlDbType.DateTime:
		case SqlDbType.DateTime2:
		case SqlDbType.SmallDateTime:
		case SqlDbType.DateTimeOffset:
			// Milliseconds since the Unix epoch (1970-01-01).
			return name + ".Value.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds";
		case SqlDbType.Time:
			// TimeSpan serialized as raw ticks.
			return name + ".Value.Ticks";
		case SqlDbType.Binary:
		case SqlDbType.VarBinary:
		case SqlDbType.Image:
			return "Convert.ToBase64String(" + name + ")";
		default:
			// Numeric, string, Guid, Timestamp and any unlisted type: emit the
			// member reference unchanged (same result as every original branch).
			return name;
	}
}
/// <summary>
/// Emits the C# expression fragment used by the generated entity's
/// Stringify/ToString support for one column. Every fragment null-guards the
/// backing field ("_Name") and renders "null" when it is unset.
/// </summary>
/// <param name="columnInfo">Column whose SqlDbType decides the rendering.</param>
/// <returns>A C# source-code snippet (a string to be written into generated code).</returns>
protected static string GetToStringStringify(ColumnInfo columnInfo)
{
	// The generated entities keep values in fields named "_" + upper-first name.
	// The original recomputed this concatenation twice per branch; hoist it once.
	string field = "_" + CodeBuild.UFString(columnInfo.Name);
	string guard = field + " == null ? \"null\" : ";
	switch (columnInfo.Type)
	{
		case SqlDbType.Bit:
			// Booleans serialize as "1"/"0".
			return guard + "(" + field + " == true ? \"1\" : \"0\")";
		case SqlDbType.TinyInt:
		case SqlDbType.SmallInt:
		case SqlDbType.Int:
		case SqlDbType.BigInt:
		case SqlDbType.Decimal:
		case SqlDbType.SmallMoney:
		case SqlDbType.Money:
		case SqlDbType.Float:
		case SqlDbType.Real:
		case SqlDbType.UniqueIdentifier:
			// Numeric types and Guid share the plain ToString() rendering.
			return guard + field + ".ToString()";
		case SqlDbType.Date:
		case SqlDbType.DateTime:
		case SqlDbType.DateTime2:
		case SqlDbType.SmallDateTime:
		case SqlDbType.DateTimeOffset:
		case SqlDbType.Time:
			// Date/time values serialize as raw ticks (round-tripped by GetStringifyParse).
			return guard + field + ".Value.Ticks.ToString()";
		case SqlDbType.Binary:
		case SqlDbType.VarBinary:
		case SqlDbType.Image:
		case SqlDbType.Timestamp:
			return guard + "Convert.ToBase64String(" + field + ")";
		case SqlDbType.Xml:
		case SqlDbType.Char:
		case SqlDbType.VarChar:
		case SqlDbType.Text:
		case SqlDbType.NChar:
		case SqlDbType.NVarChar:
		case SqlDbType.NText:
			// '|' is the stringify field separator; escape it via StringifySplit.
			return guard + field + ".Replace(\"|\", StringifySplit)";
		default:
			return guard + field + ".ToString().Replace(\"|\", StringifySplit)";
	}
}
/// <summary>
/// Emits the C# parse-expression template (with a "{0}" placeholder for the
/// raw string) that reverses <c>GetToStringStringify</c> for the given type.
/// </summary>
/// <param name="type">Column SqlDbType to produce a parser template for.</param>
/// <returns>A C# source-code snippet containing a "{0}" placeholder.</returns>
protected static string GetStringifyParse(SqlDbType type)
{
	switch (type)
	{
		case SqlDbType.Bit: return "{0} == \"1\"";
		case SqlDbType.TinyInt: return "byte.Parse({0})";
		case SqlDbType.SmallInt: return "short.Parse({0})";
		case SqlDbType.Int: return "int.Parse({0})";
		case SqlDbType.BigInt: return "long.Parse({0})";
		case SqlDbType.Decimal:
		case SqlDbType.SmallMoney:
		case SqlDbType.Money: return "decimal.Parse({0})";
		case SqlDbType.Float: return "double.Parse({0})";
		case SqlDbType.Real: return "float.Parse({0})";
		case SqlDbType.Date:
		case SqlDbType.DateTime:
		case SqlDbType.DateTime2:
		// Stored as ticks by the stringify side; rebuild the DateTime from them.
		case SqlDbType.SmallDateTime: return "new DateTime(long.Parse({0}))";
		case SqlDbType.DateTimeOffset: return "new DateTimeOffset(new DateTime(long.Parse({0})))";
		case SqlDbType.Time: return "TimeSpan.Parse({0})";
		case SqlDbType.Xml:
		case SqlDbType.Char:
		case SqlDbType.VarChar:
		case SqlDbType.Text:
		case SqlDbType.NChar:
		case SqlDbType.NVarChar:
		// Undo the '|' escaping applied by the stringify side. Xml previously had
		// its own case with an identical body; merged here.
		case SqlDbType.NText: return "{0}.Replace(StringifySplit, \"|\")";
		case SqlDbType.Binary:
		case SqlDbType.VarBinary:
		case SqlDbType.Image:
		// Timestamp previously duplicated the Base64 body; merged here.
		case SqlDbType.Timestamp: return "Convert.FromBase64String({0})";
		case SqlDbType.UniqueIdentifier: return "Guid.Parse({0})";
		default: return "{0}";
	}
}
/// <summary>
/// Upper-first: replaces every non-word character with '_' (so the result is a
/// legal identifier fragment) and upper-cases the first character.
/// </summary>
/// <param name="text">Raw name (e.g. a column name); may be empty.</param>
/// <returns>Sanitized name with an upper-case first letter.</returns>
protected static string UFString(string text) {
	text = Regex.Replace(text, @"[^\w]", "_");
	// Empty or single-character input: ToUpper covers both.
	if (text.Length <= 1) return text.ToUpper();
	// Substring(1) is equivalent to the original Substring(1, Length - 1).
	return text.Substring(0, 1).ToUpper() + text.Substring(1);
}
/// <summary>
/// Lower-first: returns <paramref name="text"/> with its first character
/// lower-cased; no sanitization is applied (unlike <c>UFString</c>).
/// </summary>
protected static string LFString(string text) {
	if (text.Length > 1) return text.Substring(0, 1).ToLower() + text.Substring(1);
	// Empty or single-character input.
	return text.ToLower();
}
/// <summary>
/// Converts a SQL parameter name into a valid C# identifier: strips a leading
/// '@', replaces non-word characters with '_' and prefixes '_' when the result
/// does not start with a letter.
/// </summary>
protected static string GetCSName(string name) {
	// Drop the SQL parameter marker, then sanitize to identifier characters.
	string cs = Regex.Replace(name.TrimStart('@'), @"[^\w]", "_");
	if (char.IsLetter(cs, 0)) return cs;
	return "_" + cs;
}
/// <summary>
/// Emits one "new SqlParameter { ... }, \r\n" source fragment for a column,
/// prefixed by <paramref name="place"/> (indentation) and reading its value
/// from <paramref name="value"/> + upper-first column name (e.g. "item.Name").
/// Returns "" for a null column. The trailing ", \r\n" (4 chars) is trimmed by
/// the callers that assemble lists.
/// </summary>
protected static string AppendParameter(ColumnInfo columnInfo, string value, string place) {
	if (columnInfo == null) return "";
	// {{ / }} are literal braces in the generated object initializer; a leading
	// "@" is added only when the column name does not already carry one.
	string returnValue = place + string.Format("new SqlParameter {{ ParameterName = \"{0}{1}\", SqlDbType = SqlDbType.{2}, Size = {3}, Value = {4} }}, \r\n",
	columnInfo.Name.StartsWith("@") ? null : "@", columnInfo.Name, columnInfo.Type,
	columnInfo.Length.ToString(),
	//columnInfo.Type == SqlDbType.Image ? string.Format("{0} == null ? 0 : {0}.Length", value + Lib.UFString(columnInfo.Name)) : columnInfo.Length.ToString(),
	value + CodeBuild.UFString(columnInfo.Name));
	return returnValue;
}
/// <summary>
/// Concatenates one SqlParameter fragment per column and strips the trailing
/// ", \r\n" (4 chars) left by the final entry. Returns "" for an empty result.
/// </summary>
protected static string AppendParameters(List<ColumnInfo> columnInfos, string value, string place) {
	string result = "";
	for (int i = 0; i < columnInfos.Count; i++) {
		result += AppendParameter(columnInfos[i], value, place);
	}
	if (result.Length == 0) return "";
	return result.Substring(0, result.Length - 4);
}
/// <summary>Convenience overload: no value prefix in front of the member name.</summary>
protected static string AppendParameters(List<ColumnInfo> columnInfos, string place) {
	string noPrefix = "";
	return AppendParameters(columnInfos, noPrefix, place);
}
/// <summary>
/// Table overload: one parameter per column, each value read from an entity
/// instance named "item" in the generated code.
/// </summary>
protected static string AppendParameters(TableInfo table, string place) {
	List<ColumnInfo> columns = table.Columns;
	return AppendParameters(columns, "item.", place);
}
/// <summary>
/// Single-column variant: emits one SqlParameter fragment and strips its
/// trailing ", \r\n" (4 chars).
/// </summary>
protected static string AppendParameters(ColumnInfo columnInfo, string place) {
	string entry = AppendParameter(columnInfo, "", place);
	if (entry.Length == 0) return "";
	return entry.Substring(0, entry.Length - 4);
}
/// <summary>
/// Emits a single argument reference for the generated "addslashes"-style call,
/// e.g. "item.UserName, ". Returns "" for a null column; the trailing ", "
/// (2 chars) is trimmed by the list overloads.
/// </summary>
protected static string AppendAddslashes(ColumnInfo columnInfo, string value, string place) {
	if (columnInfo == null) return "";
	return string.Concat(place, value, CodeBuild.UFString(columnInfo.Name), ", ");
}
/// <summary>
/// Concatenates one argument reference per column and strips the trailing
/// ", " (2 chars) left by the final entry. Returns "" for an empty result.
/// </summary>
protected static string AppendAddslashes(List<ColumnInfo> columnInfos, string value, string place) {
	string result = "";
	for (int i = 0; i < columnInfos.Count; i++) {
		result += AppendAddslashes(columnInfos[i], value, place);
	}
	if (result.Length == 0) return "";
	return result.Substring(0, result.Length - 2);
}
/// <summary>Convenience overload: no value prefix in front of the member name.</summary>
protected static string AppendAddslashes(List<ColumnInfo> columnInfos, string place) {
	string noPrefix = "";
	return AppendAddslashes(columnInfos, noPrefix, place);
}
/// <summary>
/// Table overload: one argument per column, each read from an entity instance
/// named "item" in the generated code.
/// </summary>
protected static string AppendAddslashes(TableInfo table, string place) {
	List<ColumnInfo> columns = table.Columns;
	return AppendAddslashes(columns, "item.", place);
}
/// <summary>
/// Single-column variant: emits one argument reference and strips its trailing
/// ", " (2 chars).
/// </summary>
protected static string AppendAddslashes(ColumnInfo columnInfo, string place) {
	// BUG FIX: this overload previously delegated to AppendParameter — a
	// copy/paste slip from AppendParameters(ColumnInfo, string) — whose entries
	// end in ", \r\n" (4 chars), yet it trimmed only 2 characters. The 2-char
	// trim matches the ", " suffix produced by AppendAddslashes, which mirrors
	// every other overload in this family, so delegate there as intended.
	string returnValue = AppendAddslashes(columnInfo, "", place);
	return returnValue == "" ? "" : returnValue.Substring(0, returnValue.Length - 2);
}
}
}
|
2881099/dotnetGen_postgresql | 5,086 | MakeCode/MakeCode.csproj | <?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="14.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.50727</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{5FDB8603-9878-49E3-9570-04734FDF1FDE}</ProjectGuid>
<OutputType>Exe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>MakeCode</RootNamespace>
<AssemblyName>MakeCode</AssemblyName>
<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
<FileUpgradeFlags>
</FileUpgradeFlags>
<UpgradeBackupLocation>
</UpgradeBackupLocation>
<OldToolsVersion>2.0</OldToolsVersion>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup>
<StartupObject />
</PropertyGroup>
<ItemGroup>
<Reference Include="Mono.Security, Version=2.0.0.0, Culture=neutral, PublicKeyToken=0738eb9f132ed756, processorArchitecture=MSIL">
<SpecificVersion>False</SpecificVersion>
<HintPath>..\Mono.Security.dll</HintPath>
</Reference>
<Reference Include="Npgsql, Version=2.2.7.0, Culture=neutral, PublicKeyToken=5d8b90d52f46fda7, processorArchitecture=MSIL">
<SpecificVersion>False</SpecificVersion>
<HintPath>..\Npgsql.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Data" />
<Reference Include="System.Drawing" />
<Reference Include="System.Web" />
<Reference Include="System.Windows.Forms" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="ClientSocket.cs" />
<Compile Include="ConsoleApp.cs" />
<Compile Include="FrmMain.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="FrmMain.designer.cs">
<DependentUpon>FrmMain.cs</DependentUpon>
</Compile>
<Compile Include="FrmView.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="FrmView.designer.cs">
<DependentUpon>FrmView.cs</DependentUpon>
</Compile>
<Compile Include="Lib.cs" />
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<EmbeddedResource Include="FrmMain.resx">
<DependentUpon>FrmMain.cs</DependentUpon>
<SubType>Designer</SubType>
</EmbeddedResource>
<EmbeddedResource Include="FrmView.resx">
<DependentUpon>FrmView.cs</DependentUpon>
<SubType>Designer</SubType>
</EmbeddedResource>
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
<DesignTime>True</DesignTime>
</Compile>
<None Include="app.config" />
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<None Include="Properties\vssver2.scc" />
<None Include="Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
<Compile Include="Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
<DependentUpon>Settings.settings</DependentUpon>
</Compile>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj">
<Project>{F0054101-9AC9-4E0E-9E78-44EA89FC5C19}</Project>
<Name>Common</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> |
2881099/dotnetGen_mysql | 898 | Server/Properties/AssemblyInfo.cs | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// 有关程序集的常规信息通过下列属性集
// 控制。更改这些属性值可修改
// 与程序集关联的信息。
[assembly: AssemblyTitle("Server")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Server")]
[assembly: AssemblyCopyright("版权所有 (C) 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// 将 ComVisible 设置为 false 使此程序集中的类型
// 对 COM 组件不可见。如果需要从 COM 访问此程序集中的类型,
// 则将该类型上的 ComVisible 属性设置为 true。
[assembly: ComVisible(false)]
// 如果此项目向 COM 公开,则下列 GUID 用于类型库的 ID
[assembly: Guid("1963fe13-0957-46b8-b20c-593eb34a5897")]
// 程序集的版本信息由下面四个值组成:
//
// 主版本
// 次版本
// 内部版本号
// 修订号
//
// 可以指定所有这些值,也可以使用“修订号”和“内部版本号”的默认值,
// 方法是按如下所示使用“*”:
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
2881099/dotnetGen_mysql | 14,089 | Server/Properties/Resources.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace Server.Properties {
using System;
/// <summary>
/// 一个强类型的资源类,用于查找本地化的字符串等。
/// </summary>
// 此类是由 StronglyTypedResourceBuilder
// 类通过类似于 ResGen 或 Visual Studio 的工具自动生成的。
// 若要添加或移除成员,请编辑 .ResX 文件,然后重新运行 ResGen
// (以 /str 作为命令选项),或重新生成 VS 项目。
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "15.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class Resources {
private static global::System.Resources.ResourceManager resourceMan;
private static global::System.Globalization.CultureInfo resourceCulture;
[global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal Resources() {
}
/// <summary>
/// 返回此类使用的缓存的 ResourceManager 实例。
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Resources.ResourceManager ResourceManager {
get {
if (object.ReferenceEquals(resourceMan, null)) {
global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Server.Properties.Resources", typeof(Resources).Assembly);
resourceMan = temp;
}
return resourceMan;
}
}
/// <summary>
/// 使用此强类型资源类,为所有资源查找
/// 重写当前线程的 CurrentUICulture 属性。
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Globalization.CultureInfo Culture {
get {
return resourceCulture;
}
set {
resourceCulture = value;
}
}
/// <summary>
/// 查找类似
///rem dotnet restore
///
///rem cd src/Module/Admin && dotnet build && cd ../../../
///rem cd src/Module/Order && dotnet build && cd ../../../
///rem cd src/Module/Search && dotnet build && cd ../../../
///
///dotnet build
///
///rem cd src/WebHost && npm install && npm install --global gulp-cli && gulp copy-module
///cd src/WebHost && gulp copy-module && cd ../../
///
///echo "Then type 'dotnet run' in src/WebHost to start the app."
///
///pause 的本地化字符串。
/// </summary>
internal static string _build_bat {
get {
return ResourceManager.GetString("_build_bat", resourceCulture);
}
}
/// <summary>
/// 查找类似 ###############################################################################
///# Set default behavior to automatically normalize line endings.
///###############################################################################
///* text=auto
///
///###############################################################################
///# Set default behavior for command prompt diff.
///#
///# This is need for earlier builds of msysgit that does not have it on by
///# default for csharp files.
///# Note: This is only used by comma [字符串的其余部分被截断]"; 的本地化字符串。
/// </summary>
internal static string _gitattributes {
get {
return ResourceManager.GetString("_gitattributes", resourceCulture);
}
}
/// <summary>
/// 查找类似 ## Ignore Visual Studio temporary files, build results, and
///## files generated by popular Visual Studio add-ons.
///
///# User-specific files
///*.suo
///*.user
///*.userosscache
///*.sln.docstates
///
///# User-specific files (MonoDevelop/Xamarin Studio)
///*.userprefs
///
///# Build results
///[Dd]ebug/
///[Dd]ebugPublic/
///[Rr]elease/
///[Rr]eleases/
///[Xx]64/
///[Xx]86/
///[Bb]uild/
///bld/
///[Bb]in/
///[Oo]bj/
///
///# Visual Studio 2015 cache/options directory
///.vs/
///# Uncomment if you have tasks that create the project's static files in wwwr [字符串的其余部分被截断]"; 的本地化字符串。
/// </summary>
internal static string _gitignore {
get {
return ResourceManager.GetString("_gitignore", resourceCulture);
}
}
/// <summary>
/// 查找 System.Byte[] 类型的本地化资源。
/// </summary>
internal static byte[] htm_zip {
get {
object obj = ResourceManager.GetObject("htm_zip", resourceCulture);
return ((byte[])(obj));
}
}
/// <summary>
/// 查找类似 using Microsoft.AspNetCore.Cors;
///using Microsoft.AspNetCore.Http;
///using Microsoft.AspNetCore.Mvc;
///using Microsoft.AspNetCore.Mvc.Filters;
///using Microsoft.Extensions.Logging;
///using Newtonsoft.Json;
///using System;
///using System.Collections;
///using System.Linq;
///using System.Threading.Tasks;
///
///[ServiceFilter(typeof(CustomExceptionFilter)), EnableCors("cors_all")]
///public partial class BaseController : Controller {
/// public ILogger _logger;
/// public ISession Session { get { return HttpContext.Session; } } [字符串的其余部分被截断]"; 的本地化字符串。
/// </summary>
internal static string Infrastructure_Controllers_BaseController_cs {
get {
return ResourceManager.GetString("Infrastructure_Controllers_BaseController_cs", resourceCulture);
}
}
/// <summary>
/// 查找类似 using Microsoft.AspNetCore.Hosting;
///using Microsoft.AspNetCore.Http;
///using Microsoft.AspNetCore.Mvc;
///using Microsoft.AspNetCore.Mvc.Filters;
///using Microsoft.Extensions.Configuration;
///using Microsoft.Extensions.Logging;
///using System;
///using System.Collections.Generic;
///using System.Security.Cryptography;
///using System.Text;
///using System.Threading.Tasks;
///
///public class CustomExceptionFilter : Attribute, IExceptionFilter {
/// private ILogger _logger = null;
/// private IConfiguration _cfg = null;
/// privat [字符串的其余部分被截断]"; 的本地化字符串。
/// </summary>
internal static string Infrastructure_Controllers_CustomExceptionFilter_cs {
get {
return ResourceManager.GetString("Infrastructure_Controllers_CustomExceptionFilter_cs", resourceCulture);
}
}
/// <summary>
/// 查找类似 using Newtonsoft.Json;
///using System;
///using System.Text.RegularExpressions;
///
///public static class GlobalExtensions {
/// public static object Json(this Microsoft.AspNetCore.Mvc.Rendering.IHtmlHelper html, object obj) {
/// string str = JsonConvert.SerializeObject(obj);
/// if (!string.IsNullOrEmpty(str)) str = Regex.Replace(str, @"<(/?script[\s>])", "<\"+\"$1", RegexOptions.IgnoreCase);
/// if (html == null) return str;
/// return html.Raw(str);
/// }
///
/// /// <summary>
/// /// 转格林时间,并以ISO8601格式化字符串
/// /// </summary> [字符串的其余部分被截断]"; 的本地化字符串。
/// </summary>
internal static string Infrastructure_Extensions_GlobalExtensions_cs {
get {
return ResourceManager.GetString("Infrastructure_Extensions_GlobalExtensions_cs", resourceCulture);
}
}
/// <summary>
/// 查找类似 using Microsoft.AspNetCore.Builder;
///using Microsoft.Extensions.DependencyInjection;
///
///public interface IModuleInitializer {
/// void Init(IApplicationBuilder services);
///} 的本地化字符串。
/// </summary>
internal static string Infrastructure_ModuleBasic_IModuleInitializer_cs {
get {
return ResourceManager.GetString("Infrastructure_ModuleBasic_IModuleInitializer_cs", resourceCulture);
}
}
/// <summary>
/// 查找类似 using System.Linq;
///using System.Reflection;
///
///public class ModuleInfo {
/// public string Name { get; set; }
///
/// public Assembly Assembly { get; set; }
///
/// public string ShortName {
/// get {
/// return Name.Split('.').Last();
/// }
/// }
///
/// public string Path { get; set; }
///} 的本地化字符串。
/// </summary>
internal static string Infrastructure_ModuleBasic_ModuleInfo_cs {
get {
return ResourceManager.GetString("Infrastructure_ModuleBasic_ModuleInfo_cs", resourceCulture);
}
}
/// <summary>
/// 查找类似 using Microsoft.AspNetCore.Mvc.Razor;
///using System.Collections.Generic;
///using System.Linq;
///
///public class ModuleViewLocationExpander : IViewLocationExpander {
/// private const string _moduleKey = "module";
///
/// public IEnumerable<string> ExpandViewLocations(ViewLocationExpanderContext context, IEnumerable<string> viewLocations) {
/// if (context.Values.ContainsKey(_moduleKey)) {
/// var module = context.Values[_moduleKey];
/// if (!string.IsNullOrWhiteSpace(module)) {
/// var moduleViewLocations = new stri [字符串的其余部分被截断]"; 的本地化字符串。
/// </summary>
internal static string Infrastructure_ModuleBasic_ModuleViewLocationExpander_cs {
get {
return ResourceManager.GetString("Infrastructure_ModuleBasic_ModuleViewLocationExpander_cs", resourceCulture);
}
}
/// <summary>
/// 查找类似 [Mm]odule/
///wwwroot/[Mm]odule/ 的本地化字符串。
/// </summary>
internal static string WebHost_gitignore {
get {
return ResourceManager.GetString("WebHost_gitignore", resourceCulture);
}
}
/// <summary>
/// 查找类似 "use strict";
///
///var gulp = require('gulp'),
/// clean = require('gulp-clean'),
/// glob = require("glob");
///
///var paths = {
/// devModule: "../Module/",
/// hostModule: "./Module/",
/// hostWwwrootModules: "./wwwroot/module/"
///};
///
///var modules = loadModules();
///
///gulp.task('clean-module', function () {
/// return gulp.src([paths.hostModule + '*', paths.hostWwwrootModules + '*'], { read: false })
/// .pipe(clean());
///});
///
///gulp.task('copy-module', ['clean-module'], function () {
/// modules.forEach(f [字符串的其余部分被截断]"; 的本地化字符串。
/// </summary>
internal static string WebHost_gulpfile_js {
get {
return ResourceManager.GetString("WebHost_gulpfile_js", resourceCulture);
}
}
/// <summary>
/// 查找类似 {
/// "version": "1.0.0",
/// "name": "aaa",
/// "private": true,
/// "devDependencies": {
/// "gulp": "3.9.1",
/// "gulp-clean": "0.3.2",
/// "glob": "7.1.1"
/// }
///} 的本地化字符串。
/// </summary>
internal static string WebHost_package_json {
get {
return ResourceManager.GetString("WebHost_package_json", resourceCulture);
}
}
/// <summary>
/// 查找类似 <?xml version="1.0" encoding="utf-8"?>
///<configuration>
///
/// <!--
/// Configure your application settings in appsettings.json. Learn more at http://go.microsoft.com/fwlink/?LinkId=786380
/// -->
///
/// <system.webServer>
/// <handlers>
/// <add name="aspNetCore" path="*" verb="*" modules="AspNetCoreModule" resourceType="Unspecified"/>
/// </handlers>
/// <aspNetCore processPath="%LAUNCHER_PATH%" arguments="%LAUNCHER_ARGS%" stdoutLogEnabled="false" stdoutLogFile=".\logs\stdout" forwardWindowsAuthToken="f [字符串的其余部分被截断]"; 的本地化字符串。
/// </summary>
internal static string WebHost_web_config {
get {
return ResourceManager.GetString("WebHost_web_config", resourceCulture);
}
}
}
}
|
2881099/dotnetGen_mysql | 2,201 | Server/Resources/WebHost/gulpfile.js | "use strict";
var gulp = require('gulp'),
clean = require('gulp-clean'),
glob = require("glob");
var paths = {
devModule: "../Module/",
hostModule: "./Module/",
hostWwwrootModules: "./wwwroot/module/"
};
var modules = loadModules();
// Removes everything previously copied into the host's Module/ and
// wwwroot/module/ folders so a fresh copy never mixes in stale files.
gulp.task('clean-module', function () {
    return gulp.src([paths.hostModule + '*', paths.hostWwwrootModules + '*'], { read: false })
        .pipe(clean());
});
// Full deploy: after cleaning, copies each module's views, built assemblies,
// appsettings.json and static wwwroot assets into the WebHost.
gulp.task('copy-module', ['clean-module'], function () {
    modules.forEach(function (module) {
        console.log(paths.devModule + module.fullName + '/Views/**/*.*');
        gulp.src([paths.devModule + module.fullName + '/Views/**/*.*'], { base: module.fullName })
            .pipe(gulp.dest(paths.hostModule + module.fullName));
        // Build output (netstandard2.0) — only files named after the module.
        gulp.src(paths.devModule + module.fullName + '/bin/Debug/netstandard2.0/**/' + module.fullName + '.*')
            .pipe(gulp.dest(paths.hostModule + module.fullName));
        gulp.src(paths.devModule + module.fullName + '/appsettings.json')
            .pipe(gulp.dest(paths.hostModule + module.fullName));
        gulp.src(paths.devModule + module.fullName + '/wwwroot/**/*.*')
            .pipe(gulp.dest(paths.hostWwwrootModules + module.name));
    });
});
// Lightweight variant: refreshes views and static assets only (no clean,
// no assemblies, no appsettings).
gulp.task('copy-static', function () {
    modules.forEach(function (module) {
        gulp.src([paths.devModule + module.fullName + '/Views/**/*.*'], { base: module.fullName })
            .pipe(gulp.dest(paths.hostModule + module.fullName));
        gulp.src(paths.devModule + module.fullName + '/wwwroot/**/*.*')
            .pipe(gulp.dest(paths.hostWwwrootModules + module.name));
    });
});
// Discovers modules by globbing for */<name>/<name>.csproj under devModule and
// returns [{ name, fullName, version }] descriptors used by the copy tasks.
function loadModules() {
    var moduleManifestPaths,
        modules = [];
    moduleManifestPaths = glob.sync(paths.devModule + '*/*.csproj', {});
    moduleManifestPaths.forEach(function (moduleManifestPath) {
        // Backreference \1 requires the .csproj filename to match its folder name.
        var reg = /\/([^\/]+)\/\1\.csproj/.exec(moduleManifestPath);
        var moduleManifest = {
            name: reg[1],
            fullName: reg[1],
            version: "1.0.0"
        }
        //var exec = require('child_process').exec;
        //var child = exec('echo hello ' + name, function (err, stdout, stderr) {
        //	if (err) throw err;
        //	console.log(stdout);
        //});
        modules.push(moduleManifest);
    });
    return modules;
} |
2881099/dotnetGen_postgresql | 8,088 | MakeCode/ConsoleApp.cs | using Model;
using Npgsql;
using System;
using System.Collections.Generic;
using System.Data;
using System.IO;
using System.Text;
using System.Threading;
namespace MakeCode {
public class ConsoleApp {
ClientInfo _client;
ClientSocket _socket;
/// <summary>
/// Npgsql connection string assembled from the parsed command-line client info.
/// </summary>
public string ConnectionString {
	get {
		return string.Format(
			"Host={0};Port={1};Username={2};Password={3};Database={4};",
			this._client.Server, this._client.Port, this._client.Username, this._client.Password, this._client.Database);
	}
}
public string Server;
public int Port;
public string Username;
public string Password;
public string Database;
public string SolutionName;
public bool IsMakeSolution;
public bool IsMakeWebAdmin;
public bool IsDownloadRes;
public string OutputPath;
/// <summary>
/// Non-interactive entry point: parses command-line arguments, connects to the
/// remote code-generation server, requests a build for every table of the
/// target database and writes the returned files under <see cref="OutputPath"/>.
/// Signals <paramref name="wait"/> when done, on both success and failure.
/// </summary>
/// <param name="args">Raw command-line arguments; args[0] is "host[:port]" or a help switch.</param>
/// <param name="wait">Event the caller blocks on until this constructor finishes.</param>
public ConsoleApp(string[] args, ManualResetEvent wait) {
	string args0 = args[0].Trim().ToLower();
	// Help request: print usage (runtime text intentionally left in Chinese) and bail out.
	if (args[0] == "?" || args0 == "--help" || args0 == "-help") {
		Console.WriteLine(@"
Example:
> MakeCode 127.0.0.1[:5432] -U postgres -P 123456 -D dyschool -N dyschool -S -A -R -O ""c:/dyschool/""
-U PostgreSQL账号
-P PostgreSQL密码
-D 需要生成的数据库
-N 字符串,生成代码的解决方案名,命名空间
-S 生成解决方案,在项目第一次生成时使用
-A 生成后台管理
-R 下载资源
-O 路径,生成后的代码保存到哪里");
		wait.Set();
		return;
	}
	// args[0] = "host[:port]"; fall back to PostgreSQL's default port 5432.
	string[] ss = args[0].Split(new char[] { ':' }, 2);
	this.Server = ss[0];
	if (int.TryParse(ss.Length == 2 ? ss[1] : "5432", out this.Port) == false) this.Port = 5432;
	// Remaining args: switches; value-carrying switches consume the next slot (a++).
	for (int a = 1; a < args.Length; a++) {
		switch (args[a]) {
			case "-U":
				if (a + 1 >= args.Length) Console.WriteLine("-U 参数错误");
				else this.Username = args[a + 1];
				a++;
				break;
			case "-P":
				if (a + 1 >= args.Length) Console.WriteLine("-P 参数错误");
				else this.Password = args[a + 1];
				a++;
				break;
			case "-D":
				if (a + 1 >= args.Length) Console.WriteLine("-D 参数错误");
				else this.Database = args[a + 1];
				a++;
				break;
			case "-O":
				if (a + 1 >= args.Length) Console.WriteLine("-O 参数错误");
				else this.OutputPath = args[a + 1];
				a++;
				break;
			case "-N":
				if (a + 1 >= args.Length) Console.WriteLine("-N 参数错误");
				else this.SolutionName = args[a + 1];
				a++;
				break;
			case "-S":
				this.IsMakeSolution = true;
				break;
			case "-A":
				this.IsMakeWebAdmin = true;
				break;
			case "-R":
				this.IsDownloadRes = true;
				break;
		}
	}
	this._client = new ClientInfo(this.Server, this.Port, this.Username, this.Password);
	// Generation-server address comes from app settings; Uri parses host:port for us.
	Uri uri = new Uri("tcp://" + Settings.Default.server + "/");
	this._socket = new ClientSocket();
	this._socket.Error += Socket_OnError;
	this._socket.Receive += Socket_OnReceive;
	this._socket.Connect(uri.Host, uri.Port);
	// Give the socket one second to come up before checking Running.
	Thread.CurrentThread.Join(TimeSpan.FromSeconds(1));
	if (this._socket.Running == false) {
		wait.Set();
		return;
	}
	// Handshake: send the client credentials; the database list reply is ignored
	// here (the CLI already knows which database it wants).
	SocketMessager messager = new SocketMessager("GetDatabases", this._client);
	this._socket.Write(messager, delegate (object sender2, ClientSocketReceiveEventArgs e2) {
		List<DatabaseInfo> dbs = e2.Messager.Arg as List<DatabaseInfo>;
	});
	this._client.Database = this.Database;
	// Fetch the target database's table list; the callback runs before Write returns.
	List<TableInfo> tables = null;
	messager = new SocketMessager("GetTablesByDatabase", this._client.Database);
	this._socket.Write(messager, delegate (object sender2, ClientSocketReceiveEventArgs e2) {
		tables = e2.Messager.Arg as List<TableInfo>;
	});
	if (tables == null) {
		Console.WriteLine("[" + DateTime.Now.ToString("MM-dd HH:mm:ss") + "] 无法读取表");
		this._socket.Close();
		this._socket.Dispose();
		wait.Set();
		return;
	}
	// CLI mode generates every table.
	tables.ForEach(a => a.IsOutput = true);
	// Request the build; the table selection travels as a "1"/"0" bitmap string.
	// Allow up to 5 minutes for the server to produce the files.
	List<BuildInfo> bs = null;
	messager = new SocketMessager("Build", new object[] {
		SolutionName,
		IsMakeSolution,
		string.Join("", tables.ConvertAll<string>(delegate(TableInfo table){
			return string.Concat(table.IsOutput ? 1 : 0);
		}).ToArray()),
		IsMakeWebAdmin,
		IsDownloadRes
	});
	this._socket.Write(messager, delegate (object sender2, ClientSocketReceiveEventArgs e2) {
		bs = e2.Messager.Arg as List<BuildInfo>;
		if (e2.Messager.Arg is Exception) throw e2.Messager.Arg as Exception;
	}, TimeSpan.FromSeconds(60 * 5));
	// Materialize every returned build artifact under OutputPath.
	if (bs != null) {
		foreach (BuildInfo b in bs) {
			string path = Path.Combine(OutputPath, b.Path);
			Directory.CreateDirectory(Path.GetDirectoryName(path));
			string fileName = Path.GetFileName(b.Path);
			string ext = Path.GetExtension(b.Path);
			Encoding encode = Encoding.UTF8;
			// Archives and assemblies are sent raw — write the bytes straight out.
			if (fileName.EndsWith(".rar") || fileName.EndsWith(".zip") || fileName.EndsWith(".dll")) {
				using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write)) {
					fs.Write(b.Data, 0, b.Data.Length);
					fs.Close();
				}
				continue;
			}
			// Text files are deflate-compressed on the wire.
			byte[] data = Deflate.Decompress(b.Data);
			string content = Encoding.UTF8.GetString(data);
			// web.config needs an XML-escaped connection string; JSON takes it verbatim.
			if (string.Compare(fileName, "web.config") == 0) {
				string place = System.Web.HttpUtility.HtmlEncode(this.ConnectionString);
				content = content.Replace("{connectionString}", place);
			}
			if (fileName.EndsWith(".json")) {
				content = content.Replace("{connectionString}", this.ConnectionString);
			}
			// .refresh files are written as UTF-16 (Visual Studio convention).
			if (string.Compare(ext, ".refresh") == 0) {
				encode = Encoding.Unicode;
			}
			using (StreamWriter sw = new StreamWriter(path, false, encode)) {
				sw.Write(content);
				sw.Close();
			}
		}
	}
	this._socket.Close();
	this._socket.Dispose();
	GC.Collect();
	// Print the success banner in green, then restore the console color.
	ConsoleColor fc = Console.ForegroundColor;
	Console.ForegroundColor = ConsoleColor.Green;
	Console.WriteLine("[" + DateTime.Now.ToString("MM-dd HH:mm:ss") + "] The code files be maked in \"" + OutputPath + "\", please check.");
	Console.ForegroundColor = fc;
	wait.Set();
}
/// <summary>Logs socket/transport failures to the console with a timestamp prefix.</summary>
private void Socket_OnError(object sender, ClientSocketErrorEventArgs e) {
	string stamp = DateTime.Now.ToString("MM-dd HH:mm:ss");
	Console.WriteLine(string.Concat("[", stamp, "] ", e.Exception.Message));
}
/// <summary>
/// Handles server-initiated requests: the generation server can ask this client
/// to run SQL against its own database. Each reply echoes the request's
/// messager Id so the server can correlate it. Errors are logged locally and a
/// reply is still sent (with a null/zero result).
/// </summary>
private void Socket_OnReceive(object sender, ClientSocketReceiveEventArgs e) {
	SocketMessager messager = null;
	switch (e.Messager.Action) {
		case "ExecuteDataSet":
			// Arg carries the SQL text; run it and ship the DataSet back.
			string sql = e.Messager.Arg.ToString();
			DataSet ds = null;
			try {
				ds = ConsoleApp.ExecuteDataSet(this.ConnectionString, sql);
			} catch (Exception ex) {
				this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
			}
			messager = new SocketMessager(e.Messager.Action, ds);
			// Echo the request Id so the server matches reply to request.
			messager.Id = e.Messager.Id;
			this._socket.Write(messager);
			break;
		case "ExecuteNonQuery":
			// Same pattern, but only the affected row count travels back.
			string sql2 = e.Messager.Arg.ToString();
			int val = 0;
			try {
				val = ConsoleApp.ExecuteNonQuery(this.ConnectionString, sql2);
			} catch (Exception ex) {
				this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
			}
			messager = new SocketMessager(e.Messager.Action, val);
			messager.Id = e.Messager.Id;
			this._socket.Write(messager);
			break;
		default:
			// Unknown action: this client version does not implement it (message in Chinese).
			Console.WriteLine("[" + DateTime.Now.ToString("MM-dd HH:mm:ss") + "] " + "您当前使用的版本未能实现功能!");
			break;
	}
}
/// <summary>
/// Executes a SQL statement against the given PostgreSQL database and returns
/// the number of rows affected.
/// </summary>
/// <param name="connectionString">Npgsql connection string.</param>
/// <param name="cmdText">SQL statement to execute.</param>
/// <returns>Row count reported by <c>NpgsqlCommand.ExecuteNonQuery</c>.</returns>
/// <exception cref="Exception">Connection/execution failures propagate to the caller,
/// exactly as in the previous implementation.</exception>
public static int ExecuteNonQuery(string connectionString, string cmdText) {
    // Fix: the command is now disposed deterministically (the original leaked
    // the NpgsqlCommand). The explicit catch that cleared parameters and closed
    // the connection before rethrowing was redundant: disposing the connection
    // performs the same cleanup on every exit path.
    using (NpgsqlConnection conn = new NpgsqlConnection(connectionString))
    using (NpgsqlCommand cmd = new NpgsqlCommand(cmdText, conn)) {
        conn.Open();
        return cmd.ExecuteNonQuery();
    }
}
/// <summary>
/// Executes a SQL query against the given PostgreSQL database and returns the
/// result as a <see cref="DataSet"/> (one table per result set).
/// </summary>
/// <param name="connectionString">Npgsql connection string.</param>
/// <param name="cmdText">SQL query to execute.</param>
/// <returns>The filled <see cref="DataSet"/>; empty when the query yields no rows.</returns>
/// <exception cref="Exception">Connection/execution failures propagate to the caller,
/// exactly as in the previous implementation.</exception>
public static DataSet ExecuteDataSet(string connectionString, string cmdText) {
    DataSet ds = new DataSet();
    // Fix: command and adapter are now disposed deterministically (the original
    // leaked both). The duplicated Close()/Parameters.Clear() calls in the catch
    // block and on the success path are subsumed by disposal.
    using (NpgsqlConnection conn = new NpgsqlConnection(connectionString))
    using (NpgsqlCommand cmd = new NpgsqlCommand(cmdText, conn))
    using (NpgsqlDataAdapter sda = new NpgsqlDataAdapter(cmd)) {
        conn.Open();
        sda.Fill(ds);
    }
    return ds;
}
}
}
|
2881099/dotnetGen_postgresql | 12,376 | MakeCode/FrmMain.cs | using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Windows.Forms;
using Npgsql;
using Model;
namespace MakeCode {
/// <summary>
/// Main window of the code-generator client. It connects to the remote build
/// server (address from <c>Settings.Default.server</c>) over a ClientSocket,
/// lets the user pick a PostgreSQL database and a set of tables, asks the
/// server to generate the solution, and writes the returned files to a local
/// folder chosen by the user.
/// </summary>
public partial class FrmMain : Form {
    public FrmMain() {
        InitializeComponent();
    }
    // Connection info (host/port/credentials/database) the user entered.
    private ClientInfo _client;
    // Socket to the remote build server.
    private ClientSocket _socket;
    // Tables of the currently selected database; bound to dgvGridview.
    public List<TableInfo> _tables = new List<TableInfo>();
    /// <summary>
    /// Npgsql connection string built from the current client info; falls back
    /// to the default "postgres" database when none has been selected yet.
    /// </summary>
    public string ConnectionString {
        get {
            string connStr = "Host={0};Port={1};Username={2};Password={3};Database={4};";
            return string.Format(connStr, this._client.Server, this._client.Port, this._client.Username, this._client.Password, string.IsNullOrEmpty(this._client.Database) ? "postgres" : this._client.Database);
        }
    }
    /// <summary>
    /// Rebuilds the table grid columns (table-name link + "Ins Sel" checkbox)
    /// and re-binds <see cref="_tables"/> as the data source.
    /// </summary>
    private void BindGridView() {
        DataGridViewLinkColumn dgvColName = new DataGridViewLinkColumn();
        dgvColName.Name = "dgvColName";
        dgvColName.DefaultCellStyle.SelectionBackColor = System.Drawing.Color.White;
        dgvColName.DataPropertyName = "FullName";
        dgvColName.HeaderText = "Name";
        dgvColName.DisplayIndex = 1;
        dgvColName.Width = 206;
        DataGridViewCheckBoxColumn dgvColIsOutput = new DataGridViewCheckBoxColumn();
        dgvColIsOutput.Name = "dgvColIsOutput";
        dgvColIsOutput.DefaultCellStyle.SelectionBackColor = System.Drawing.Color.White;
        dgvColIsOutput.DefaultCellStyle.Alignment = DataGridViewContentAlignment.MiddleCenter;
        dgvColIsOutput.DataPropertyName = "IsOutput";
        dgvColIsOutput.HeaderText = "Ins Sel";
        dgvColIsOutput.DisplayIndex = 2;
        dgvColIsOutput.Width = 60;
        this.dgvGridview.AutoGenerateColumns = false;
        this.dgvGridview.DataSource = null;
        this.dgvGridview.Columns.Clear();
        this.dgvGridview.Columns.AddRange(new DataGridViewColumn[]{
            dgvColName,
            dgvColIsOutput
        });
        dgvGridview.DataSource = _tables;
        // Re-evaluate whether the Build button should be enabled.
        txtProject_TextChanged(this, EventArgs.Empty);
    }
    /// <summary>
    /// Connects to the build server on startup and disposes the socket when the
    /// form closes.
    /// </summary>
    private void FrmMain_Load(object sender, EventArgs e) {
        Uri uri = new Uri("tcp://" + Settings.Default.server + "/");
        this._socket = new ClientSocket();
        this._socket.Error += Socket_OnError;
        this._socket.Receive += Socket_OnReceive;
        this._socket.Connect(uri.Host, uri.Port);
        this.Closed += delegate(object sender2, EventArgs e2) {
            this._socket.Dispose();
        };
    }
    /// <summary>Shows socket-layer errors to the user.</summary>
    private void Socket_OnError(object sender, ClientSocketErrorEventArgs e) {
        Lib.Msgbox(e.Exception.Message, MessageBoxIcon.Error);
    }
    /// <summary>
    /// Handles SQL-execution requests pushed by the build server and replies
    /// with the result, tagged with the originating message id.
    /// </summary>
    private void Socket_OnReceive(object sender, ClientSocketReceiveEventArgs e) {
        SocketMessager messager = null;
        switch (e.Messager.Action) {
            case "ExecuteDataSet":
                string sql = e.Messager.Arg.ToString();
                DataSet ds = null;
                try {
                    ds = ConsoleApp.ExecuteDataSet(this.ConnectionString, sql);
                } catch(Exception ex) {
                    this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
                }
                messager = new SocketMessager(e.Messager.Action, ds);
                messager.Id = e.Messager.Id;
                this._socket.Write(messager);
                break;
            case "ExecuteNonQuery":
                string sql2 = e.Messager.Arg.ToString();
                int val = 0;
                try {
                    val = ConsoleApp.ExecuteNonQuery(this.ConnectionString, sql2);
                } catch (Exception ex) {
                    this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
                }
                messager = new SocketMessager(e.Messager.Action, val);
                messager.Id = e.Messager.Id;
                this._socket.Write(messager);
                break;
            default:
                // Unknown action: server is newer than this client build.
                // NOTE(review): this message text is mojibake in the original
                // source encoding; left byte-identical on purpose.
                Lib.Msgbox("ǰʹõİ汾δʵֹܣ");
                break;
        }
    }
    /// <summary>
    /// Toggles between connected and disconnected states. On connect it asks
    /// the server for the database list of the entered PostgreSQL host.
    /// </summary>
    private void btnConnect_Click(object sender, EventArgs e) {
        this.btnConnect.Enabled = false;
        if (this.btnConnect.Text == "Connect") {
            this._client = new ClientInfo(this.txtServer.Text, int.Parse(this.txtPort.Text), this.txtUsername.Text, this.txtPassword.Text);
            List<DatabaseInfo> dbs = null;
            SocketMessager messager = new SocketMessager("GetDatabases", this._client);
            this._socket.Write(messager, delegate(object sender2, ClientSocketReceiveEventArgs e2) {
                dbs = e2.Messager.Arg as List<DatabaseInfo>;
            });
            if (dbs == null) {
                // Request failed or timed out; stay disconnected.
                this.btnConnect.Enabled = true;
                return;
            }
            this.cmbDatabase.DisplayMember = "Name";
            this.cmbDatabase.DataSource = dbs;
            if (this.cmbDatabase.Items.Count > 0) {
                this.cmbDatabase.SelectedIndex = 0;
                this.cmbDatabase.Enabled = true;
            }
            this.txtServer.Enabled = this.txtPort.Enabled = this.txtUsername.Enabled = this.txtPassword.Enabled = false;
        } else {
            // Disconnect: reset the UI to its initial state.
            this.txtSolution.Clear();
            this.cmbDatabase.DataSource = null;
            this.cmbDatabase.Enabled = false;
            this.btnBuild.Enabled = false;
            this.txtServer.Enabled = this.txtPort.Enabled = this.txtUsername.Enabled = this.txtPassword.Enabled = true;
            this.dgvGridview.DataSource = null;
        }
        this.btnConnect.Text = this.btnConnect.Text == "Connect" ? "DisConnect" : "Connect";
        this.btnConnect.Enabled = true;
    }
    /// <summary>
    /// Loads the table list of the newly selected database from the server and
    /// binds it to the grid.
    /// </summary>
    private void cmbDatabase_SelectedIndexChanged(object sender, EventArgs e) {
        // Ignore the event raised while btnConnect_Click is still populating the combo.
        if (this.btnConnect.Text == "DisConnect" && this.btnConnect.Enabled == false) return;
        this._client.Database = this.cmbDatabase.Text;
        List<TableInfo> tables = null;
        SocketMessager messager = new SocketMessager("GetTablesByDatabase", this._client.Database);
        this._socket.Write(messager, delegate(object sender2, ClientSocketReceiveEventArgs e2) {
            tables = e2.Messager.Arg as List<TableInfo>;
        });
        this._tables = tables;
        this.BindGridView();
    }
    /// <summary>
    /// Asks the server to generate the solution for the selected tables and
    /// writes the returned files under a user-chosen folder. Binary payloads
    /// (.rar/.zip/.dll) are written verbatim; text payloads are deflate-
    /// compressed and have the {connectionString} placeholder substituted.
    /// </summary>
    private void btnBuild_Click(object sender, EventArgs e) {
        if (this._tables.Find(delegate(TableInfo table) {
            return table.IsOutput;
        }) == null) {
            // Nothing selected: simulate a header click, which selects all tables.
            DataGridViewCellMouseEventArgs e2 = new DataGridViewCellMouseEventArgs(1, -1, 1, 1, new MouseEventArgs(MouseButtons.Left, 1, 1, 1, 1));
            this.dgvGridview_ColumnHeaderMouseClick(this, e2);
        }
        FolderBrowserDialog fbd = new FolderBrowserDialog();
        if (fbd.ShowDialog() != DialogResult.OK) return;
        string selectedPath = fbd.SelectedPath;
        List<BuildInfo> bs = null;
        SocketMessager messager = new SocketMessager("Build", new object[] {
            this.txtSolution.Text,
            this.chkSolution.Checked,
            // One '1'/'0' character per table, marking which tables to generate.
            string.Join("", this._tables.ConvertAll<string>(delegate(TableInfo table){
                return string.Concat(table.IsOutput ? 1 : 0);
            }).ToArray()),
            this.chkWebAdmin.Checked,
            this.chkDownloadRes.Checked
        });
        this._socket.Write(messager, delegate(object sender2, ClientSocketReceiveEventArgs e2) {
            bs = e2.Messager.Arg as List<BuildInfo>;
            if (e2.Messager.Arg is Exception) throw e2.Messager.Arg as Exception;
        }, TimeSpan.FromSeconds(60 * 5));
        if (bs == null) return;
        foreach (BuildInfo b in bs) {
            string path = Path.Combine(selectedPath, b.Path);
            Directory.CreateDirectory(Path.GetDirectoryName(path));
            string fileName = Path.GetFileName(b.Path);
            string ext = Path.GetExtension(b.Path);
            Encoding encode = Encoding.UTF8;
            if (fileName.EndsWith(".rar") || fileName.EndsWith(".zip") || fileName.EndsWith(".dll")) {
                // Binary payload: write as-is.
                using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write)) {
                    fs.Write(b.Data, 0, b.Data.Length);
                    fs.Close();
                }
                continue;
            }
            // Text payload: sent deflate-compressed, UTF-8 encoded.
            byte[] data = Deflate.Decompress(b.Data);
            string content = Encoding.UTF8.GetString(data);
            if (string.Compare(fileName, "web.config") == 0) {
                string place = System.Web.HttpUtility.HtmlEncode(this.ConnectionString);
                content = content.Replace("{connectionString}", place);
            }
            if (fileName.EndsWith(".json")) {
                // Escape backslashes and quotes so the connection string remains
                // a valid JSON string value. (Bug fix: "place" used to be computed
                // but the raw ConnectionString was substituted instead.)
                string place = this.ConnectionString.Replace("\\", "\\\\").Replace("\"", "\\\"").Replace("'", "\\'");
                content = content.Replace("{connectionString}", place);
            }
            if (string.Compare(ext, ".refresh") == 0) {
                // .refresh files must be written as UTF-16 for Visual Studio.
                encode = Encoding.Unicode;
            }
            using (StreamWriter sw = new StreamWriter(path, false, encode)) {
                sw.Write(content);
                sw.Close();
            }
        }
        GC.Collect();
        Lib.Msgbox("The code files be maked in \"" + selectedPath + "\", please check.");
    }
    /// <summary>
    /// Enables the Build button only when tables are loaded and a solution name
    /// has been entered.
    /// </summary>
    private void txtProject_TextChanged(object sender, EventArgs e) {
        this.btnBuild.Enabled = this._tables != null && this._tables.Count > 0 && this.txtSolution.Text != string.Empty;
    }
    /// <summary>
    /// Clicking the name column header toggles the output flag of every table
    /// (skipping column-less tables and sysdiagrams, always keeping procedures).
    /// </summary>
    private void dgvGridview_ColumnHeaderMouseClick(object sender, DataGridViewCellMouseEventArgs e) {
        if (e.Button == MouseButtons.Left && e.ColumnIndex == 1 && this._tables != null) {
            foreach (TableInfo table in _tables) table.IsOutput = !table.IsOutput &&
                (table.Columns.Count > 0 && table.FullName != "dbo.sysdiagrams" || table.Type == "P");
            this.BindGridView();
        }
    }
    /// <summary>
    /// Opens a column-detail popup (FrmView) when a table-name link or a
    /// foreign-key "View" link is clicked; popups can recursively open related
    /// tables through the same handler.
    /// </summary>
    private void dgvGridview_CellContentClick(object sender, DataGridViewCellEventArgs e) {
        DataGridView dgv = sender as DataGridView;
        if (dgv != null) {
            // True for the main grid, false for a nested FrmView grid.
            bool isFrmMain = dgv.FindForm() is FrmMain;
            if (e.RowIndex >= 0) {
                DataGridViewColumn column = dgv.Columns[e.ColumnIndex];
                DataGridViewRow row = dgv.Rows[e.RowIndex];
                // NOTE(review): due to && binding tighter than ||, this also fires
                // for "dgvColView" clicks on the main grid; kept as-is because the
                // main grid has no such column in practice — confirm before changing.
                if (isFrmMain && column.Name == "dgvColName" || column.Name == "dgvColView") {
                    string pdgvColName = string.Concat(row.Cells["dgvColName"].Value);
                    string dgvColValue = string.Concat(column.Name == "dgvColView" ? row.Cells["dgvColView"].Value : null);
                    string viewTable = isFrmMain ? pdgvColName : row.Cells["dgvColView"].Tag.ToString();
                    string name = isFrmMain ? pdgvColName : dgv.Tag.ToString();
                    if (dgvColValue == "FK-x") {
                        // Referenced table was not generated; nothing to show.
                        // NOTE(review): message text is mojibake in the original
                        // source encoding; left byte-identical on purpose.
                        Lib.Msgbox("ⲿĿֱɺ鿴룡");
                        return;
                    }
                    TableInfo table = _tables.Find(delegate(TableInfo table1) {
                        return viewTable == table1.FullName;
                    });
                    if (table == null) return;
                    FrmView frmView = new FrmView();
                    frmView.Text = isFrmMain ? (name + " - view") :
                        (name + "." + pdgvColName + " - " + table.FullName + " - relation view");
                    frmView.dgvGridview.Tag = viewTable;
                    foreach (ColumnInfo c1 in table.Columns) {
                        string viewText = null;
                        object image = c1.IsPrimaryKey ? this.imageList1.Images["PrimaryKey.ico"] : null;
                        // FindAll is used for its side effects: it locates the FK
                        // covering this column and sets viewTable/viewText/image.
                        table.ForeignKeys.FindAll(delegate(ForeignKeyInfo fk) {
                            ColumnInfo c2 = fk.Columns.Find(delegate(ColumnInfo c3) {
                                return c3.Name == c1.Name;
                            });
                            if (c2 != null) {
                                if (fk.ReferencedTable != null) {
                                    viewTable = fk.ReferencedTable.FullName;
                                    viewText = "View";
                                } else {
                                    viewTable = fk.ReferencedTableName;
                                    viewText = "FK-x";
                                }
                                if (image == null) image = imageList1.Images["Key.ico"];
                            }
                            return c2 != null;
                        });
                        frmView.dgvGridview.Rows.Add(new object[] { image, c1.Name, c1.SqlType, c1.IsNullable, viewText });
                        if (viewText != null) frmView.dgvGridview.Rows[frmView.dgvGridview.Rows.Count - 1].Cells["dgvColView"].Tag = viewTable;
                    }
                    frmView.dgvGridview.CellContentClick += dgvGridview_CellContentClick;
                    frmView.ShowDialog();
                    frmView.Dispose();
                }
            }
        }
    }
    /// <summary>Re-checks Build availability when an output checkbox changes.</summary>
    private void dgvGridview_CellValueChanged(object sender, DataGridViewCellEventArgs e) {
        if (e.RowIndex >= 0) {
            DataGridViewColumn column = ((DataGridView)sender).Columns[e.ColumnIndex];
            DataGridViewRow row = ((DataGridView)sender).Rows[e.RowIndex];
            if (column.Name == "dgvColIsOutput") {
                txtProject_TextChanged(sender, e);
            }
        }
    }
    /// <summary>Persists user settings (server/credentials/checkboxes) on exit.</summary>
    private void FrmMain_FormClosing(object sender, FormClosingEventArgs e) {
        Properties.Settings.Default.Save();
    }
    /// <summary>Suppresses grid data-binding error dialogs.</summary>
    private void dgvGridview_DataError(object sender, DataGridViewDataErrorEventArgs e) {
        e.Cancel = true;
    }
    /// <summary>Highlights stored-procedure rows (Type == "P") in light blue.</summary>
    private void dgvGridview_CellFormatting(object sender, DataGridViewCellFormattingEventArgs e) {
        if (e.ColumnIndex == 0 && this._tables != null && e.RowIndex < this._tables.Count) {
            switch (this._tables[e.RowIndex].Type) {
                case "P":
                    e.CellStyle.BackColor = ColorTranslator.FromHtml("#CDEDFC");
                    break;
            }
        }
    }
    /// <summary>Restricts the port box to digits and backspace.</summary>
    private void txtPort_KeyPress(object sender, KeyPressEventArgs e) {
        if (!Char.IsNumber(e.KeyChar) && e.KeyChar != (char)8) e.Handled = true;
    }
}
}
|
2881099/dotnetGen_postgresql | 16,694 | MakeCode/FrmMain.designer.cs | using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using Model;
namespace MakeCode {
// Designer-generated half of FrmMain: component fields, Dispose, and the
// InitializeComponent layout code. Regenerated by the Windows Forms designer —
// do not hand-edit the layout statements.
partial class FrmMain {
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing) {
if (disposing && (components != null)) {
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows 窗体设计器生成的代码
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent() {
this.components = new System.ComponentModel.Container();
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(FrmMain));
this.imageList1 = new System.Windows.Forms.ImageList(this.components);
this.labServer = new System.Windows.Forms.Label();
this.labProject = new System.Windows.Forms.Label();
this.labUsername = new System.Windows.Forms.Label();
this.labPassword = new System.Windows.Forms.Label();
this.cmbDatabase = new System.Windows.Forms.ComboBox();
this.btnBuild = new System.Windows.Forms.Button();
this.btnConnect = new System.Windows.Forms.Button();
this.dgvGridview = new System.Windows.Forms.DataGridView();
this.toolTip1 = new System.Windows.Forms.ToolTip(this.components);
this.txtPort = new System.Windows.Forms.TextBox();
this.chkDownloadRes = new System.Windows.Forms.CheckBox();
this.chkWebAdmin = new System.Windows.Forms.CheckBox();
this.chkSolution = new System.Windows.Forms.CheckBox();
this.txtServer = new System.Windows.Forms.TextBox();
this.txtSolution = new System.Windows.Forms.TextBox();
this.txtUsername = new System.Windows.Forms.TextBox();
this.txtPassword = new System.Windows.Forms.TextBox();
this.labDatabase = new System.Windows.Forms.Label();
this.panel1 = new System.Windows.Forms.Panel();
this.webBrowser1 = new System.Windows.Forms.WebBrowser();
this.labPort = new System.Windows.Forms.Label();
((System.ComponentModel.ISupportInitialize)(this.dgvGridview)).BeginInit();
this.panel1.SuspendLayout();
this.SuspendLayout();
//
// imageList1
//
this.imageList1.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("imageList1.ImageStream")));
this.imageList1.TransparentColor = System.Drawing.Color.Transparent;
this.imageList1.Images.SetKeyName(0, "PrimaryKey.ico");
this.imageList1.Images.SetKeyName(1, "Key.ico");
//
// labServer
//
this.labServer.AutoSize = true;
this.labServer.Location = new System.Drawing.Point(10, 323);
this.labServer.Name = "labServer";
this.labServer.Size = new System.Drawing.Size(59, 12);
this.labServer.TabIndex = 16;
this.labServer.Text = "PgSQL主机";
//
// labProject
//
this.labProject.AutoSize = true;
this.labProject.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft;
this.labProject.Location = new System.Drawing.Point(302, 323);
this.labProject.Name = "labProject";
this.labProject.Size = new System.Drawing.Size(53, 12);
this.labProject.TabIndex = 27;
this.labProject.Text = "项目名称";
//
// labUsername
//
this.labUsername.AutoSize = true;
this.labUsername.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft;
this.labUsername.Location = new System.Drawing.Point(10, 350);
this.labUsername.Name = "labUsername";
this.labUsername.Size = new System.Drawing.Size(41, 12);
this.labUsername.TabIndex = 18;
this.labUsername.Text = "用户名";
//
// labPassword
//
this.labPassword.AutoSize = true;
this.labPassword.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft;
this.labPassword.Location = new System.Drawing.Point(10, 377);
this.labPassword.Name = "labPassword";
this.labPassword.Size = new System.Drawing.Size(41, 12);
this.labPassword.TabIndex = 20;
this.labPassword.Text = "密 码";
//
// cmbDatabase
//
this.cmbDatabase.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.cmbDatabase.Enabled = false;
this.cmbDatabase.FormattingEnabled = true;
this.cmbDatabase.Location = new System.Drawing.Point(364, 374);
this.cmbDatabase.Margin = new System.Windows.Forms.Padding(4);
this.cmbDatabase.Name = "cmbDatabase";
this.cmbDatabase.Size = new System.Drawing.Size(117, 20);
this.cmbDatabase.TabIndex = 24;
this.toolTip1.SetToolTip(this.cmbDatabase, "请选择一个数据库");
this.cmbDatabase.SelectedIndexChanged += new System.EventHandler(this.cmbDatabase_SelectedIndexChanged);
//
// btnBuild
//
this.btnBuild.Enabled = false;
this.btnBuild.Location = new System.Drawing.Point(487, 373);
this.btnBuild.Name = "btnBuild";
this.btnBuild.Size = new System.Drawing.Size(89, 21);
this.btnBuild.TabIndex = 25;
this.btnBuild.Text = "生成";
this.toolTip1.SetToolTip(this.btnBuild, "生成");
this.btnBuild.UseVisualStyleBackColor = true;
this.btnBuild.Click += new System.EventHandler(this.btnBuild_Click);
//
// btnConnect
//
this.btnConnect.Location = new System.Drawing.Point(208, 374);
this.btnConnect.Name = "btnConnect";
this.btnConnect.Size = new System.Drawing.Size(90, 21);
this.btnConnect.TabIndex = 22;
this.btnConnect.Text = "Connect";
this.btnConnect.UseVisualStyleBackColor = true;
this.btnConnect.Click += new System.EventHandler(this.btnConnect_Click);
//
// dgvGridview
//
this.dgvGridview.AllowUserToAddRows = false;
this.dgvGridview.AllowUserToResizeRows = false;
this.dgvGridview.BackgroundColor = System.Drawing.SystemColors.ActiveCaptionText;
this.dgvGridview.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize;
this.dgvGridview.Location = new System.Drawing.Point(12, 12);
this.dgvGridview.Name = "dgvGridview";
this.dgvGridview.RowHeadersVisible = false;
this.dgvGridview.RowHeadersWidthSizeMode = System.Windows.Forms.DataGridViewRowHeadersWidthSizeMode.DisableResizing;
this.dgvGridview.RowTemplate.Height = 23;
this.dgvGridview.Size = new System.Drawing.Size(286, 302);
this.dgvGridview.TabIndex = 26;
this.dgvGridview.CellContentClick += new System.Windows.Forms.DataGridViewCellEventHandler(this.dgvGridview_CellContentClick);
this.dgvGridview.CellFormatting += new System.Windows.Forms.DataGridViewCellFormattingEventHandler(this.dgvGridview_CellFormatting);
this.dgvGridview.CellValueChanged += new System.Windows.Forms.DataGridViewCellEventHandler(this.dgvGridview_CellValueChanged);
this.dgvGridview.ColumnHeaderMouseClick += new System.Windows.Forms.DataGridViewCellMouseEventHandler(this.dgvGridview_ColumnHeaderMouseClick);
this.dgvGridview.DataError += new System.Windows.Forms.DataGridViewDataErrorEventHandler(this.dgvGridview_DataError);
//
// txtPort
//
this.txtPort.DataBindings.Add(new System.Windows.Forms.Binding("Text", global::MakeCode.Properties.Settings.Default, "txtPort_text", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.txtPort.Location = new System.Drawing.Point(241, 320);
this.txtPort.MaxLength = 5;
this.txtPort.Name = "txtPort";
this.txtPort.Size = new System.Drawing.Size(55, 21);
this.txtPort.TabIndex = 39;
this.txtPort.Text = global::MakeCode.Properties.Settings.Default.txtPort_text;
this.toolTip1.SetToolTip(this.txtPort, "\r\n如:127.0.0.1:5432");
this.txtPort.KeyPress += new System.Windows.Forms.KeyPressEventHandler(this.txtPort_KeyPress);
//
// chkDownloadRes
//
this.chkDownloadRes.AutoSize = true;
this.chkDownloadRes.Checked = global::MakeCode.Properties.Settings.Default.chkDownloadRes_checked;
this.chkDownloadRes.DataBindings.Add(new System.Windows.Forms.Binding("Checked", global::MakeCode.Properties.Settings.Default, "chkDownloadRes_checked", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.chkDownloadRes.Location = new System.Drawing.Point(397, 347);
this.chkDownloadRes.Name = "chkDownloadRes";
this.chkDownloadRes.RightToLeft = System.Windows.Forms.RightToLeft.Yes;
this.chkDownloadRes.Size = new System.Drawing.Size(84, 16);
this.chkDownloadRes.TabIndex = 37;
this.chkDownloadRes.Text = "下载资源包";
this.toolTip1.SetToolTip(this.chkDownloadRes, "是否下载资源包,因网速原因,可能会影响生成速度");
this.chkDownloadRes.UseVisualStyleBackColor = true;
//
// chkWebAdmin
//
this.chkWebAdmin.AutoSize = true;
this.chkWebAdmin.Checked = global::MakeCode.Properties.Settings.Default.chkWebAdmin_checked;
this.chkWebAdmin.DataBindings.Add(new System.Windows.Forms.Binding("Checked", global::MakeCode.Properties.Settings.Default, "chkWebAdmin_checked", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.chkWebAdmin.Location = new System.Drawing.Point(486, 322);
this.chkWebAdmin.Name = "chkWebAdmin";
this.chkWebAdmin.RightToLeft = System.Windows.Forms.RightToLeft.Yes;
this.chkWebAdmin.Size = new System.Drawing.Size(96, 16);
this.chkWebAdmin.TabIndex = 35;
this.chkWebAdmin.Text = "生成后台管理";
this.toolTip1.SetToolTip(this.chkWebAdmin, "是否生成 WEB 管理");
this.chkWebAdmin.UseVisualStyleBackColor = true;
//
// chkSolution
//
this.chkSolution.AutoSize = true;
this.chkSolution.Checked = global::MakeCode.Properties.Settings.Default.chkSolution_checked;
this.chkSolution.DataBindings.Add(new System.Windows.Forms.Binding("Checked", global::MakeCode.Properties.Settings.Default, "chkSolution_checked", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.chkSolution.Location = new System.Drawing.Point(486, 347);
this.chkSolution.Name = "chkSolution";
this.chkSolution.RightToLeft = System.Windows.Forms.RightToLeft.Yes;
this.chkSolution.Size = new System.Drawing.Size(96, 16);
this.chkSolution.TabIndex = 30;
this.chkSolution.Text = "生成解决方案";
this.toolTip1.SetToolTip(this.chkSolution, "是否生成解决方案(.sln)和项目文件(.csproj)");
this.chkSolution.UseVisualStyleBackColor = true;
//
// txtServer
//
this.txtServer.DataBindings.Add(new System.Windows.Forms.Binding("Text", global::MakeCode.Properties.Settings.Default, "txtServer_text", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.txtServer.Location = new System.Drawing.Point(81, 320);
this.txtServer.Name = "txtServer";
this.txtServer.Size = new System.Drawing.Size(116, 21);
this.txtServer.TabIndex = 17;
this.txtServer.Text = global::MakeCode.Properties.Settings.Default.txtServer_text;
this.toolTip1.SetToolTip(this.txtServer, "\r\n如:127.0.0.1:5432");
//
// txtSolution
//
this.txtSolution.DataBindings.Add(new System.Windows.Forms.Binding("Text", global::MakeCode.Properties.Settings.Default, "txtSolution_text", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.txtSolution.Location = new System.Drawing.Point(364, 320);
this.txtSolution.Name = "txtSolution";
this.txtSolution.Size = new System.Drawing.Size(117, 21);
this.txtSolution.TabIndex = 28;
this.txtSolution.Text = global::MakeCode.Properties.Settings.Default.txtSolution_text;
this.toolTip1.SetToolTip(this.txtSolution, "要生成的解决方案名(不能为空)\r\n如:Nic");
this.txtSolution.TextChanged += new System.EventHandler(this.txtProject_TextChanged);
//
// txtUsername
//
this.txtUsername.DataBindings.Add(new System.Windows.Forms.Binding("Text", global::MakeCode.Properties.Settings.Default, "txtUsername_text", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.txtUsername.Location = new System.Drawing.Point(81, 347);
this.txtUsername.Name = "txtUsername";
this.txtUsername.Size = new System.Drawing.Size(116, 21);
this.txtUsername.TabIndex = 19;
this.txtUsername.Text = global::MakeCode.Properties.Settings.Default.txtUsername_text;
this.toolTip1.SetToolTip(this.txtUsername, "用户\r\n:postgres");
//
// txtPassword
//
this.txtPassword.DataBindings.Add(new System.Windows.Forms.Binding("Text", global::MakeCode.Properties.Settings.Default, "txtPassword_text", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.txtPassword.Location = new System.Drawing.Point(81, 374);
this.txtPassword.Name = "txtPassword";
this.txtPassword.PasswordChar = '*';
this.txtPassword.Size = new System.Drawing.Size(116, 21);
this.txtPassword.TabIndex = 21;
this.txtPassword.Text = global::MakeCode.Properties.Settings.Default.txtPassword_text;
this.toolTip1.SetToolTip(this.txtPassword, "\r\n如:123456");
//
// labDatabase
//
this.labDatabase.AutoSize = true;
this.labDatabase.Location = new System.Drawing.Point(302, 378);
this.labDatabase.Name = "labDatabase";
this.labDatabase.Size = new System.Drawing.Size(53, 12);
this.labDatabase.TabIndex = 23;
this.labDatabase.Text = "Database";
//
// panel1
//
this.panel1.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
this.panel1.Controls.Add(this.webBrowser1);
this.panel1.Location = new System.Drawing.Point(304, 12);
this.panel1.Name = "panel1";
this.panel1.Size = new System.Drawing.Size(270, 302);
this.panel1.TabIndex = 36;
//
// webBrowser1
//
this.webBrowser1.AllowWebBrowserDrop = false;
this.webBrowser1.IsWebBrowserContextMenuEnabled = false;
this.webBrowser1.Location = new System.Drawing.Point(0, 0);
this.webBrowser1.MinimumSize = new System.Drawing.Size(20, 20);
this.webBrowser1.Name = "webBrowser1";
this.webBrowser1.Size = new System.Drawing.Size(270, 302);
this.webBrowser1.TabIndex = 33;
this.webBrowser1.Url = new System.Uri("http://www.penzz.com/nicpetshop.html", System.UriKind.Absolute);
this.webBrowser1.WebBrowserShortcutsEnabled = false;
//
// labPort
//
this.labPort.AutoSize = true;
this.labPort.Location = new System.Drawing.Point(206, 326);
this.labPort.Name = "labPort";
this.labPort.Size = new System.Drawing.Size(29, 12);
this.labPort.TabIndex = 38;
this.labPort.Text = "端口";
//
// FrmMain
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.BackColor = System.Drawing.SystemColors.GradientActiveCaption;
this.ClientSize = new System.Drawing.Size(586, 405);
this.Controls.Add(this.txtPort);
this.Controls.Add(this.labPort);
this.Controls.Add(this.chkDownloadRes);
this.Controls.Add(this.panel1);
this.Controls.Add(this.chkWebAdmin);
this.Controls.Add(this.chkSolution);
this.Controls.Add(this.txtServer);
this.Controls.Add(this.txtSolution);
this.Controls.Add(this.txtUsername);
this.Controls.Add(this.txtPassword);
this.Controls.Add(this.labServer);
this.Controls.Add(this.labProject);
this.Controls.Add(this.labDatabase);
this.Controls.Add(this.labUsername);
this.Controls.Add(this.labPassword);
this.Controls.Add(this.cmbDatabase);
this.Controls.Add(this.btnBuild);
this.Controls.Add(this.btnConnect);
this.Controls.Add(this.dgvGridview);
this.ForeColor = System.Drawing.Color.Navy;
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
this.MaximizeBox = false;
this.Name = "FrmMain";
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
this.Text = "代码生成器(.NET Core + PostgreSQL)";
this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.FrmMain_FormClosing);
this.Load += new System.EventHandler(this.FrmMain_Load);
((System.ComponentModel.ISupportInitialize)(this.dgvGridview)).EndInit();
this.panel1.ResumeLayout(false);
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
// Control fields referenced from the hand-written half of the class.
private System.Windows.Forms.ImageList imageList1;
private CheckBox chkSolution;
private TextBox txtServer;
private TextBox txtSolution;
private TextBox txtUsername;
private TextBox txtPassword;
private Label labServer;
private Label labProject;
private Label labUsername;
private Label labPassword;
private ComboBox cmbDatabase;
private Button btnBuild;
private Button btnConnect;
private DataGridView dgvGridview;
private ToolTip toolTip1;
private Label labDatabase;
private Panel panel1;
private WebBrowser webBrowser1;
private CheckBox chkDownloadRes;
private CheckBox chkWebAdmin;
private Label labPort;
private TextBox txtPort;
}
} |
2881099/dotnetGen_postgresql | 1,391 | MakeCode/Settings.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace MakeCode {
// Auto-generated user-settings wrapper (SettingsSingleFileGenerator); edits
// here may be overwritten when the settings designer regenerates the file.
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "14.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
// Thread-safe singleton instance shared by the whole application.
private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
/// <summary>
/// Gets the synchronized singleton settings instance.
/// </summary>
public static Settings Default {
get {
return defaultInstance;
}
}
/// <summary>
/// Address ("host:port") of the remote build server; defaults to 127.0.0.1:38888.
/// </summary>
[global::System.Configuration.UserScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("127.0.0.1:38888")]
public string server {
get {
return ((string)(this["server"]));
}
set {
this["server"] = value;
}
}
}
}
|
27182812/ChatGLM-LLaMA-chinese-insturct | 135,468 | src/transformers/models/deta/modeling_deta.py | # coding=utf-8
# Copyright 2022 SenseTime and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch DETA model."""
import copy
import math
import warnings
from dataclasses import dataclass
from typing import Dict, List, Optional, Tuple
import torch
import torch.nn.functional as F
from torch import Tensor, nn
from ...activations import ACT2FN
from ...file_utils import (
ModelOutput,
add_start_docstrings,
add_start_docstrings_to_model_forward,
is_scipy_available,
is_vision_available,
replace_return_docstrings,
)
from ...modeling_outputs import BaseModelOutput
from ...modeling_utils import PreTrainedModel
from ...pytorch_utils import meshgrid, torch_int_div
from ...utils import is_torchvision_available, logging, requires_backends
from ..auto import AutoBackbone
from .configuration_deta import DetaConfig
# Module-level setup: logger, optional-dependency imports, and docstring
# placeholders. (Fix: the logger was assigned twice with identical arguments;
# the duplicate assignment is removed.)
logger = logging.get_logger(__name__)

if is_vision_available():
    from transformers.image_transforms import center_to_corners_format

if is_torchvision_available():
    from torchvision.ops.boxes import batched_nms

if is_scipy_available():
    from scipy.optimize import linear_sum_assignment

# Names substituted into the auto-generated docstrings of this module.
_CONFIG_FOR_DOC = "DetaConfig"
_CHECKPOINT_FOR_DOC = "jozhang97/deta-swin-large-o365"

DETA_PRETRAINED_MODEL_ARCHIVE_LIST = [
    "jozhang97/deta-swin-large-o365",
    # See all DETA models at https://huggingface.co/models?filter=deta
]
@dataclass
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrDecoderOutput with DeformableDetr->Deta
class DetaDecoderOutput(ModelOutput):
    """
    Base class for outputs of the DetaDecoder. This class adds two attributes to BaseModelOutputWithCrossAttentions,
    namely:
    - a stacked tensor of intermediate decoder hidden states (i.e. the output of each decoder layer)
    - a stacked tensor of intermediate reference points.
    Args:
        last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`):
            Sequence of hidden-states at the output of the last layer of the model.
        intermediate_hidden_states (`torch.FloatTensor` of shape `(batch_size, config.decoder_layers, num_queries, hidden_size)`):
            Stacked intermediate hidden states (output of each layer of the decoder).
        intermediate_reference_points (`torch.FloatTensor` of shape `(batch_size, config.decoder_layers, sequence_length, hidden_size)`):
            Stacked intermediate reference points (reference points of each layer of the decoder).
        hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
            Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
            shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer
            plus the initial embedding outputs.
        attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,
            sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in
            the self-attention heads.
        cross_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` and `config.add_cross_attention=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,
            sequence_length)`. Attentions weights of the decoder's cross-attention layer, after the attention softmax,
            used to compute the weighted average in the cross-attention heads.
    """

    # All fields default to None so the output can be populated selectively depending on
    # the `output_hidden_states` / `output_attentions` flags.
    last_hidden_state: torch.FloatTensor = None
    intermediate_hidden_states: torch.FloatTensor = None
    intermediate_reference_points: torch.FloatTensor = None
    hidden_states: Optional[Tuple[torch.FloatTensor]] = None
    attentions: Optional[Tuple[torch.FloatTensor]] = None
    cross_attentions: Optional[Tuple[torch.FloatTensor]] = None
@dataclass
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrModelOutput with DeformableDetr->Deta,Deformable DETR->DETA
class DetaModelOutput(ModelOutput):
    """
    Base class for outputs of the Deformable DETR encoder-decoder model.
    Args:
        init_reference_points (`torch.FloatTensor` of shape `(batch_size, num_queries, 4)`):
            Initial reference points sent through the Transformer decoder.
        last_hidden_state (`torch.FloatTensor` of shape `(batch_size, num_queries, hidden_size)`):
            Sequence of hidden-states at the output of the last layer of the decoder of the model.
        intermediate_hidden_states (`torch.FloatTensor` of shape `(batch_size, config.decoder_layers, num_queries, hidden_size)`):
            Stacked intermediate hidden states (output of each layer of the decoder).
        intermediate_reference_points (`torch.FloatTensor` of shape `(batch_size, config.decoder_layers, num_queries, 4)`):
            Stacked intermediate reference points (reference points of each layer of the decoder).
        decoder_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
            Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
            shape `(batch_size, num_queries, hidden_size)`. Hidden-states of the decoder at the output of each layer
            plus the initial embedding outputs.
        decoder_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, num_queries,
            num_queries)`. Attentions weights of the decoder, after the attention softmax, used to compute the weighted
            average in the self-attention heads.
        cross_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_queries, num_heads, 4, 4)`.
            Attentions weights of the decoder's cross-attention layer, after the attention softmax, used to compute the
            weighted average in the cross-attention heads.
        encoder_last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Sequence of hidden-states at the output of the last layer of the encoder of the model.
        encoder_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
            Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
            shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the encoder at the output of each
            layer plus the initial embedding outputs.
        encoder_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_queries, num_heads, 4, 4)`.
            Attentions weights of the encoder, after the attention softmax, used to compute the weighted average in the
            self-attention heads.
        enc_outputs_class (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.num_labels)`, *optional*, returned when `config.with_box_refine=True` and `config.two_stage=True`):
            Predicted bounding boxes scores where the top `config.two_stage_num_proposals` scoring bounding boxes are
            picked as region proposals in the first stage. Output of bounding box binary classification (i.e.
            foreground and background).
        enc_outputs_coord_logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, 4)`, *optional*, returned when `config.with_box_refine=True` and `config.two_stage=True`):
            Logits of predicted bounding boxes coordinates in the first stage.
    """

    # Fields default to None; the two-stage outputs (`enc_outputs_*`) are only populated
    # when the model runs with `config.two_stage=True`.
    init_reference_points: torch.FloatTensor = None
    last_hidden_state: torch.FloatTensor = None
    intermediate_hidden_states: torch.FloatTensor = None
    intermediate_reference_points: torch.FloatTensor = None
    decoder_hidden_states: Optional[Tuple[torch.FloatTensor]] = None
    decoder_attentions: Optional[Tuple[torch.FloatTensor]] = None
    cross_attentions: Optional[Tuple[torch.FloatTensor]] = None
    encoder_last_hidden_state: Optional[torch.FloatTensor] = None
    encoder_hidden_states: Optional[Tuple[torch.FloatTensor]] = None
    encoder_attentions: Optional[Tuple[torch.FloatTensor]] = None
    enc_outputs_class: Optional[torch.FloatTensor] = None
    enc_outputs_coord_logits: Optional[torch.FloatTensor] = None
@dataclass
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrObjectDetectionOutput with DeformableDetr->Deta
class DetaObjectDetectionOutput(ModelOutput):
    """
    Output type of [`DetaForObjectDetection`].
    Args:
        loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` are provided)):
            Total loss as a linear combination of a negative log-likehood (cross-entropy) for class prediction and a
            bounding box loss. The latter is defined as a linear combination of the L1 loss and the generalized
            scale-invariant IoU loss.
        loss_dict (`Dict`, *optional*):
            A dictionary containing the individual losses. Useful for logging.
        logits (`torch.FloatTensor` of shape `(batch_size, num_queries, num_classes + 1)`):
            Classification logits (including no-object) for all queries.
        pred_boxes (`torch.FloatTensor` of shape `(batch_size, num_queries, 4)`):
            Normalized boxes coordinates for all queries, represented as (center_x, center_y, width, height). These
            values are normalized in [0, 1], relative to the size of each individual image in the batch (disregarding
            possible padding). You can use [`~DetaProcessor.post_process_object_detection`] to retrieve the
            unnormalized bounding boxes.
        auxiliary_outputs (`list[Dict]`, *optional*):
            Optional, only returned when auxilary losses are activated (i.e. `config.auxiliary_loss` is set to `True`)
            and labels are provided. It is a list of dictionaries containing the two above keys (`logits` and
            `pred_boxes`) for each decoder layer.
        last_hidden_state (`torch.FloatTensor` of shape `(batch_size, num_queries, hidden_size)`, *optional*):
            Sequence of hidden-states at the output of the last layer of the decoder of the model.
        decoder_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
            Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
            shape `(batch_size, num_queries, hidden_size)`. Hidden-states of the decoder at the output of each layer
            plus the initial embedding outputs.
        decoder_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, num_queries,
            num_queries)`. Attentions weights of the decoder, after the attention softmax, used to compute the weighted
            average in the self-attention heads.
        cross_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_queries, num_heads, 4, 4)`.
            Attentions weights of the decoder's cross-attention layer, after the attention softmax, used to compute the
            weighted average in the cross-attention heads.
        encoder_last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Sequence of hidden-states at the output of the last layer of the encoder of the model.
        encoder_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
            Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
            shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the encoder at the output of each
            layer plus the initial embedding outputs.
        encoder_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, sequence_length, num_heads, 4,
            4)`. Attentions weights of the encoder, after the attention softmax, used to compute the weighted average
            in the self-attention heads.
        intermediate_hidden_states (`torch.FloatTensor` of shape `(batch_size, config.decoder_layers, num_queries, hidden_size)`):
            Stacked intermediate hidden states (output of each layer of the decoder).
        intermediate_reference_points (`torch.FloatTensor` of shape `(batch_size, config.decoder_layers, num_queries, 4)`):
            Stacked intermediate reference points (reference points of each layer of the decoder).
        init_reference_points (`torch.FloatTensor` of shape `(batch_size, num_queries, 4)`):
            Initial reference points sent through the Transformer decoder.
        enc_outputs_class (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.num_labels)`, *optional*, returned when `config.with_box_refine=True` and `config.two_stage=True`):
            Predicted bounding boxes scores where the top `config.two_stage_num_proposals` scoring bounding boxes are
            picked as region proposals in the first stage. Output of bounding box binary classification (i.e.
            foreground and background).
        enc_outputs_coord_logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, 4)`, *optional*, returned when `config.with_box_refine=True` and `config.two_stage=True`):
            Logits of predicted bounding boxes coordinates in the first stage.
    """

    loss: Optional[torch.FloatTensor] = None
    loss_dict: Optional[Dict] = None
    logits: torch.FloatTensor = None
    pred_boxes: torch.FloatTensor = None
    auxiliary_outputs: Optional[List[Dict]] = None
    init_reference_points: Optional[torch.FloatTensor] = None
    last_hidden_state: Optional[torch.FloatTensor] = None
    intermediate_hidden_states: Optional[torch.FloatTensor] = None
    intermediate_reference_points: Optional[torch.FloatTensor] = None
    decoder_hidden_states: Optional[Tuple[torch.FloatTensor]] = None
    decoder_attentions: Optional[Tuple[torch.FloatTensor]] = None
    cross_attentions: Optional[Tuple[torch.FloatTensor]] = None
    encoder_last_hidden_state: Optional[torch.FloatTensor] = None
    encoder_hidden_states: Optional[Tuple[torch.FloatTensor]] = None
    encoder_attentions: Optional[Tuple[torch.FloatTensor]] = None
    # The two fields below were annotated with a bare, un-parameterized `Optional`;
    # per the docstring above they hold `torch.FloatTensor`s.
    enc_outputs_class: Optional[torch.FloatTensor] = None
    enc_outputs_coord_logits: Optional[torch.FloatTensor] = None
def _get_clones(module, N):
return nn.ModuleList([copy.deepcopy(module) for i in range(N)])
def inverse_sigmoid(x, eps=1e-5):
    """Numerically stable inverse of the sigmoid (logit), with inputs clamped to [eps, 1 - eps]."""
    clamped = x.clamp(min=0, max=1)
    numerator = clamped.clamp(min=eps)
    denominator = (1 - clamped).clamp(min=eps)
    return torch.log(numerator / denominator)
# Copied from transformers.models.detr.modeling_detr.DetrFrozenBatchNorm2d with Detr->Deta
class DetaFrozenBatchNorm2d(nn.Module):
    """
    BatchNorm2d with frozen batch statistics and affine parameters.

    Adapted from torchvision.misc.ops, with an epsilon added before `rsqrt`; without it, models other
    than torchvision.models.resnet[18,34,50,101] produce NaNs.
    """

    def __init__(self, n):
        super().__init__()
        # Buffers (not parameters): they are loaded from checkpoints but never trained.
        self.register_buffer("weight", torch.ones(n))
        self.register_buffer("bias", torch.zeros(n))
        self.register_buffer("running_mean", torch.zeros(n))
        self.register_buffer("running_var", torch.ones(n))

    def _load_from_state_dict(
        self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs
    ):
        # Regular BatchNorm2d checkpoints carry `num_batches_tracked`; this frozen variant has no
        # such buffer, so drop the key (if present) before delegating to the default loader.
        state_dict.pop(prefix + "num_batches_tracked", None)
        super()._load_from_state_dict(
            state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs
        )

    def forward(self, x):
        # Reshape every buffer to (1, C, 1, 1) up front so the broadcasting is explicit
        # and the math below stays user-friendly.
        weight = self.weight.reshape(1, -1, 1, 1)
        bias = self.bias.reshape(1, -1, 1, 1)
        running_var = self.running_var.reshape(1, -1, 1, 1)
        running_mean = self.running_mean.reshape(1, -1, 1, 1)
        epsilon = 1e-5
        scale = weight * (running_var + epsilon).rsqrt()
        shift = bias - running_mean * scale
        return x * scale + shift
# Copied from transformers.models.detr.modeling_detr.replace_batch_norm with Detr->Deta
def replace_batch_norm(m, name=""):
    """
    Recursively replace every `nn.BatchNorm2d` attribute of module `m` (and its children) with a
    `DetaFrozenBatchNorm2d` initialized from the original layer's affine parameters and running stats.

    Args:
        m (`nn.Module`): module to convert in place.
        name (`str`): name of `m` within its parent (used only for the recursive traversal).
    """
    for attr_str in dir(m):
        target_attr = getattr(m, attr_str)
        if isinstance(target_attr, nn.BatchNorm2d):
            frozen = DetaFrozenBatchNorm2d(target_attr.num_features)
            # `target_attr` already holds the batch-norm layer; the original code re-fetched it
            # with a redundant getattr. Copy its state into the frozen replacement.
            frozen.weight.data.copy_(target_attr.weight)
            frozen.bias.data.copy_(target_attr.bias)
            frozen.running_mean.data.copy_(target_attr.running_mean)
            frozen.running_var.data.copy_(target_attr.running_var)
            setattr(m, attr_str, frozen)
    for n, ch in m.named_children():
        replace_batch_norm(ch, n)
class DetaBackboneWithPositionalEncodings(nn.Module):
    """
    Backbone wrapper that additionally computes positional embeddings for each feature map.

    All nn.BatchNorm2d layers of the backbone are replaced by DetaFrozenBatchNorm2d as defined above.
    """

    def __init__(self, config):
        super().__init__()
        backbone = AutoBackbone.from_config(config.backbone_config)
        with torch.no_grad():
            replace_batch_norm(backbone)
        self.model = backbone
        self.intermediate_channel_sizes = self.model.channels

        # TODO fix this
        if config.backbone_config.model_type == "resnet":
            for name, parameter in self.model.named_parameters():
                # Freeze everything except stages 1-3 (i.e. the stem/stage 0 stays frozen).
                if all(stage not in name for stage in ("stages.1", "stages.2", "stages.3")):
                    parameter.requires_grad_(False)

        self.position_embedding = build_position_encoding(config)

    def forward(self, pixel_values: torch.Tensor, pixel_mask: torch.Tensor):
        """
        Outputs feature maps of latter stages C_3 through C_5 in ResNet if `config.num_feature_levels > 1`, otherwise
        outputs feature maps of C_5.
        """
        # Run the backbone to obtain the list of multi-scale feature maps.
        feature_maps = self.model(pixel_values).feature_maps

        outputs = []
        position_embeddings_list = []
        for feature_map in feature_maps:
            # Downsample the pixel mask to the spatial size of this feature map.
            downsampled_mask = nn.functional.interpolate(
                pixel_mask[None].float(), size=feature_map.shape[-2:]
            ).to(torch.bool)[0]
            embedding = self.position_embedding(feature_map, downsampled_mask).to(feature_map.dtype)
            outputs.append((feature_map, downsampled_mask))
            position_embeddings_list.append(embedding)
        return outputs, position_embeddings_list
# Copied from transformers.models.detr.modeling_detr._expand_mask
def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, target_len: Optional[int] = None):
"""
Expands attention_mask from `[batch_size, seq_len]` to `[batch_size, 1, target_seq_len, source_seq_len]`.
"""
batch_size, source_len = mask.size()
target_len = target_len if target_len is not None else source_len
expanded_mask = mask[:, None, None, :].expand(batch_size, 1, target_len, source_len).to(dtype)
inverted_mask = 1.0 - expanded_mask
return inverted_mask.masked_fill(inverted_mask.bool(), torch.finfo(dtype).min)
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrSinePositionEmbedding with DeformableDetr->Deta
class DetaSinePositionEmbedding(nn.Module):
    """
    This is a more standard version of the position embedding, very similar to the one used by the Attention is all you
    need paper, generalized to work on images.
    """

    def __init__(self, embedding_dim=64, temperature=10000, normalize=False, scale=None):
        super().__init__()
        self.embedding_dim = embedding_dim
        self.temperature = temperature
        self.normalize = normalize
        if scale is not None and normalize is False:
            raise ValueError("normalize should be True if scale is passed")
        if scale is None:
            scale = 2 * math.pi
        self.scale = scale

    def forward(self, pixel_values, pixel_mask):
        """
        Compute sine/cosine position embeddings of shape `(batch, 2 * embedding_dim, height, width)`
        from the (batch, height, width) boolean `pixel_mask`.
        """
        if pixel_mask is None:
            raise ValueError("No pixel mask provided")
        # Cumulative sums over the mask give each valid pixel its (row, column) index.
        y_embed = pixel_mask.cumsum(1, dtype=torch.float32)
        x_embed = pixel_mask.cumsum(2, dtype=torch.float32)
        if self.normalize:
            eps = 1e-6
            y_embed = (y_embed - 0.5) / (y_embed[:, -1:, :] + eps) * self.scale
            x_embed = (x_embed - 0.5) / (x_embed[:, :, -1:] + eps) * self.scale

        dim_t = torch.arange(self.embedding_dim, dtype=torch.float32, device=pixel_values.device)
        # BUGFIX: the parenthesis was misplaced as `torch_int_div(dim_t, 2 / self.embedding_dim)`,
        # which floor-divides by a fraction and overflows `temperature ** exponent` to inf.
        # The Deformable DETR formula is temperature ** (2 * floor(i / 2) / embedding_dim).
        dim_t = self.temperature ** (2 * torch_int_div(dim_t, 2) / self.embedding_dim)

        pos_x = x_embed[:, :, :, None] / dim_t
        pos_y = y_embed[:, :, :, None] / dim_t
        # Interleave sin (even channels) and cos (odd channels), then flatten back.
        pos_x = torch.stack((pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4).flatten(3)
        pos_y = torch.stack((pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4).flatten(3)
        pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2)
        return pos
# Copied from transformers.models.detr.modeling_detr.DetrLearnedPositionEmbedding
class DetaLearnedPositionEmbedding(nn.Module):
    """
    This module learns positional embeddings up to a fixed maximum size.
    """

    def __init__(self, embedding_dim=256):
        super().__init__()
        # 50 is the maximum supported height/width of a feature map.
        self.row_embeddings = nn.Embedding(50, embedding_dim)
        self.column_embeddings = nn.Embedding(50, embedding_dim)

    def forward(self, pixel_values, pixel_mask=None):
        height, width = pixel_values.shape[-2:]
        x_emb = self.column_embeddings(torch.arange(width, device=pixel_values.device))
        y_emb = self.row_embeddings(torch.arange(height, device=pixel_values.device))
        # Broadcast column embeddings across rows and row embeddings across columns, then
        # concatenate along the channel axis: (height, width, 2 * embedding_dim).
        grid = torch.cat(
            [x_emb.unsqueeze(0).repeat(height, 1, 1), y_emb.unsqueeze(1).repeat(1, width, 1)], dim=-1
        )
        # (height, width, channels) -> (batch, channels, height, width)
        return grid.permute(2, 0, 1).unsqueeze(0).repeat(pixel_values.shape[0], 1, 1, 1)
# Copied from transformers.models.detr.modeling_detr.build_position_encoding with Detr->Deta
def build_position_encoding(config):
    """Build the position-embedding module selected by `config.position_embedding_type` ("sine" or "learned")."""
    n_steps = config.d_model // 2
    if config.position_embedding_type == "sine":
        # TODO find a better way of exposing other arguments
        return DetaSinePositionEmbedding(n_steps, normalize=True)
    if config.position_embedding_type == "learned":
        return DetaLearnedPositionEmbedding(n_steps)
    raise ValueError(f"Not supported {config.position_embedding_type}")
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.multi_scale_deformable_attention
def multi_scale_deformable_attention(
    value: Tensor, value_spatial_shapes: Tensor, sampling_locations: Tensor, attention_weights: Tensor
) -> Tensor:
    """
    Pure-PyTorch multiscale deformable attention: sample each level's value map at the predicted
    sampling locations and combine the samples with the predicted attention weights.

    Args:
        value: `(batch_size, sum(H_l * W_l), num_heads, hidden_dim)` flattened multi-level values.
        value_spatial_shapes: per-level `(height, width)` pairs used to split `value` back into maps.
        sampling_locations: `(batch_size, num_queries, num_heads, num_levels, num_points, 2)` sampling
            coordinates, normalized to [0, 1].
        attention_weights: `(batch_size, num_queries, num_heads, num_levels, num_points)` weights.

    Returns:
        `(batch_size, num_queries, num_heads * hidden_dim)` attended output.
    """
    batch_size, _, num_heads, hidden_dim = value.shape
    _, num_queries, num_heads, num_levels, num_points, _ = sampling_locations.shape
    # Split the flattened value tensor back into one tensor per feature level.
    value_list = value.split([height * width for height, width in value_spatial_shapes], dim=1)
    # Map [0, 1] coordinates to the [-1, 1] range expected by grid_sample.
    sampling_grids = 2 * sampling_locations - 1
    sampling_value_list = []
    for level_id, (height, width) in enumerate(value_spatial_shapes):
        # batch_size, height*width, num_heads, hidden_dim
        # -> batch_size, height*width, num_heads*hidden_dim
        # -> batch_size, num_heads*hidden_dim, height*width
        # -> batch_size*num_heads, hidden_dim, height, width
        value_l_ = (
            value_list[level_id].flatten(2).transpose(1, 2).reshape(batch_size * num_heads, hidden_dim, height, width)
        )
        # batch_size, num_queries, num_heads, num_points, 2
        # -> batch_size, num_heads, num_queries, num_points, 2
        # -> batch_size*num_heads, num_queries, num_points, 2
        sampling_grid_l_ = sampling_grids[:, :, :, level_id].transpose(1, 2).flatten(0, 1)
        # batch_size*num_heads, hidden_dim, num_queries, num_points
        sampling_value_l_ = nn.functional.grid_sample(
            value_l_, sampling_grid_l_, mode="bilinear", padding_mode="zeros", align_corners=False
        )
        sampling_value_list.append(sampling_value_l_)
    # (batch_size, num_queries, num_heads, num_levels, num_points)
    # -> (batch_size, num_heads, num_queries, num_levels, num_points)
    # -> (batch_size, num_heads, 1, num_queries, num_levels*num_points)
    attention_weights = attention_weights.transpose(1, 2).reshape(
        batch_size * num_heads, 1, num_queries, num_levels * num_points
    )
    # Weighted sum over all (level, point) samples, then fold heads back into the channel dim.
    output = (
        (torch.stack(sampling_value_list, dim=-2).flatten(-2) * attention_weights)
        .sum(-1)
        .view(batch_size, num_heads * hidden_dim, num_queries)
    )
    return output.transpose(1, 2).contiguous()
class DetaMultiscaleDeformableAttention(nn.Module):
    """
    Multiscale deformable attention as proposed in Deformable DETR.
    """

    # Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrMultiscaleDeformableAttention.__init__ with DeformableDetr->Deta
    def __init__(self, embed_dim: int, num_heads: int, n_levels: int, n_points: int):
        super().__init__()
        if embed_dim % num_heads != 0:
            raise ValueError(
                f"embed_dim (d_model) must be divisible by num_heads, but got {embed_dim} and {num_heads}"
            )
        dim_per_head = embed_dim // num_heads
        # check if dim_per_head is power of 2
        if not ((dim_per_head & (dim_per_head - 1) == 0) and dim_per_head != 0):
            warnings.warn(
                "You'd better set embed_dim (d_model) in DetaMultiscaleDeformableAttention to make the"
                " dimension of each attention head a power of 2 which is more efficient in the authors' CUDA"
                " implementation."
            )
        # Step size used by the authors' CUDA kernel; unused by the PyTorch path in forward().
        self.im2col_step = 64
        self.d_model = embed_dim
        self.n_levels = n_levels
        self.n_heads = num_heads
        self.n_points = n_points
        # Per query, predict 2 sampling-offset coordinates and 1 attention weight
        # for every (head, level, point) combination.
        self.sampling_offsets = nn.Linear(embed_dim, num_heads * n_levels * n_points * 2)
        self.attention_weights = nn.Linear(embed_dim, num_heads * n_levels * n_points)
        self.value_proj = nn.Linear(embed_dim, embed_dim)
        self.output_proj = nn.Linear(embed_dim, embed_dim)
        self._reset_parameters()

    def _reset_parameters(self):
        # Initialize sampling offsets so each head starts by looking in a distinct direction:
        # unit vectors spread around a circle, scaled by (point index + 1) per sampling point.
        nn.init.constant_(self.sampling_offsets.weight.data, 0.0)
        thetas = torch.arange(self.n_heads, dtype=torch.float32) * (2.0 * math.pi / self.n_heads)
        grid_init = torch.stack([thetas.cos(), thetas.sin()], -1)
        grid_init = (
            (grid_init / grid_init.abs().max(-1, keepdim=True)[0])
            .view(self.n_heads, 1, 1, 2)
            .repeat(1, self.n_levels, self.n_points, 1)
        )
        for i in range(self.n_points):
            grid_init[:, :, i, :] *= i + 1
        with torch.no_grad():
            self.sampling_offsets.bias = nn.Parameter(grid_init.view(-1))
        nn.init.constant_(self.attention_weights.weight.data, 0.0)
        nn.init.constant_(self.attention_weights.bias.data, 0.0)
        nn.init.xavier_uniform_(self.value_proj.weight.data)
        nn.init.constant_(self.value_proj.bias.data, 0.0)
        nn.init.xavier_uniform_(self.output_proj.weight.data)
        nn.init.constant_(self.output_proj.bias.data, 0.0)

    def with_pos_embed(self, tensor: torch.Tensor, position_embeddings: Optional[Tensor]):
        # Position embeddings are added (not concatenated) to the content features.
        return tensor if position_embeddings is None else tensor + position_embeddings

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        position_embeddings: Optional[torch.Tensor] = None,
        reference_points=None,
        spatial_shapes=None,
        level_start_index=None,
        output_attentions: bool = False,
    ):
        """
        Compute deformable attention of `hidden_states` (queries) over `encoder_hidden_states` (values).

        `reference_points` has last dim 2 (normalized x, y centers) or 4 (x, y, w, h boxes); sampling
        offsets are normalized accordingly. Returns `(output, attention_weights)`.
        NOTE(review): `encoder_attention_mask`, `level_start_index` and `output_attentions` are accepted
        but not used by this PyTorch implementation; attention weights are always returned.
        """
        # add position embeddings to the hidden states before projecting to queries and keys
        if position_embeddings is not None:
            hidden_states = self.with_pos_embed(hidden_states, position_embeddings)
        batch_size, num_queries, _ = hidden_states.shape
        batch_size, sequence_length, _ = encoder_hidden_states.shape
        # The flattened multi-level feature maps must add up to the encoder sequence length.
        if (spatial_shapes[:, 0] * spatial_shapes[:, 1]).sum() != sequence_length:
            raise ValueError(
                "Make sure to align the spatial shapes with the sequence length of the encoder hidden states"
            )
        value = self.value_proj(encoder_hidden_states)
        if attention_mask is not None:
            # we invert the attention_mask
            value = value.masked_fill(~attention_mask[..., None], float(0))
        value = value.view(batch_size, sequence_length, self.n_heads, self.d_model // self.n_heads)
        sampling_offsets = self.sampling_offsets(hidden_states).view(
            batch_size, num_queries, self.n_heads, self.n_levels, self.n_points, 2
        )
        attention_weights = self.attention_weights(hidden_states).view(
            batch_size, num_queries, self.n_heads, self.n_levels * self.n_points
        )
        # Softmax jointly over all (level, point) pairs, then split back per level.
        attention_weights = F.softmax(attention_weights, -1).view(
            batch_size, num_queries, self.n_heads, self.n_levels, self.n_points
        )
        # batch_size, num_queries, n_heads, n_levels, n_points, 2
        if reference_points.shape[-1] == 2:
            # Offsets are expressed in feature-map cells; divide by (width, height) per level
            # to keep the sampling locations in normalized [0, 1] coordinates.
            offset_normalizer = torch.stack([spatial_shapes[..., 1], spatial_shapes[..., 0]], -1)
            sampling_locations = (
                reference_points[:, :, None, :, None, :]
                + sampling_offsets / offset_normalizer[None, None, None, :, None, :]
            )
        elif reference_points.shape[-1] == 4:
            # Box references: scale offsets by half the box size (w, h).
            sampling_locations = (
                reference_points[:, :, None, :, None, :2]
                + sampling_offsets / self.n_points * reference_points[:, :, None, :, None, 2:] * 0.5
            )
        else:
            raise ValueError(f"Last dim of reference_points must be 2 or 4, but got {reference_points.shape[-1]}")
        # PyTorch implementation (for now)
        output = multi_scale_deformable_attention(value, spatial_shapes, sampling_locations, attention_weights)
        output = self.output_proj(output)
        return output, attention_weights
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrMultiheadAttention with DeformableDetr->Deta,Deformable DETR->DETA
class DetaMultiheadAttention(nn.Module):
    """
    Multi-headed attention from 'Attention Is All You Need' paper.
    Here, we add position embeddings to the queries and keys (as explained in the Deformable DETR paper).
    """

    def __init__(
        self,
        embed_dim: int,
        num_heads: int,
        dropout: float = 0.0,
        bias: bool = True,
    ):
        super().__init__()
        self.embed_dim = embed_dim
        self.num_heads = num_heads
        self.dropout = dropout
        self.head_dim = embed_dim // num_heads
        if self.head_dim * num_heads != self.embed_dim:
            raise ValueError(
                f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`:"
                f" {num_heads})."
            )
        self.scaling = self.head_dim**-0.5

        self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
        self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
        self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
        self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias)

    def _shape(self, tensor: torch.Tensor, seq_len: int, batch_size: int):
        # (batch, seq, embed) -> (batch, num_heads, seq, head_dim)
        return tensor.view(batch_size, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()

    def with_pos_embed(self, tensor: torch.Tensor, position_embeddings: Optional[Tensor]):
        return tensor if position_embeddings is None else tensor + position_embeddings

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_embeddings: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
        """
        Input shape: Batch x Time x Channel

        Self-attention over `hidden_states`. Position embeddings, if given, are added to the
        queries and keys but NOT the values. Returns `(attn_output, attn_weights_or_None)`.
        """
        batch_size, target_len, embed_dim = hidden_states.size()
        # Keep the original hidden states for the value projection. BUGFIX: this alias was
        # previously assigned only inside the `if` below, raising NameError whenever the module
        # was called without position embeddings.
        hidden_states_original = hidden_states
        # add position embeddings to the hidden states before projecting to queries and keys
        if position_embeddings is not None:
            hidden_states = self.with_pos_embed(hidden_states, position_embeddings)

        # get queries, keys and values (values come from the original, position-free states)
        query_states = self.q_proj(hidden_states) * self.scaling
        key_states = self._shape(self.k_proj(hidden_states), -1, batch_size)
        value_states = self._shape(self.v_proj(hidden_states_original), -1, batch_size)

        proj_shape = (batch_size * self.num_heads, -1, self.head_dim)
        query_states = self._shape(query_states, target_len, batch_size).view(*proj_shape)
        key_states = key_states.view(*proj_shape)
        value_states = value_states.view(*proj_shape)

        source_len = key_states.size(1)

        attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))

        if attn_weights.size() != (batch_size * self.num_heads, target_len, source_len):
            raise ValueError(
                f"Attention weights should be of size {(batch_size * self.num_heads, target_len, source_len)}, but is"
                f" {attn_weights.size()}"
            )

        # expand attention_mask
        if attention_mask is not None:
            # [batch_size, seq_len] -> [batch_size, 1, target_seq_len, source_seq_len]
            attention_mask = _expand_mask(attention_mask, hidden_states.dtype)

        if attention_mask is not None:
            if attention_mask.size() != (batch_size, 1, target_len, source_len):
                raise ValueError(
                    f"Attention mask should be of size {(batch_size, 1, target_len, source_len)}, but is"
                    f" {attention_mask.size()}"
                )
            attn_weights = attn_weights.view(batch_size, self.num_heads, target_len, source_len) + attention_mask
            attn_weights = attn_weights.view(batch_size * self.num_heads, target_len, source_len)

        attn_weights = nn.functional.softmax(attn_weights, dim=-1)

        if output_attentions:
            # this operation is a bit awkward, but it's required to
            # make sure that attn_weights keeps its gradient.
            # In order to do so, attn_weights have to reshaped
            # twice and have to be reused in the following
            attn_weights_reshaped = attn_weights.view(batch_size, self.num_heads, target_len, source_len)
            attn_weights = attn_weights_reshaped.view(batch_size * self.num_heads, target_len, source_len)
        else:
            attn_weights_reshaped = None

        attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training)

        attn_output = torch.bmm(attn_probs, value_states)

        if attn_output.size() != (batch_size * self.num_heads, target_len, self.head_dim):
            raise ValueError(
                f"`attn_output` should be of size {(batch_size, self.num_heads, target_len, self.head_dim)}, but is"
                f" {attn_output.size()}"
            )

        attn_output = attn_output.view(batch_size, self.num_heads, target_len, self.head_dim)
        attn_output = attn_output.transpose(1, 2)
        attn_output = attn_output.reshape(batch_size, target_len, embed_dim)

        attn_output = self.out_proj(attn_output)

        return attn_output, attn_weights_reshaped
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrEncoderLayer with DeformableDetr->Deta
class DetaEncoderLayer(nn.Module):
    # One DETA encoder block: multi-scale deformable self-attention followed by a
    # feed-forward network, each sub-block wrapped in dropout + residual + LayerNorm
    # (post-norm ordering: the norm is applied AFTER the residual addition).
    def __init__(self, config: DetaConfig):
        super().__init__()
        self.embed_dim = config.d_model
        # Deformable attention samples `n_points` locations per feature level
        # instead of attending densely over the whole flattened sequence.
        self.self_attn = DetaMultiscaleDeformableAttention(
            embed_dim=self.embed_dim,
            num_heads=config.encoder_attention_heads,
            n_levels=config.num_feature_levels,
            n_points=config.encoder_n_points,
        )
        self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)
        self.dropout = config.dropout
        self.activation_fn = ACT2FN[config.activation_function]
        self.activation_dropout = config.activation_dropout
        self.fc1 = nn.Linear(self.embed_dim, config.encoder_ffn_dim)
        self.fc2 = nn.Linear(config.encoder_ffn_dim, self.embed_dim)
        self.final_layer_norm = nn.LayerNorm(self.embed_dim)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: torch.Tensor,
        position_embeddings: torch.Tensor = None,
        reference_points=None,
        spatial_shapes=None,
        level_start_index=None,
        output_attentions: bool = False,
    ):
        """
        Args:
            hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`):
                Input to the layer.
            attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`):
                Attention mask.
            position_embeddings (`torch.FloatTensor`, *optional*):
                Position embeddings, to be added to `hidden_states`.
            reference_points (`torch.FloatTensor`, *optional*):
                Reference points.
            spatial_shapes (`torch.LongTensor`, *optional*):
                Spatial shapes of the backbone feature maps.
            level_start_index (`torch.LongTensor`, *optional*):
                Level start index.
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
        """
        residual = hidden_states

        # Apply Multi-scale Deformable Attention Module on the multi-scale feature maps.
        # Note: the encoder self-attends, so `hidden_states` is passed as both the
        # query input and the `encoder_hidden_states` (key/value) input.
        hidden_states, attn_weights = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            encoder_hidden_states=hidden_states,
            encoder_attention_mask=attention_mask,
            position_embeddings=position_embeddings,
            reference_points=reference_points,
            spatial_shapes=spatial_shapes,
            level_start_index=level_start_index,
            output_attentions=output_attentions,
        )

        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
        hidden_states = residual + hidden_states
        hidden_states = self.self_attn_layer_norm(hidden_states)

        # Feed-forward sub-block: fc1 -> activation -> dropout -> fc2 -> dropout,
        # then residual + LayerNorm.
        residual = hidden_states
        hidden_states = self.activation_fn(self.fc1(hidden_states))
        hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training)

        hidden_states = self.fc2(hidden_states)
        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)

        hidden_states = residual + hidden_states
        hidden_states = self.final_layer_norm(hidden_states)

        if self.training:
            # Guard against fp16 overflow during training: clamp any tensor that
            # contains inf/nan to just inside the dtype's representable range.
            if torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any():
                clamp_value = torch.finfo(hidden_states.dtype).max - 1000
                hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value)

        outputs = (hidden_states,)

        if output_attentions:
            outputs += (attn_weights,)

        return outputs
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrDecoderLayer with DeformableDetr->Deta
class DetaDecoderLayer(nn.Module):
    # One DETA decoder block: standard multi-head self-attention over the object
    # queries, multi-scale deformable cross-attention into the encoder output, and
    # a feed-forward network — each with dropout + residual + LayerNorm (post-norm).
    def __init__(self, config: DetaConfig):
        super().__init__()
        self.embed_dim = config.d_model

        # self-attention
        self.self_attn = DetaMultiheadAttention(
            embed_dim=self.embed_dim,
            num_heads=config.decoder_attention_heads,
            dropout=config.attention_dropout,
        )
        self.dropout = config.dropout
        self.activation_fn = ACT2FN[config.activation_function]
        self.activation_dropout = config.activation_dropout

        self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)
        # cross-attention
        self.encoder_attn = DetaMultiscaleDeformableAttention(
            embed_dim=self.embed_dim,
            num_heads=config.decoder_attention_heads,
            n_levels=config.num_feature_levels,
            n_points=config.decoder_n_points,
        )
        self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim)
        # feedforward neural networks
        self.fc1 = nn.Linear(self.embed_dim, config.decoder_ffn_dim)
        self.fc2 = nn.Linear(config.decoder_ffn_dim, self.embed_dim)
        self.final_layer_norm = nn.LayerNorm(self.embed_dim)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: Optional[torch.Tensor] = None,
        reference_points=None,
        spatial_shapes=None,
        level_start_index=None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        encoder_attention_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
    ):
        """
        Args:
            hidden_states (`torch.FloatTensor`):
                Input to the layer of shape `(seq_len, batch, embed_dim)`.
            position_embeddings (`torch.FloatTensor`, *optional*):
                Position embeddings that are added to the queries and keys in the self-attention layer.
            reference_points (`torch.FloatTensor`, *optional*):
                Reference points.
            spatial_shapes (`torch.LongTensor`, *optional*):
                Spatial shapes.
            level_start_index (`torch.LongTensor`, *optional*):
                Level start index.
            encoder_hidden_states (`torch.FloatTensor`):
                cross attention input to the layer of shape `(seq_len, batch, embed_dim)`
            encoder_attention_mask (`torch.FloatTensor`): encoder attention mask of size
                `(batch, 1, target_len, source_len)` where padding elements are indicated by very large negative
                values.
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
        """
        residual = hidden_states

        # Self Attention
        hidden_states, self_attn_weights = self.self_attn(
            hidden_states=hidden_states,
            position_embeddings=position_embeddings,
            output_attentions=output_attentions,
        )

        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
        hidden_states = residual + hidden_states
        hidden_states = self.self_attn_layer_norm(hidden_states)

        second_residual = hidden_states

        # Cross-Attention
        # (the None initialization below is immediately overwritten; kept for parity
        # with the upstream implementation this class is copied from)
        cross_attn_weights = None
        hidden_states, cross_attn_weights = self.encoder_attn(
            hidden_states=hidden_states,
            attention_mask=encoder_attention_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            position_embeddings=position_embeddings,
            reference_points=reference_points,
            spatial_shapes=spatial_shapes,
            level_start_index=level_start_index,
            output_attentions=output_attentions,
        )

        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
        hidden_states = second_residual + hidden_states

        hidden_states = self.encoder_attn_layer_norm(hidden_states)

        # Fully Connected
        residual = hidden_states
        hidden_states = self.activation_fn(self.fc1(hidden_states))
        hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training)
        hidden_states = self.fc2(hidden_states)
        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
        hidden_states = residual + hidden_states
        hidden_states = self.final_layer_norm(hidden_states)

        outputs = (hidden_states,)

        if output_attentions:
            outputs += (self_attn_weights, cross_attn_weights)

        return outputs
# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead
class DetaClassificationHead(nn.Module):
    """Two-layer MLP classification head: dropout -> dense -> tanh -> dropout -> projection.

    Maps `input_dim` features to `num_classes` logits through an `inner_dim` hidden layer,
    applying the same dropout before each linear layer.
    """

    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
        super().__init__()
        # Module creation order (dense, dropout, out_proj) is kept stable so that
        # parameter initialization order matches the reference implementation.
        self.dense = nn.Linear(input_dim, inner_dim)
        self.dropout = nn.Dropout(p=pooler_dropout)
        self.out_proj = nn.Linear(inner_dim, num_classes)

    def forward(self, hidden_states: torch.Tensor):
        features = self.dropout(hidden_states)
        features = torch.tanh(self.dense(features))
        return self.out_proj(self.dropout(features))
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrPreTrainedModel with DeformableDetr->Deta
class DetaPreTrainedModel(PreTrainedModel):
    # Base class wiring DETA models into the Transformers save/load machinery and
    # providing the shared weight-initialization logic.
    config_class = DetaConfig
    base_model_prefix = "model"
    main_input_name = "pixel_values"

    def _init_weights(self, module):
        """Initialize the weights of a single submodule (called by `post_init`/`init_weights`)."""
        std = self.config.init_std

        if isinstance(module, DetaLearnedPositionEmbedding):
            nn.init.uniform_(module.row_embeddings.weight)
            nn.init.uniform_(module.column_embeddings.weight)
        elif isinstance(module, DetaMultiscaleDeformableAttention):
            # Deformable attention has its own specialized init (sampling offsets etc.).
            module._reset_parameters()
        elif isinstance(module, (nn.Linear, nn.Conv2d, nn.BatchNorm2d)):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        # These checks are independent of the isinstance chain above: any module that
        # carries these attributes gets the extra initialization.
        if hasattr(module, "reference_points") and not self.config.two_stage:
            nn.init.xavier_uniform_(module.reference_points.weight.data, gain=1.0)
            nn.init.constant_(module.reference_points.bias.data, 0.0)
        if hasattr(module, "level_embed"):
            nn.init.normal_(module.level_embed)

    def _set_gradient_checkpointing(self, module, value=False):
        # Only the decoder supports gradient checkpointing (see DetaDecoder.forward).
        if isinstance(module, DetaDecoder):
            module.gradient_checkpointing = value
DETA_START_DOCSTRING = r"""
This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
etc.)
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
and behavior.
Parameters:
config ([`DetaConfig`]):
Model configuration class with all the parameters of the model. Initializing with a config file does not
load the weights associated with the model, only the configuration. Check out the
[`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""
DETA_INPUTS_DOCSTRING = r"""
Args:
pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):
Pixel values. Padding will be ignored by default should you provide it.
Pixel values can be obtained using [`AutoImageProcessor`]. See [`AutoImageProcessor.__call__`] for details.
pixel_mask (`torch.LongTensor` of shape `(batch_size, height, width)`, *optional*):
Mask to avoid performing attention on padding pixel values. Mask values selected in `[0, 1]`:
- 1 for pixels that are real (i.e. **not masked**),
- 0 for pixels that are padding (i.e. **masked**).
[What are attention masks?](../glossary#attention-mask)
decoder_attention_mask (`torch.LongTensor` of shape `(batch_size, num_queries)`, *optional*):
Not used by default. Can be used to mask object queries.
encoder_outputs (`tuple(tuple(torch.FloatTensor)`, *optional*):
Tuple consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`)
`last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*) is a sequence of
hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder.
inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
Optionally, instead of passing the flattened feature map (output of the backbone + projection layer), you
can choose to directly pass a flattened representation of an image.
decoder_inputs_embeds (`torch.FloatTensor` of shape `(batch_size, num_queries, hidden_size)`, *optional*):
Optionally, instead of initializing the queries with a tensor of zeros, you can choose to directly pass an
embedded representation.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.
"""
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrEncoder with DeformableDetr->Deta
class DetaEncoder(DetaPreTrainedModel):
    """
    Transformer encoder consisting of *config.encoder_layers* deformable attention layers. Each layer is a
    [`DetaEncoderLayer`].

    The encoder updates the flattened multi-scale feature maps through multiple deformable attention layers.

    Args:
        config: DetaConfig
    """

    def __init__(self, config: DetaConfig):
        super().__init__(config)

        self.dropout = config.dropout
        self.layers = nn.ModuleList([DetaEncoderLayer(config) for _ in range(config.encoder_layers)])

        # Initialize weights and apply final processing
        self.post_init()

    @staticmethod
    def get_reference_points(spatial_shapes, valid_ratios, device):
        """
        Get reference points for each feature map. Used in decoder.

        Args:
            spatial_shapes (`torch.LongTensor` of shape `(num_feature_levels, 2)`):
                Spatial shapes of each feature map.
            valid_ratios (`torch.FloatTensor` of shape `(batch_size, num_feature_levels, 2)`):
                Valid ratios of each feature map.
            device (`torch.device`):
                Device on which to create the tensors.

        Returns:
            `torch.FloatTensor` of shape `(batch_size, num_queries, num_feature_levels, 2)`
        """
        reference_points_list = []
        for level, (height, width) in enumerate(spatial_shapes):
            # Pixel-center grid for this level: coordinates 0.5, 1.5, ..., size - 0.5.
            ref_y, ref_x = meshgrid(
                torch.linspace(0.5, height - 0.5, height, dtype=torch.float32, device=device),
                torch.linspace(0.5, width - 0.5, width, dtype=torch.float32, device=device),
                indexing="ij",
            )
            # TODO: valid_ratios could be useless here. check https://github.com/fundamentalvision/Deformable-DETR/issues/36
            # Normalize to [0, 1] relative to the *valid* (non-padded) region of each sample.
            ref_y = ref_y.reshape(-1)[None] / (valid_ratios[:, None, level, 1] * height)
            ref_x = ref_x.reshape(-1)[None] / (valid_ratios[:, None, level, 0] * width)
            ref = torch.stack((ref_x, ref_y), -1)
            reference_points_list.append(ref)
        # Concatenate the per-level point lists along the query axis, then broadcast
        # against the valid ratios of every level.
        reference_points = torch.cat(reference_points_list, 1)
        reference_points = reference_points[:, :, None] * valid_ratios[:, None]
        return reference_points

    def forward(
        self,
        inputs_embeds=None,
        attention_mask=None,
        position_embeddings=None,
        spatial_shapes=None,
        level_start_index=None,
        valid_ratios=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        Args:
            inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`):
                Flattened feature map (output of the backbone + projection layer) that is passed to the encoder.
            attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
                Mask to avoid performing attention on padding pixel features. Mask values selected in `[0, 1]`:
                - 1 for pixel features that are real (i.e. **not masked**),
                - 0 for pixel features that are padding (i.e. **masked**).
                [What are attention masks?](../glossary#attention-mask)
            position_embeddings (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`):
                Position embeddings that are added to the queries and keys in each self-attention layer.
            spatial_shapes (`torch.LongTensor` of shape `(num_feature_levels, 2)`):
                Spatial shapes of each feature map.
            level_start_index (`torch.LongTensor` of shape `(num_feature_levels)`):
                Starting index of each feature map.
            valid_ratios (`torch.FloatTensor` of shape `(batch_size, num_feature_levels, 2)`):
                Ratio of valid area in each feature level.
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            output_hidden_states (`bool`, *optional*):
                Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors
                for more detail.
            return_dict (`bool`, *optional*):
                Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.
        """
        # Fall back to config-level defaults when the caller did not specify flags.
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        hidden_states = inputs_embeds
        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)

        # Reference points are shared by all layers: each layer's deformable
        # attention samples around these normalized locations.
        reference_points = self.get_reference_points(spatial_shapes, valid_ratios, device=inputs_embeds.device)

        encoder_states = () if output_hidden_states else None
        all_attentions = () if output_attentions else None
        for i, encoder_layer in enumerate(self.layers):
            if output_hidden_states:
                # Hidden states are recorded *before* each layer (plus once after the loop),
                # so `encoder_states` ends up with num_layers + 1 entries.
                encoder_states = encoder_states + (hidden_states,)
            layer_outputs = encoder_layer(
                hidden_states,
                attention_mask,
                position_embeddings=position_embeddings,
                reference_points=reference_points,
                spatial_shapes=spatial_shapes,
                level_start_index=level_start_index,
                output_attentions=output_attentions,
            )

            hidden_states = layer_outputs[0]

            if output_attentions:
                all_attentions = all_attentions + (layer_outputs[1],)

        if output_hidden_states:
            encoder_states = encoder_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
        return BaseModelOutput(
            last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
        )
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrDecoder with DeformableDetr->Deta,Deformable DETR->DETA
class DetaDecoder(DetaPreTrainedModel):
    """
    Transformer decoder consisting of *config.decoder_layers* layers. Each layer is a [`DetaDecoderLayer`].

    The decoder updates the query embeddings through multiple self-attention and cross-attention layers.

    Some tweaks for Deformable DETR:

    - `position_embeddings`, `reference_points`, `spatial_shapes` and `valid_ratios` are added to the forward pass.
    - it also returns a stack of intermediate outputs and reference points from all decoding layers.

    Args:
        config: DetaConfig
    """

    def __init__(self, config: DetaConfig):
        super().__init__(config)

        self.dropout = config.dropout
        self.layers = nn.ModuleList([DetaDecoderLayer(config) for _ in range(config.decoder_layers)])
        self.gradient_checkpointing = False

        # hack implementation for iterative bounding box refinement and two-stage Deformable DETR
        # (these are populated externally, by the model that owns this decoder)
        self.bbox_embed = None
        self.class_embed = None

        # Initialize weights and apply final processing
        self.post_init()

    def forward(
        self,
        inputs_embeds=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        position_embeddings=None,
        reference_points=None,
        spatial_shapes=None,
        level_start_index=None,
        valid_ratios=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        Args:
            inputs_embeds (`torch.FloatTensor` of shape `(batch_size, num_queries, hidden_size)`):
                The query embeddings that are passed into the decoder.
            encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
                Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
                of the decoder.
            encoder_attention_mask (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
                Mask to avoid performing cross-attention on padding pixel_values of the encoder. Mask values selected
                in `[0, 1]`:
                - 1 for pixels that are real (i.e. **not masked**),
                - 0 for pixels that are padding (i.e. **masked**).
            position_embeddings (`torch.FloatTensor` of shape `(batch_size, num_queries, hidden_size)`, *optional*):
                Position embeddings that are added to the queries and keys in each self-attention layer.
            reference_points (`torch.FloatTensor` of shape `(batch_size, num_queries, 4)` if `as_two_stage` else `(batch_size, num_queries, 2)`, *optional*):
                Reference point in range `[0, 1]`, top-left (0,0), bottom-right (1, 1), including padding area.
            spatial_shapes (`torch.FloatTensor` of shape `(num_feature_levels, 2)`):
                Spatial shapes of the feature maps.
            level_start_index (`torch.LongTensor` of shape `(num_feature_levels)`, *optional*):
                Indexes for the start of each feature level. In range `[0, sequence_length]`.
            valid_ratios (`torch.FloatTensor` of shape `(batch_size, num_feature_levels, 2)`, *optional*):
                Ratio of valid area in each feature level.
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            output_hidden_states (`bool`, *optional*):
                Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors
                for more detail.
            return_dict (`bool`, *optional*):
                Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if inputs_embeds is not None:
            hidden_states = inputs_embeds

        # decoder layers
        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None
        all_cross_attentions = () if (output_attentions and encoder_hidden_states is not None) else None
        intermediate = ()
        intermediate_reference_points = ()

        for idx, decoder_layer in enumerate(self.layers):
            # Scale the reference points by the valid ratios of every feature level.
            # 2-coordinate points (x, y) use the ratios directly; 4-coordinate boxes
            # (x, y, w, h) need the ratios duplicated to cover width/height as well.
            if reference_points.shape[-1] == 4:
                reference_points_input = (
                    reference_points[:, :, None] * torch.cat([valid_ratios, valid_ratios], -1)[:, None]
                )
            else:
                if reference_points.shape[-1] != 2:
                    raise ValueError("Reference points' last dimension must be of size 2")
                reference_points_input = reference_points[:, :, None] * valid_ratios[:, None]

            if output_hidden_states:
                all_hidden_states += (hidden_states,)

            if self.gradient_checkpointing and self.training:
                # Trade compute for memory: recompute the layer's activations during
                # the backward pass instead of storing them.

                def create_custom_forward(module):
                    def custom_forward(*inputs):
                        return module(*inputs, output_attentions)

                    return custom_forward

                layer_outputs = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(decoder_layer),
                    hidden_states,
                    encoder_hidden_states,
                    encoder_attention_mask,
                    None,
                )
            else:
                layer_outputs = decoder_layer(
                    hidden_states,
                    position_embeddings=position_embeddings,
                    encoder_hidden_states=encoder_hidden_states,
                    reference_points=reference_points_input,
                    spatial_shapes=spatial_shapes,
                    level_start_index=level_start_index,
                    encoder_attention_mask=encoder_attention_mask,
                    output_attentions=output_attentions,
                )

            hidden_states = layer_outputs[0]

            # hack implementation for iterative bounding box refinement
            if self.bbox_embed is not None:
                tmp = self.bbox_embed[idx](hidden_states)
                if reference_points.shape[-1] == 4:
                    new_reference_points = tmp + inverse_sigmoid(reference_points)
                    new_reference_points = new_reference_points.sigmoid()
                else:
                    if reference_points.shape[-1] != 2:
                        raise ValueError(
                            f"Reference points' last dimension must be of size 2, but is {reference_points.shape[-1]}"
                        )
                    # Only the (x, y) part of the prediction is refined against the
                    # previous reference; width/height come straight from `tmp`.
                    new_reference_points = tmp
                    new_reference_points[..., :2] = tmp[..., :2] + inverse_sigmoid(reference_points)
                    new_reference_points = new_reference_points.sigmoid()
                # detach: gradients do not flow through the refined reference points
                # into earlier layers.
                reference_points = new_reference_points.detach()

            intermediate += (hidden_states,)
            intermediate_reference_points += (reference_points,)

            if output_attentions:
                all_self_attns += (layer_outputs[1],)

                if encoder_hidden_states is not None:
                    all_cross_attentions += (layer_outputs[2],)

        # Keep batch_size as first dimension
        intermediate = torch.stack(intermediate, dim=1)
        intermediate_reference_points = torch.stack(intermediate_reference_points, dim=1)

        # add hidden states from the last decoder layer
        if output_hidden_states:
            all_hidden_states += (hidden_states,)

        if not return_dict:
            return tuple(
                v
                for v in [
                    hidden_states,
                    intermediate,
                    intermediate_reference_points,
                    all_hidden_states,
                    all_self_attns,
                    all_cross_attentions,
                ]
                if v is not None
            )
        return DetaDecoderOutput(
            last_hidden_state=hidden_states,
            intermediate_hidden_states=intermediate,
            intermediate_reference_points=intermediate_reference_points,
            hidden_states=all_hidden_states,
            attentions=all_self_attns,
            cross_attentions=all_cross_attentions,
        )
@add_start_docstrings(
"""
The bare DETA Model (consisting of a backbone and encoder-decoder Transformer) outputting raw hidden-states without
any specific head on top.
""",
DETA_START_DOCSTRING,
)
class DetaModel(DetaPreTrainedModel):
def __init__(self, config: DetaConfig):
    """Build the backbone, per-level input projections, encoder, decoder and
    (optionally) the two-stage proposal machinery described by `config`."""
    super().__init__(config)

    if config.two_stage:
        # Two-stage proposal assignment relies on torchvision ops (e.g. NMS).
        requires_backends(self, ["torchvision"])

    # Create backbone with positional encoding
    self.backbone = DetaBackboneWithPositionalEncodings(config)
    intermediate_channel_sizes = self.backbone.intermediate_channel_sizes

    # Create input projection layers
    if config.num_feature_levels > 1:
        num_backbone_outs = len(intermediate_channel_sizes)
        input_proj_list = []
        # One 1x1 conv + GroupNorm per backbone feature map.
        for _ in range(num_backbone_outs):
            in_channels = intermediate_channel_sizes[_]
            input_proj_list.append(
                nn.Sequential(
                    nn.Conv2d(in_channels, config.d_model, kernel_size=1),
                    nn.GroupNorm(32, config.d_model),
                )
            )
        # Extra (lower-resolution) levels are produced by stride-2 3x3 convs.
        # NOTE: `in_channels` deliberately carries over from the last backbone map
        # for the first extra level, then becomes d_model for subsequent ones.
        for _ in range(config.num_feature_levels - num_backbone_outs):
            input_proj_list.append(
                nn.Sequential(
                    nn.Conv2d(in_channels, config.d_model, kernel_size=3, stride=2, padding=1),
                    nn.GroupNorm(32, config.d_model),
                )
            )
            in_channels = config.d_model
        self.input_proj = nn.ModuleList(input_proj_list)
    else:
        # Single feature level: project only the last backbone feature map.
        self.input_proj = nn.ModuleList(
            [
                nn.Sequential(
                    nn.Conv2d(intermediate_channel_sizes[-1], config.d_model, kernel_size=1),
                    nn.GroupNorm(32, config.d_model),
                )
            ]
        )

    if not config.two_stage:
        # In single-stage mode, object queries are learned embeddings
        # (content + positional part, hence d_model * 2).
        self.query_position_embeddings = nn.Embedding(config.num_queries, config.d_model * 2)

    self.encoder = DetaEncoder(config)
    self.decoder = DetaDecoder(config)

    # Learned per-level embedding added to the flattened multi-scale features.
    self.level_embed = nn.Parameter(torch.Tensor(config.num_feature_levels, config.d_model))

    if config.two_stage:
        self.enc_output = nn.Linear(config.d_model, config.d_model)
        self.enc_output_norm = nn.LayerNorm(config.d_model)
        self.pos_trans = nn.Linear(config.d_model * 2, config.d_model * 2)
        self.pos_trans_norm = nn.LayerNorm(config.d_model * 2)
        self.pix_trans = nn.Linear(config.d_model, config.d_model)
        self.pix_trans_norm = nn.LayerNorm(config.d_model)
    else:
        # Single-stage: initial 2D reference points are predicted from the queries.
        self.reference_points = nn.Linear(config.d_model, 2)

    self.assign_first_stage = config.assign_first_stage
    self.two_stage_num_proposals = config.two_stage_num_proposals

    self.post_init()
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrModel.get_encoder
def get_encoder(self):
    """Return the deformable-attention encoder module."""
    return self.encoder
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrModel.get_decoder
def get_decoder(self):
    """Return the decoder module."""
    return self.decoder
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrModel.freeze_backbone
def freeze_backbone(self):
    """Disable gradient computation for every parameter of the backbone's conv encoder."""
    for _, parameter in self.backbone.conv_encoder.model.named_parameters():
        parameter.requires_grad_(False)
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrModel.unfreeze_backbone
def unfreeze_backbone(self):
    """Re-enable gradient computation for every parameter of the backbone's conv encoder."""
    for _, parameter in self.backbone.conv_encoder.model.named_parameters():
        parameter.requires_grad_(True)
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrModel.get_valid_ratio
def get_valid_ratio(self, mask):
    """Get the valid ratio of all feature maps.

    For each sample, measures the fraction of the feature map that is valid
    (mask == 1) along the height (first column) and width (first row), and
    returns them stacked as `(batch_size, 2)` in (width, height) order.
    """
    _, height, width = mask.shape
    rows_valid = torch.sum(mask[:, :, 0], 1)
    cols_valid = torch.sum(mask[:, 0, :], 1)
    height_ratio = rows_valid.float() / height
    width_ratio = cols_valid.float() / width
    return torch.stack([width_ratio, height_ratio], -1)
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrModel.get_proposal_pos_embed
def get_proposal_pos_embed(self, proposals):
    """Get the position embedding of the proposals.

    Each of the 4 box coordinates is squashed through a sigmoid, scaled into
    `[0, 2*pi]`, and expanded into 128 interleaved sine/cosine features,
    giving an output of shape `(batch_size, num_queries, 512)`.
    """
    embedding_dim = 128
    temperature = 10000
    two_pi = 2 * math.pi

    frequencies = torch.arange(embedding_dim, dtype=torch.float32, device=proposals.device)
    # Paired frequencies: positions 2k and 2k+1 share the same divisor.
    frequencies = temperature ** (2 * torch.div(frequencies, 2) / embedding_dim)
    # batch_size, num_queries, 4 — normalize coordinates into [0, 2*pi]
    normalized = proposals.sigmoid() * two_pi
    # batch_size, num_queries, 4, 128
    angles = normalized[:, :, :, None] / frequencies
    # Interleave sin of even slots with cos of odd slots, then flatten the
    # coordinate and feature axes together: batch_size, num_queries, 512.
    sin_part = angles[:, :, :, 0::2].sin()
    cos_part = angles[:, :, :, 1::2].cos()
    return torch.stack((sin_part, cos_part), dim=4).flatten(2)
def gen_encoder_output_proposals(self, enc_output, padding_mask, spatial_shapes):
    """Generate the encoder output proposals from encoded enc_output.

    Args:
        enc_output (Tensor[batch_size, sequence_length, hidden_size]): Output of the encoder.
        padding_mask (Tensor[batch_size, sequence_length]): Padding mask for `enc_output`.
        spatial_shapes (Tensor[num_feature_levels, 2]): Spatial shapes of the feature maps.

    Returns:
        `tuple(torch.FloatTensor)`: A tuple of feature map and bbox prediction.
            - object_query (Tensor[batch_size, sequence_length, hidden_size]): Object query features. Later used to
              directly predict a bounding box. (without the need of a decoder)
            - output_proposals (Tensor[batch_size, sequence_length, 4]): Normalized proposals, after an inverse
              sigmoid.
            - level_ids (Tensor[sequence_length]): Feature-level index of each proposal.
    """
    batch_size = enc_output.shape[0]
    proposals = []
    _cur = 0  # running offset of the current level inside the flattened sequence
    level_ids = []
    for level, (height, width) in enumerate(spatial_shapes):
        # Slice this level's padding mask back into its 2D layout.
        mask_flatten_ = padding_mask[:, _cur : (_cur + height * width)].view(batch_size, height, width, 1)
        # padding_mask uses True for padded positions, hence the inversion.
        valid_height = torch.sum(~mask_flatten_[:, :, 0, 0], 1)
        valid_width = torch.sum(~mask_flatten_[:, 0, :, 0], 1)

        grid_y, grid_x = meshgrid(
            torch.linspace(0, height - 1, height, dtype=torch.float32, device=enc_output.device),
            torch.linspace(0, width - 1, width, dtype=torch.float32, device=enc_output.device),
            indexing="ij",
        )
        grid = torch.cat([grid_x.unsqueeze(-1), grid_y.unsqueeze(-1)], -1)

        # Normalize cell centers (+0.5) by the valid extent of each sample.
        scale = torch.cat([valid_width.unsqueeze(-1), valid_height.unsqueeze(-1)], 1).view(batch_size, 1, 1, 2)
        grid = (grid.unsqueeze(0).expand(batch_size, -1, -1, -1) + 0.5) / scale
        # Proposal width/height grow with the level: 0.05 * 2^level.
        width_heigth = torch.ones_like(grid) * 0.05 * (2.0**level)
        proposal = torch.cat((grid, width_heigth), -1).view(batch_size, -1, 4)
        proposals.append(proposal)
        _cur += height * width
        level_ids.append(grid.new_ones(height * width, dtype=torch.long) * level)
    output_proposals = torch.cat(proposals, 1)
    # Keep only proposals comfortably inside (0, 1) in all 4 coordinates.
    output_proposals_valid = ((output_proposals > 0.01) & (output_proposals < 0.99)).all(-1, keepdim=True)
    output_proposals = torch.log(output_proposals / (1 - output_proposals))  # inverse sigmoid
    # Invalid/padded proposals are pushed to +inf so a later sigmoid saturates them.
    output_proposals = output_proposals.masked_fill(padding_mask.unsqueeze(-1), float("inf"))
    output_proposals = output_proposals.masked_fill(~output_proposals_valid, float("inf"))

    # assign each pixel as an object query
    object_query = enc_output
    object_query = object_query.masked_fill(padding_mask.unsqueeze(-1), float(0))
    object_query = object_query.masked_fill(~output_proposals_valid, float(0))
    object_query = self.enc_output_norm(self.enc_output(object_query))
    level_ids = torch.cat(level_ids)
    return object_query, output_proposals, level_ids
    @add_start_docstrings_to_model_forward(DETA_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=DetaModelOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        pixel_values,
        pixel_mask=None,
        decoder_attention_mask=None,
        encoder_outputs=None,
        inputs_embeds=None,
        decoder_inputs_embeds=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        Returns:

        Examples:

        ```python
        >>> from transformers import AutoImageProcessor, DetaModel
        >>> from PIL import Image
        >>> import requests

        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)

        >>> image_processor = AutoImageProcessor.from_pretrained("jozhang97/deta-swin-large-o365")
        >>> model = DetaModel.from_pretrained("jozhang97/deta-swin-large-o365", two_stage=False)

        >>> inputs = image_processor(images=image, return_tensors="pt")
        >>> outputs = model(**inputs)

        >>> last_hidden_states = outputs.last_hidden_state
        >>> list(last_hidden_states.shape)
        [1, 900, 256]
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        batch_size, num_channels, height, width = pixel_values.shape
        device = pixel_values.device

        # pixel_mask: 1 marks real pixels, 0 marks padding; default to "no padding"
        if pixel_mask is None:
            pixel_mask = torch.ones(((batch_size, height, width)), dtype=torch.long, device=device)

        # Extract multi-scale feature maps of same resolution `config.d_model` (cf Figure 4 in paper)
        # First, sent pixel_values + pixel_mask through Backbone to obtain the features
        # which is a list of tuples
        features, position_embeddings_list = self.backbone(pixel_values, pixel_mask)

        # Then, apply 1x1 convolution to reduce the channel dimension to d_model (256 by default)
        sources = []
        masks = []
        for level, (source, mask) in enumerate(features):
            sources.append(self.input_proj[level](source))
            masks.append(mask)
            if mask is None:
                raise ValueError("No attention mask was provided")

        # Lowest resolution feature maps are obtained via 3x3 stride 2 convolutions on the final stage
        if self.config.num_feature_levels > len(sources):
            _len_sources = len(sources)
            for level in range(_len_sources, self.config.num_feature_levels):
                if level == _len_sources:
                    # first extra level is derived from the backbone's last feature map
                    source = self.input_proj[level](features[-1][0])
                else:
                    # each further extra level is derived from the previous extra level
                    source = self.input_proj[level](sources[-1])
                # downsample the pixel mask to this level's spatial resolution
                mask = nn.functional.interpolate(pixel_mask[None].float(), size=source.shape[-2:]).to(torch.bool)[0]
                pos_l = self.backbone.position_embedding(source, mask).to(source.dtype)
                sources.append(source)
                masks.append(mask)
                position_embeddings_list.append(pos_l)

        # Create queries (only used in the single-stage setting; two-stage derives queries from proposals)
        query_embeds = None
        if not self.config.two_stage:
            query_embeds = self.query_position_embeddings.weight

        # Prepare encoder inputs (by flattening each level's H x W grid into a sequence)
        spatial_shapes = [(source.shape[2:]) for source in sources]
        source_flatten = [source.flatten(2).transpose(1, 2) for source in sources]
        mask_flatten = [mask.flatten(1) for mask in masks]
        # add a learned per-level embedding on top of the positional embeddings, so the
        # encoder can tell feature levels apart after concatenation
        lvl_pos_embed_flatten = []
        for level, pos_embed in enumerate(position_embeddings_list):
            pos_embed = pos_embed.flatten(2).transpose(1, 2)
            lvl_pos_embed = pos_embed + self.level_embed[level].view(1, 1, -1)
            lvl_pos_embed_flatten.append(lvl_pos_embed)
        source_flatten = torch.cat(source_flatten, 1)
        mask_flatten = torch.cat(mask_flatten, 1)
        lvl_pos_embed_flatten = torch.cat(lvl_pos_embed_flatten, 1)
        spatial_shapes = torch.as_tensor(spatial_shapes, dtype=torch.long, device=source_flatten.device)
        # start offset of each level inside the flattened sequence
        level_start_index = torch.cat((spatial_shapes.new_zeros((1,)), spatial_shapes.prod(1).cumsum(0)[:-1]))
        valid_ratios = torch.stack([self.get_valid_ratio(m) for m in masks], 1)
        valid_ratios = valid_ratios.float()

        # Fourth, sent source_flatten + mask_flatten + lvl_pos_embed_flatten (backbone + proj layer output) through encoder
        # Also provide spatial_shapes, level_start_index and valid_ratios
        if encoder_outputs is None:
            encoder_outputs = self.encoder(
                inputs_embeds=source_flatten,
                attention_mask=mask_flatten,
                position_embeddings=lvl_pos_embed_flatten,
                spatial_shapes=spatial_shapes,
                level_start_index=level_start_index,
                valid_ratios=valid_ratios,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                return_dict=return_dict,
            )
        # If the user passed a tuple for encoder_outputs, we wrap it in a BaseModelOutput when return_dict=True
        elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):
            encoder_outputs = BaseModelOutput(
                last_hidden_state=encoder_outputs[0],
                hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
                attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
            )

        # Fifth, prepare decoder inputs
        batch_size, _, num_channels = encoder_outputs[0].shape
        enc_outputs_class = None
        enc_outputs_coord_logits = None
        if self.config.two_stage:
            # one proposal (and its embedding) per encoder position; the mask is inverted because
            # gen_encoder_output_proposals expects True at padded positions (mask_flatten uses True = valid)
            object_query_embedding, output_proposals, level_ids = self.gen_encoder_output_proposals(
                encoder_outputs[0], ~mask_flatten, spatial_shapes
            )

            # hack implementation for two-stage DETA
            # apply a detection head to each pixel (A.4 in paper)
            # linear projection for bounding box binary classification (i.e. foreground and background)
            enc_outputs_class = self.decoder.class_embed[-1](object_query_embedding)
            # 3-layer FFN to predict bounding boxes coordinates (bbox regression branch)
            delta_bbox = self.decoder.bbox_embed[-1](object_query_embedding)
            enc_outputs_coord_logits = delta_bbox + output_proposals

            # only keep top scoring `config.two_stage_num_proposals` proposals
            topk = self.two_stage_num_proposals
            proposal_logit = enc_outputs_class[..., 0]

            if self.assign_first_stage:
                proposal_boxes = center_to_corners_format(enc_outputs_coord_logits.sigmoid().float()).clamp(0, 1)
                topk_proposals = []
                for b in range(batch_size):
                    prop_boxes_b = proposal_boxes[b]
                    prop_logits_b = proposal_logit[b]

                    # pre-nms per-level topk
                    pre_nms_topk = 1000
                    pre_nms_inds = []
                    for lvl in range(len(spatial_shapes)):
                        lvl_mask = level_ids == lvl
                        pre_nms_inds.append(torch.topk(prop_logits_b.sigmoid() * lvl_mask, pre_nms_topk)[1])
                    pre_nms_inds = torch.cat(pre_nms_inds)

                    # nms on topk indices; batched_nms with level_ids only suppresses within a level
                    post_nms_inds = batched_nms(
                        prop_boxes_b[pre_nms_inds], prop_logits_b[pre_nms_inds], level_ids[pre_nms_inds], 0.9
                    )
                    keep_inds = pre_nms_inds[post_nms_inds]

                    if len(keep_inds) < self.two_stage_num_proposals:
                        # NOTE(review): print-based warning; a logger.warning call would be preferable — left as-is
                        print(
                            f"[WARNING] nms proposals ({len(keep_inds)}) < {self.two_stage_num_proposals}, running"
                            " naive topk"
                        )
                        keep_inds = torch.topk(proposal_logit[b], topk)[1]

                    # keep top Q/L indices for L levels
                    q_per_l = topk // len(spatial_shapes)
                    is_level_ordered = (
                        level_ids[keep_inds][None]
                        == torch.arange(len(spatial_shapes), device=level_ids.device)[:, None]
                    )
                    keep_inds_mask = is_level_ordered & (is_level_ordered.cumsum(1) <= q_per_l)  # LS
                    keep_inds_mask = keep_inds_mask.any(0)  # S

                    # pad to Q indices (might let ones filtered from pre-nms sneak by... unlikely because we pick high conf anyways)
                    if keep_inds_mask.sum() < topk:
                        num_to_add = topk - keep_inds_mask.sum()
                        pad_inds = (~keep_inds_mask).nonzero()[:num_to_add]
                        keep_inds_mask[pad_inds] = True

                    keep_inds_topk = keep_inds[keep_inds_mask]
                    topk_proposals.append(keep_inds_topk)
                topk_proposals = torch.stack(topk_proposals)
            else:
                topk_proposals = torch.topk(enc_outputs_class[..., 0], topk, dim=1)[1]

            # gather coordinates of selected proposals; detached so the proposal head is
            # only trained through the encoder auxiliary loss, not the decoder
            topk_coords_logits = torch.gather(
                enc_outputs_coord_logits, 1, topk_proposals.unsqueeze(-1).repeat(1, 1, 4)
            )
            topk_coords_logits = topk_coords_logits.detach()
            reference_points = topk_coords_logits.sigmoid()
            init_reference_points = reference_points
            pos_trans_out = self.pos_trans_norm(self.pos_trans(self.get_proposal_pos_embed(topk_coords_logits)))
            query_embed, target = torch.split(pos_trans_out, num_channels, dim=2)
        else:
            # single-stage: split the learned query embeddings into positional and content halves
            query_embed, target = torch.split(query_embeds, num_channels, dim=1)
            query_embed = query_embed.unsqueeze(0).expand(batch_size, -1, -1)
            target = target.unsqueeze(0).expand(batch_size, -1, -1)
            reference_points = self.reference_points(query_embed).sigmoid()
            init_reference_points = reference_points

        decoder_outputs = self.decoder(
            inputs_embeds=target,
            position_embeddings=query_embed,
            encoder_hidden_states=encoder_outputs[0],
            encoder_attention_mask=mask_flatten,
            reference_points=reference_points,
            spatial_shapes=spatial_shapes,
            level_start_index=level_start_index,
            valid_ratios=valid_ratios,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        if not return_dict:
            enc_outputs = tuple(value for value in [enc_outputs_class, enc_outputs_coord_logits] if value is not None)
            tuple_outputs = (init_reference_points,) + decoder_outputs + encoder_outputs + enc_outputs

            return tuple_outputs

        return DetaModelOutput(
            init_reference_points=init_reference_points,
            last_hidden_state=decoder_outputs.last_hidden_state,
            intermediate_hidden_states=decoder_outputs.intermediate_hidden_states,
            intermediate_reference_points=decoder_outputs.intermediate_reference_points,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=encoder_outputs.last_hidden_state,
            encoder_hidden_states=encoder_outputs.hidden_states,
            encoder_attentions=encoder_outputs.attentions,
            enc_outputs_class=enc_outputs_class,
            enc_outputs_coord_logits=enc_outputs_coord_logits,
        )
@add_start_docstrings(
    """
    DETA Model (consisting of a backbone and encoder-decoder Transformer) with object detection heads on top, for tasks
    such as COCO detection.
    """,
    DETA_START_DOCSTRING,
)
class DetaForObjectDetection(DetaPreTrainedModel):
    # When using clones, all layers > 0 will be clones, but layer 0 *is* required
    # Raw strings so the backslashes are literal regex escapes (avoids invalid-escape warnings).
    _keys_to_ignore_on_load_missing = [r"bbox_embed\.[1-9]\d*", r"class_embed\.[1-9]\d*"]

    # Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrForObjectDetection.__init__ with DeformableDetr->Deta
    def __init__(self, config: DetaConfig):
        super().__init__(config)

        # Deformable DETR encoder-decoder model
        self.model = DetaModel(config)

        # Detection heads on top
        self.class_embed = nn.Linear(config.d_model, config.num_labels)
        self.bbox_embed = DetaMLPPredictionHead(
            input_dim=config.d_model, hidden_dim=config.d_model, output_dim=4, num_layers=3
        )

        # focal-loss style bias init: every class starts with prior probability 0.01
        prior_prob = 0.01
        bias_value = -math.log((1 - prior_prob) / prior_prob)
        self.class_embed.bias.data = torch.ones(config.num_labels) * bias_value
        nn.init.constant_(self.bbox_embed.layers[-1].weight.data, 0)
        nn.init.constant_(self.bbox_embed.layers[-1].bias.data, 0)

        # if two-stage, the last class_embed and bbox_embed is for region proposal generation
        num_pred = (config.decoder_layers + 1) if config.two_stage else config.decoder_layers
        if config.with_box_refine:
            self.class_embed = _get_clones(self.class_embed, num_pred)
            self.bbox_embed = _get_clones(self.bbox_embed, num_pred)
            nn.init.constant_(self.bbox_embed[0].layers[-1].bias.data[2:], -2.0)
            # hack implementation for iterative bounding box refinement
            self.model.decoder.bbox_embed = self.bbox_embed
        else:
            nn.init.constant_(self.bbox_embed.layers[-1].bias.data[2:], -2.0)
            # without box refinement, all per-layer heads share the same weights
            self.class_embed = nn.ModuleList([self.class_embed for _ in range(num_pred)])
            self.bbox_embed = nn.ModuleList([self.bbox_embed for _ in range(num_pred)])
            self.model.decoder.bbox_embed = None
        if config.two_stage:
            # hack implementation for two-stage
            self.model.decoder.class_embed = self.class_embed
            for box_embed in self.bbox_embed:
                nn.init.constant_(box_embed.layers[-1].bias.data[2:], 0.0)

        # Initialize weights and apply final processing
        self.post_init()

    @torch.jit.unused
    # Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrForObjectDetection._set_aux_loss
    def _set_aux_loss(self, outputs_class, outputs_coord):
        # this is a workaround to make torchscript happy, as torchscript
        # doesn't support dictionary with non-homogeneous values, such
        # as a dict having both a Tensor and a list.
        return [{"logits": a, "pred_boxes": b} for a, b in zip(outputs_class[:-1], outputs_coord[:-1])]

    @add_start_docstrings_to_model_forward(DETA_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=DetaObjectDetectionOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        pixel_values,
        pixel_mask=None,
        decoder_attention_mask=None,
        encoder_outputs=None,
        inputs_embeds=None,
        decoder_inputs_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (`List[Dict]` of len `(batch_size,)`, *optional*):
            Labels for computing the bipartite matching loss. List of dicts, each dictionary containing at least the
            following 2 keys: 'class_labels' and 'boxes' (the class labels and bounding boxes of an image in the batch
            respectively). The class labels themselves should be a `torch.LongTensor` of len `(number of bounding boxes
            in the image,)` and the boxes a `torch.FloatTensor` of shape `(number of bounding boxes in the image, 4)`.

        Returns:

        Examples:

        ```python
        >>> from transformers import AutoImageProcessor, DetaForObjectDetection
        >>> from PIL import Image
        >>> import requests

        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)

        >>> image_processor = AutoImageProcessor.from_pretrained("jozhang97/deta-swin-large")
        >>> model = DetaForObjectDetection.from_pretrained("jozhang97/deta-swin-large")

        >>> inputs = image_processor(images=image, return_tensors="pt")
        >>> outputs = model(**inputs)

        >>> # convert outputs (bounding boxes and class logits) to COCO API
        >>> target_sizes = torch.tensor([image.size[::-1]])
        >>> results = image_processor.post_process_object_detection(outputs, threshold=0.5, target_sizes=target_sizes)[
        ...     0
        ... ]
        >>> for score, label, box in zip(results["scores"], results["labels"], results["boxes"]):
        ...     box = [round(i, 2) for i in box.tolist()]
        ...     print(
        ...         f"Detected {model.config.id2label[label.item()]} with confidence "
        ...         f"{round(score.item(), 3)} at location {box}"
        ...     )
        Detected cat with confidence 0.683 at location [345.85, 23.68, 639.86, 372.83]
        Detected cat with confidence 0.683 at location [8.8, 52.49, 316.93, 473.45]
        Detected remote with confidence 0.568 at location [40.02, 73.75, 175.96, 117.33]
        Detected remote with confidence 0.546 at location [333.68, 77.13, 370.12, 187.51]
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # First, sent images through DETR base model to obtain encoder + decoder outputs
        outputs = self.model(
            pixel_values,
            pixel_mask=pixel_mask,
            decoder_attention_mask=decoder_attention_mask,
            encoder_outputs=encoder_outputs,
            inputs_embeds=inputs_embeds,
            decoder_inputs_embeds=decoder_inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        hidden_states = outputs.intermediate_hidden_states if return_dict else outputs[2]
        init_reference = outputs.init_reference_points if return_dict else outputs[0]
        inter_references = outputs.intermediate_reference_points if return_dict else outputs[3]

        # class logits + predicted bounding boxes, one prediction per decoder layer
        outputs_classes = []
        outputs_coords = []

        for level in range(hidden_states.shape[1]):
            # layer 0 refines the initial reference points, later layers refine the previous layer's output
            if level == 0:
                reference = init_reference
            else:
                reference = inter_references[:, level - 1]
            reference = inverse_sigmoid(reference)
            outputs_class = self.class_embed[level](hidden_states[:, level])
            delta_bbox = self.bbox_embed[level](hidden_states[:, level])
            if reference.shape[-1] == 4:
                outputs_coord_logits = delta_bbox + reference
            elif reference.shape[-1] == 2:
                # 2-d reference points only refine the box center
                delta_bbox[..., :2] += reference
                outputs_coord_logits = delta_bbox
            else:
                raise ValueError(f"reference.shape[-1] should be 4 or 2, but got {reference.shape[-1]}")
            outputs_coord = outputs_coord_logits.sigmoid()
            outputs_classes.append(outputs_class)
            outputs_coords.append(outputs_coord)
        # Keep batch_size as first dimension
        outputs_class = torch.stack(outputs_classes, dim=1)
        outputs_coord = torch.stack(outputs_coords, dim=1)

        # the last decoder layer provides the final predictions
        logits = outputs_class[:, -1]
        pred_boxes = outputs_coord[:, -1]

        loss, loss_dict, auxiliary_outputs = None, None, None
        if labels is not None:
            # First: create the matcher
            matcher = DetaHungarianMatcher(
                class_cost=self.config.class_cost, bbox_cost=self.config.bbox_cost, giou_cost=self.config.giou_cost
            )
            # Second: create the criterion
            losses = ["labels", "boxes", "cardinality"]
            criterion = DetaLoss(
                matcher=matcher,
                num_classes=self.config.num_labels,
                focal_alpha=self.config.focal_alpha,
                losses=losses,
                num_queries=self.config.num_queries,
            )
            criterion.to(logits.device)
            # Third: compute the losses, based on outputs and labels
            outputs_loss = {}
            outputs_loss["logits"] = logits
            outputs_loss["pred_boxes"] = pred_boxes
            if self.config.auxiliary_loss:
                intermediate = outputs.intermediate_hidden_states if return_dict else outputs[4]
                outputs_class = self.class_embed(intermediate)
                outputs_coord = self.bbox_embed(intermediate).sigmoid()
                auxiliary_outputs = self._set_aux_loss(outputs_class, outputs_coord)
                outputs_loss["auxiliary_outputs"] = auxiliary_outputs
            if self.config.two_stage:
                enc_outputs_coord = outputs.enc_outputs_coord_logits.sigmoid()
                # Fix: the first-stage predictions must go into `outputs_loss` (the dict handed to the
                # criterion) under the key layout the criterion expects ("logits", not "pred_logits").
                # They were previously written into `outputs`, so the criterion never saw them and the
                # first-stage loss was silently skipped.
                outputs_loss["enc_outputs"] = {"logits": outputs.enc_outputs_class, "pred_boxes": enc_outputs_coord}

            loss_dict = criterion(outputs_loss, labels)
            # Fourth: compute total loss, as a weighted sum of the various losses
            weight_dict = {"loss_ce": 1, "loss_bbox": self.config.bbox_loss_coefficient}
            weight_dict["loss_giou"] = self.config.giou_loss_coefficient
            if self.config.auxiliary_loss:
                aux_weight_dict = {}
                for i in range(self.config.decoder_layers - 1):
                    aux_weight_dict.update({k + f"_{i}": v for k, v in weight_dict.items()})
                weight_dict.update(aux_weight_dict)
            loss = sum(loss_dict[k] * weight_dict[k] for k in loss_dict.keys() if k in weight_dict)

        if not return_dict:
            if auxiliary_outputs is not None:
                # NOTE(review): `auxiliary_outputs` is a list while `output` is a tuple — this concatenation
                # looks suspect (list + tuple); left as-is to keep the tuple layout unchanged. TODO confirm.
                output = (logits, pred_boxes) + auxiliary_outputs + outputs
            else:
                output = (logits, pred_boxes) + outputs
            tuple_outputs = ((loss, loss_dict) + output) if loss is not None else output

            return tuple_outputs

        dict_outputs = DetaObjectDetectionOutput(
            loss=loss,
            loss_dict=loss_dict,
            logits=logits,
            pred_boxes=pred_boxes,
            auxiliary_outputs=auxiliary_outputs,
            last_hidden_state=outputs.last_hidden_state,
            decoder_hidden_states=outputs.decoder_hidden_states,
            decoder_attentions=outputs.decoder_attentions,
            cross_attentions=outputs.cross_attentions,
            encoder_last_hidden_state=outputs.encoder_last_hidden_state,
            encoder_hidden_states=outputs.encoder_hidden_states,
            encoder_attentions=outputs.encoder_attentions,
            intermediate_hidden_states=outputs.intermediate_hidden_states,
            intermediate_reference_points=outputs.intermediate_reference_points,
            init_reference_points=outputs.init_reference_points,
            enc_outputs_class=outputs.enc_outputs_class,
            enc_outputs_coord_logits=outputs.enc_outputs_coord_logits,
        )

        return dict_outputs
# Copied from transformers.models.detr.modeling_detr.dice_loss
def dice_loss(inputs, targets, num_boxes):
    """
    Compute the DICE loss, similar to generalized IOU for masks

    Args:
        inputs: A float tensor of arbitrary shape.
            The predictions for each example.
        targets: A float tensor with the same shape as inputs. Stores the binary
            classification label for each element in inputs (0 for the negative class and 1 for the positive
            class).
    """
    # turn logits into probabilities and flatten everything after the batch dim
    probs = inputs.sigmoid().flatten(1)
    # smoothed dice: 1 - (2*|X∩Y| + 1) / (|X| + |Y| + 1), one value per example
    intersection = 2 * (probs * targets).sum(1)
    totals = probs.sum(-1) + targets.sum(-1)
    per_example = 1 - (intersection + 1) / (totals + 1)
    return per_example.sum() / num_boxes
# Copied from transformers.models.detr.modeling_detr.sigmoid_focal_loss
def sigmoid_focal_loss(inputs, targets, num_boxes, alpha: float = 0.25, gamma: float = 2):
    """
    Loss used in RetinaNet for dense detection: https://arxiv.org/abs/1708.02002.

    Args:
        inputs (`torch.FloatTensor` of arbitrary shape):
            The predictions for each example.
        targets (`torch.FloatTensor` with the same shape as `inputs`)
            A tensor storing the binary classification label for each element in the `inputs` (0 for the negative class
            and 1 for the positive class).
        alpha (`float`, *optional*, defaults to `0.25`):
            Optional weighting factor in the range (0,1) to balance positive vs. negative examples.
        gamma (`int`, *optional*, defaults to `2`):
            Exponent of the modulating factor (1 - p_t) to balance easy vs hard examples.

    Returns:
        Loss tensor
    """
    probs = inputs.sigmoid()
    bce = nn.functional.binary_cross_entropy_with_logits(inputs, targets, reduction="none")

    # modulating factor (1 - p_t)^gamma down-weights well-classified examples
    p_t = probs * targets + (1 - probs) * (1 - targets)
    focal = bce * ((1 - p_t) ** gamma)

    if alpha >= 0:
        # class-balancing weight: alpha for positives, (1 - alpha) for negatives
        alpha_weight = alpha * targets + (1 - alpha) * (1 - targets)
        focal = alpha_weight * focal

    return focal.mean(1).sum() / num_boxes
class DetaLoss(nn.Module):
    """
    This class computes the losses for `DetaForObjectDetection`. The process happens in two steps: 1) we compute
    hungarian assignment between ground truth boxes and the outputs of the model 2) we supervise each pair of matched
    ground-truth / prediction (supervised class and box).

    Args:
        matcher (`DetaHungarianMatcher`):
            Module able to compute a matching between targets and proposals.
        num_classes (`int`):
            Number of object categories, omitting the special no-object category.
        focal_alpha (`float`):
            Alpha parameter in focal loss.
        losses (`List[str]`):
            List of all the losses to be applied. See `get_loss` for a list of all available losses.
        num_queries (`int`):
            Number of object queries, forwarded to the second-stage assigner.
        assign_first_stage (`bool`, *optional*, defaults to `False`):
            Whether to assign the encoder (first-stage) proposals with `DetaStage1Assigner` instead of the matcher.
        assign_second_stage (`bool`, *optional*, defaults to `False`):
            Whether to assign the decoder (second-stage) outputs with `DetaStage2Assigner` instead of the matcher.
    """

    def __init__(
        self,
        matcher,
        num_classes,
        focal_alpha,
        losses,
        num_queries,
        assign_first_stage=False,
        assign_second_stage=False,
    ):
        super().__init__()
        self.matcher = matcher
        self.num_classes = num_classes
        self.focal_alpha = focal_alpha
        self.losses = losses
        self.assign_first_stage = assign_first_stage
        self.assign_second_stage = assign_second_stage

        if self.assign_first_stage:
            self.stg1_assigner = DetaStage1Assigner()
        if self.assign_second_stage:
            self.stg2_assigner = DetaStage2Assigner(num_queries)

    # Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrLoss.loss_labels
    def loss_labels(self, outputs, targets, indices, num_boxes):
        """
        Classification loss (Binary focal loss) targets dicts must contain the key "class_labels" containing a tensor
        of dim [nb_target_boxes]
        """
        if "logits" not in outputs:
            raise KeyError("No logits were found in the outputs")
        source_logits = outputs["logits"]

        idx = self._get_source_permutation_idx(indices)
        target_classes_o = torch.cat([t["class_labels"][J] for t, (_, J) in zip(targets, indices)])
        # default every query to the "no object" class, then fill in the matched queries
        target_classes = torch.full(
            source_logits.shape[:2], self.num_classes, dtype=torch.int64, device=source_logits.device
        )
        target_classes[idx] = target_classes_o

        # one-hot with an extra "no object" column, dropped again below
        target_classes_onehot = torch.zeros(
            [source_logits.shape[0], source_logits.shape[1], source_logits.shape[2] + 1],
            dtype=source_logits.dtype,
            layout=source_logits.layout,
            device=source_logits.device,
        )
        target_classes_onehot.scatter_(2, target_classes.unsqueeze(-1), 1)

        target_classes_onehot = target_classes_onehot[:, :, :-1]
        loss_ce = (
            sigmoid_focal_loss(source_logits, target_classes_onehot, num_boxes, alpha=self.focal_alpha, gamma=2)
            * source_logits.shape[1]
        )
        losses = {"loss_ce": loss_ce}

        return losses

    @torch.no_grad()
    # Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrLoss.loss_cardinality
    def loss_cardinality(self, outputs, targets, indices, num_boxes):
        """
        Compute the cardinality error, i.e. the absolute error in the number of predicted non-empty boxes.

        This is not really a loss, it is intended for logging purposes only. It doesn't propagate gradients.
        """
        logits = outputs["logits"]
        device = logits.device
        target_lengths = torch.as_tensor([len(v["class_labels"]) for v in targets], device=device)
        # Count the number of predictions that are NOT "no-object" (which is the last class)
        card_pred = (logits.argmax(-1) != logits.shape[-1] - 1).sum(1)
        card_err = nn.functional.l1_loss(card_pred.float(), target_lengths.float())
        losses = {"cardinality_error": card_err}
        return losses

    # Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrLoss.loss_boxes
    def loss_boxes(self, outputs, targets, indices, num_boxes):
        """
        Compute the losses related to the bounding boxes, the L1 regression loss and the GIoU loss.

        Targets dicts must contain the key "boxes" containing a tensor of dim [nb_target_boxes, 4]. The target boxes
        are expected in format (center_x, center_y, w, h), normalized by the image size.
        """
        if "pred_boxes" not in outputs:
            raise KeyError("No predicted boxes found in outputs")
        idx = self._get_source_permutation_idx(indices)
        source_boxes = outputs["pred_boxes"][idx]
        target_boxes = torch.cat([t["boxes"][i] for t, (_, i) in zip(targets, indices)], dim=0)

        loss_bbox = nn.functional.l1_loss(source_boxes, target_boxes, reduction="none")

        losses = {}
        losses["loss_bbox"] = loss_bbox.sum() / num_boxes

        loss_giou = 1 - torch.diag(
            generalized_box_iou(center_to_corners_format(source_boxes), center_to_corners_format(target_boxes))
        )
        losses["loss_giou"] = loss_giou.sum() / num_boxes
        return losses

    # Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrLoss._get_source_permutation_idx
    def _get_source_permutation_idx(self, indices):
        # permute predictions following indices
        batch_idx = torch.cat([torch.full_like(source, i) for i, (source, _) in enumerate(indices)])
        source_idx = torch.cat([source for (source, _) in indices])
        return batch_idx, source_idx

    # Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrLoss._get_target_permutation_idx
    def _get_target_permutation_idx(self, indices):
        # permute targets following indices
        batch_idx = torch.cat([torch.full_like(target, i) for i, (_, target) in enumerate(indices)])
        target_idx = torch.cat([target for (_, target) in indices])
        return batch_idx, target_idx

    # Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrLoss.get_loss
    def get_loss(self, loss, outputs, targets, indices, num_boxes):
        loss_map = {
            "labels": self.loss_labels,
            "cardinality": self.loss_cardinality,
            "boxes": self.loss_boxes,
        }
        if loss not in loss_map:
            raise ValueError(f"Loss {loss} not supported")
        return loss_map[loss](outputs, targets, indices, num_boxes)

    def forward(self, outputs, targets):
        """
        This performs the loss computation.

        Args:
            outputs (`dict`, *optional*):
                Dictionary of tensors, see the output specification of the model for the format.
            targets (`List[dict]`, *optional*):
                List of dicts, such that `len(targets) == batch_size`. The expected keys in each dict depends on the
                losses applied, see each loss' doc.
        """
        outputs_without_aux = {k: v for k, v in outputs.items() if k != "auxiliary_outputs"}

        # Retrieve the matching between the outputs of the last layer and the targets
        if self.assign_second_stage:
            indices = self.stg2_assigner(outputs_without_aux, targets)
        else:
            indices = self.matcher(outputs_without_aux, targets)

        # Compute the average number of target boxes across all nodes, for normalization purposes
        num_boxes = sum(len(t["class_labels"]) for t in targets)
        num_boxes = torch.as_tensor([num_boxes], dtype=torch.float, device=next(iter(outputs.values())).device)
        # (Niels): comment out function below, distributed training to be added
        # if is_dist_avail_and_initialized():
        #     torch.distributed.all_reduce(num_boxes)
        # (Niels) in original implementation, num_boxes is divided by get_world_size()
        num_boxes = torch.clamp(num_boxes, min=1).item()

        # Compute all the requested losses
        losses = {}
        for loss in self.losses:
            losses.update(self.get_loss(loss, outputs, targets, indices, num_boxes))

        # In case of auxiliary losses, we repeat this process with the output of each intermediate layer.
        if "auxiliary_outputs" in outputs:
            for i, auxiliary_outputs in enumerate(outputs["auxiliary_outputs"]):
                if not self.assign_second_stage:
                    indices = self.matcher(auxiliary_outputs, targets)
                for loss in self.losses:
                    l_dict = self.get_loss(loss, auxiliary_outputs, targets, indices, num_boxes)
                    l_dict = {k + f"_{i}": v for k, v in l_dict.items()}
                    losses.update(l_dict)

        if "enc_outputs" in outputs:
            enc_outputs = outputs["enc_outputs"]
            bin_targets = copy.deepcopy(targets)
            for bt in bin_targets:
                # The first stage only classifies foreground vs. background, so collapse all classes to 0.
                # Fix: targets store their labels under "class_labels" (writing to a new "labels" key left
                # the real class ids untouched, so the proposals were never binarized).
                bt["class_labels"] = torch.zeros_like(bt["class_labels"])
            if self.assign_first_stage:
                indices = self.stg1_assigner(enc_outputs, bin_targets)
            else:
                indices = self.matcher(enc_outputs, bin_targets)
            for loss in self.losses:
                # Fix: `get_loss`/`loss_labels` accept no extra keyword arguments, so the previous
                # `kwargs["log"] = False` pass-through raised a TypeError at runtime.
                l_dict = self.get_loss(loss, enc_outputs, bin_targets, indices, num_boxes)
                l_dict = {k + "_enc": v for k, v in l_dict.items()}
                losses.update(l_dict)

        return losses
# Copied from transformers.models.detr.modeling_detr.DetrMLPPredictionHead
class DetaMLPPredictionHead(nn.Module):
"""
Very simple multi-layer perceptron (MLP, also called FFN), used to predict the normalized center coordinates,
height and width of a bounding box w.r.t. an image.
Copied from https://github.com/facebookresearch/detr/blob/master/models/detr.py
"""
def __init__(self, input_dim, hidden_dim, output_dim, num_layers):
super().__init__()
self.num_layers = num_layers
h = [hidden_dim] * (num_layers - 1)
self.layers = nn.ModuleList(nn.Linear(n, k) for n, k in zip([input_dim] + h, h + [output_dim]))
def forward(self, x):
for i, layer in enumerate(self.layers):
x = nn.functional.relu(layer(x)) if i < self.num_layers - 1 else layer(x)
return x
# Copied from transformers.models.deformable_detr.modeling_deformable_detr.DeformableDetrHungarianMatcher with DeformableDetr->Deta
class DetaHungarianMatcher(nn.Module):
    """
    This class computes an assignment between the targets and the predictions of the network.

    For efficiency reasons, the targets don't include the no_object. Because of this, in general, there are more
    predictions than targets. In this case, we do a 1-to-1 matching of the best predictions, while the others are
    un-matched (and thus treated as non-objects).

    Args:
        class_cost:
            The relative weight of the classification error in the matching cost.
        bbox_cost:
            The relative weight of the L1 error of the bounding box coordinates in the matching cost.
        giou_cost:
            The relative weight of the giou loss of the bounding box in the matching cost.
    """

    def __init__(self, class_cost: float = 1, bbox_cost: float = 1, giou_cost: float = 1):
        super().__init__()
        requires_backends(self, ["scipy"])

        self.class_cost = class_cost
        self.bbox_cost = bbox_cost
        self.giou_cost = giou_cost
        if class_cost == 0 and bbox_cost == 0 and giou_cost == 0:
            raise ValueError("All costs of the Matcher can't be 0")

    @torch.no_grad()
    def forward(self, outputs, targets):
        """
        Args:
            outputs (`dict`):
                A dictionary that contains at least these entries:
                * "logits": Tensor of dim [batch_size, num_queries, num_classes] with the classification logits
                * "pred_boxes": Tensor of dim [batch_size, num_queries, 4] with the predicted box coordinates.
            targets (`List[dict]`):
                A list of targets (len(targets) = batch_size), where each target is a dict containing:
                * "class_labels": Tensor of dim [num_target_boxes] (where num_target_boxes is the number of
                  ground-truth objects in the target) containing the class labels
                * "boxes": Tensor of dim [num_target_boxes, 4] containing the target box coordinates.

        Returns:
            `List[Tuple]`: A list of size `batch_size`, containing tuples of (index_i, index_j) where:
            - index_i is the indices of the selected predictions (in order)
            - index_j is the indices of the corresponding selected targets (in order)
            For each batch element, it holds: len(index_i) = len(index_j) = min(num_queries, num_target_boxes)
        """
        batch_size, num_queries = outputs["logits"].shape[:2]

        # Flatten the batch so a single cost matrix covers all predictions at once
        pred_prob = outputs["logits"].flatten(0, 1).sigmoid()  # [batch_size * num_queries, num_classes]
        pred_bbox = outputs["pred_boxes"].flatten(0, 1)  # [batch_size * num_queries, 4]

        # Concatenate the targets of the whole batch as well
        gt_labels = torch.cat([t["class_labels"] for t in targets])
        gt_bbox = torch.cat([t["boxes"] for t in targets])

        # Classification cost: focal-loss shaped positive/negative terms evaluated at the target classes
        alpha = 0.25
        gamma = 2.0
        neg_cost = (1 - alpha) * (pred_prob**gamma) * (-(1 - pred_prob + 1e-8).log())
        pos_cost = alpha * ((1 - pred_prob) ** gamma) * (-(pred_prob + 1e-8).log())
        class_cost = pos_cost[:, gt_labels] - neg_cost[:, gt_labels]

        # L1 cost between predicted and target boxes
        bbox_cost = torch.cdist(pred_bbox, gt_bbox, p=1)

        # Negated generalized IoU cost between boxes
        giou_cost = -generalized_box_iou(center_to_corners_format(pred_bbox), center_to_corners_format(gt_bbox))

        # Weighted sum, reshaped back to [batch, queries, total_targets]; scipy needs CPU
        cost_matrix = self.bbox_cost * bbox_cost + self.class_cost * class_cost + self.giou_cost * giou_cost
        cost_matrix = cost_matrix.view(batch_size, num_queries, -1).cpu()

        # Solve one assignment problem per image over its own slice of targets
        sizes = [len(t["boxes"]) for t in targets]
        assignments = [linear_sum_assignment(chunk[b]) for b, chunk in enumerate(cost_matrix.split(sizes, -1))]
        return [
            (torch.as_tensor(rows, dtype=torch.int64), torch.as_tensor(cols, dtype=torch.int64))
            for rows, cols in assignments
        ]
# Copied from transformers.models.detr.modeling_detr._upcast
def _upcast(t: Tensor) -> Tensor:
# Protects from numerical overflows in multiplications by upcasting to the equivalent higher type
if t.is_floating_point():
return t if t.dtype in (torch.float32, torch.float64) else t.float()
else:
return t if t.dtype in (torch.int32, torch.int64) else t.int()
# Copied from transformers.models.detr.modeling_detr.box_area
def box_area(boxes: Tensor) -> Tensor:
    """
    Computes the area of a set of bounding boxes, which are specified by its (x1, y1, x2, y2) coordinates.

    Args:
        boxes (`torch.FloatTensor` of shape `(number_of_boxes, 4)`):
            Boxes for which the area will be computed. They are expected to be in (x1, y1, x2, y2) format with `0 <= x1
            < x2` and `0 <= y1 < y2`.

    Returns:
        `torch.FloatTensor`: a tensor containing the area for each box.
    """
    # Inlined dtype-widening guard (same rule as `_upcast`): promote narrow
    # dtypes so the width*height product cannot overflow.
    if boxes.is_floating_point():
        if boxes.dtype not in (torch.float32, torch.float64):
            boxes = boxes.float()
    elif boxes.dtype not in (torch.int32, torch.int64):
        boxes = boxes.int()
    widths = boxes[:, 2] - boxes[:, 0]
    heights = boxes[:, 3] - boxes[:, 1]
    return widths * heights
# Copied from transformers.models.detr.modeling_detr.box_iou
def box_iou(boxes1, boxes2):
    """Pairwise intersection-over-union for two sets of corner-format boxes.

    Returns a tuple ``(iou, union)`` of [N, M] tensors, where N and M are the
    number of boxes in `boxes1` and `boxes2` respectively.
    """
    areas1 = box_area(boxes1)
    areas2 = box_area(boxes2)

    # Pairwise intersection rectangle: max of top-left corners, min of
    # bottom-right corners, clamped so disjoint pairs contribute zero area.
    corners_lt = torch.max(boxes1[:, None, :2], boxes2[:, :2])  # [N,M,2]
    corners_rb = torch.min(boxes1[:, None, 2:], boxes2[:, 2:])  # [N,M,2]
    wh = (corners_rb - corners_lt).clamp(min=0)  # [N,M,2]
    intersection = wh[..., 0] * wh[..., 1]  # [N,M]

    union = areas1[:, None] + areas2 - intersection
    return intersection / union, union
# Copied from transformers.models.detr.modeling_detr.generalized_box_iou
def generalized_box_iou(boxes1, boxes2):
    """
    Generalized IoU from https://giou.stanford.edu/. The boxes should be in [x0, y0, x1, y1] (corner) format.

    Returns:
        `torch.FloatTensor`: a [N, M] pairwise matrix, where N = len(boxes1) and M = len(boxes2)
    """
    # Degenerate boxes (x1 < x0 or y1 < y0) would produce inf/nan below, so
    # validate both inputs up front.
    if not (boxes1[:, 2:] >= boxes1[:, :2]).all():
        raise ValueError(f"boxes1 must be in [x0, y0, x1, y1] (corner) format, but got {boxes1}")
    if not (boxes2[:, 2:] >= boxes2[:, :2]).all():
        raise ValueError(f"boxes2 must be in [x0, y0, x1, y1] (corner) format, but got {boxes2}")

    iou, union = box_iou(boxes1, boxes2)

    # Smallest axis-aligned box enclosing each pair.
    enclose_lt = torch.min(boxes1[:, None, :2], boxes2[:, :2])
    enclose_rb = torch.max(boxes1[:, None, 2:], boxes2[:, 2:])
    enclose_wh = (enclose_rb - enclose_lt).clamp(min=0)  # [N,M,2]
    enclose_area = enclose_wh[..., 0] * enclose_wh[..., 1]

    # GIoU = IoU - (enclosing area not covered by the union) / enclosing area.
    return iou - (enclose_area - union) / enclose_area
# from https://github.com/facebookresearch/detectron2/blob/cbbc1ce26473cb2a5cc8f58e8ada9ae14cb41052/detectron2/layers/wrappers.py#L100
def nonzero_tuple(x):
    """
    A 'as_tuple=True' version of torch.nonzero to support torchscript. because of
    https://github.com/pytorch/pytorch/issues/38718
    """
    if not torch.jit.is_scripting():
        # Eager mode: the native as_tuple path works directly.
        return x.nonzero(as_tuple=True)
    # Torchscript path: emulate as_tuple=True by unbinding the column
    # dimension of the [num_nonzero, ndim] index matrix. A 0-dim input is
    # first lifted to 1-dim so nonzero() behaves.
    scripted_input = x.unsqueeze(0) if x.dim() == 0 else x
    return scripted_input.nonzero().unbind(1)
# from https://github.com/facebookresearch/detectron2/blob/9921a2caa585d4fa66c4b534b6fab6e74d89b582/detectron2/modeling/matcher.py#L9
class DetaMatcher(object):
    """
    This class assigns to each predicted "element" (e.g., a box) a ground-truth element. Each predicted element will
    have exactly zero or one matches; each ground-truth element may be matched to zero or more predicted elements.

    The matching is determined by the MxN match_quality_matrix, that characterizes how well each (ground-truth,
    prediction)-pair match each other. For example, if the elements are boxes, this matrix may contain box
    intersection-over-union overlap values.

    The matcher returns (a) a vector of length N containing the index of the ground-truth element m in [0, M) that
    matches to prediction n in [0, N). (b) a vector of length N containing the labels for each prediction.
    """

    def __init__(self, thresholds: List[float], labels: List[int], allow_low_quality_matches: bool = False):
        """
        Args:
            thresholds (`list[float]`):
                A list of thresholds used to stratify predictions into levels.
            labels (`list[int`):
                A list of values to label predictions belonging at each level. A label can be one of {-1, 0, 1}
                signifying {ignore, negative class, positive class}, respectively.
            allow_low_quality_matches (`bool`, *optional*, defaults to `False`):
                If `True`, produce additional matches for predictions with maximum match quality lower than
                high_threshold. See `set_low_quality_matches_` for more details.

            For example,
                thresholds = [0.3, 0.5] labels = [0, -1, 1] All predictions with iou < 0.3 will be marked with 0 and
                thus will be considered as false positives while training. All predictions with 0.3 <= iou < 0.5 will
                be marked with -1 and thus will be ignored. All predictions with 0.5 <= iou will be marked with 1 and
                thus will be considered as true positives.
        """
        # Add -inf and +inf to first and last position in thresholds
        thresholds = thresholds[:]  # copy so the caller's list is not mutated
        if thresholds[0] < 0:
            raise ValueError("Thresholds should be positive")
        thresholds.insert(0, -float("inf"))
        thresholds.append(float("inf"))
        # Currently torchscript does not support all + generator
        if not all([low <= high for (low, high) in zip(thresholds[:-1], thresholds[1:])]):
            raise ValueError("Thresholds should be sorted.")
        if not all([l in [-1, 0, 1] for l in labels]):
            raise ValueError("All labels should be either -1, 0 or 1")
        if len(labels) != len(thresholds) - 1:
            raise ValueError("Number of labels should be equal to number of thresholds - 1")
        self.thresholds = thresholds
        self.labels = labels
        self.allow_low_quality_matches = allow_low_quality_matches

    def __call__(self, match_quality_matrix):
        """
        Args:
            match_quality_matrix (Tensor[float]): an MxN tensor, containing the
                pairwise quality between M ground-truth elements and N predicted elements. All elements must be >= 0
                (due to the us of `torch.nonzero` for selecting indices in `set_low_quality_matches_`).

        Returns:
            matches (Tensor[int64]): a vector of length N, where matches[i] is a matched
                ground-truth index in [0, M)
            match_labels (Tensor[int8]): a vector of length N, where pred_labels[i] indicates
                whether a prediction is a true or false positive or ignored
        """
        assert match_quality_matrix.dim() == 2
        if match_quality_matrix.numel() == 0:
            default_matches = match_quality_matrix.new_full((match_quality_matrix.size(1),), 0, dtype=torch.int64)
            # When no gt boxes exist, we define IOU = 0 and therefore set labels
            # to `self.labels[0]`, which usually defaults to background class 0
            # To choose to ignore instead, can make labels=[-1,0,-1,1] + set appropriate thresholds
            default_match_labels = match_quality_matrix.new_full(
                (match_quality_matrix.size(1),), self.labels[0], dtype=torch.int8
            )
            return default_matches, default_match_labels

        assert torch.all(match_quality_matrix >= 0)

        # match_quality_matrix is M (gt) x N (predicted)
        # Max over gt elements (dim 0) to find best gt candidate for each prediction
        matched_vals, matches = match_quality_matrix.max(dim=0)

        # Start every prediction at label 1, then overwrite per quality level below.
        match_labels = matches.new_full(matches.size(), 1, dtype=torch.int8)

        # Stratify predictions into the configured [low, high) quality bands
        # and stamp each band's label onto its members.
        for l, low, high in zip(self.labels, self.thresholds[:-1], self.thresholds[1:]):
            low_high = (matched_vals >= low) & (matched_vals < high)
            match_labels[low_high] = l

        if self.allow_low_quality_matches:
            # Promote the best-overlapping prediction(s) of every gt to positive,
            # even when their quality fell below the high threshold.
            self.set_low_quality_matches_(match_labels, match_quality_matrix)

        return matches, match_labels

    def set_low_quality_matches_(self, match_labels, match_quality_matrix):
        """
        Produce additional matches for predictions that have only low-quality matches. Specifically, for each
        ground-truth G find the set of predictions that have maximum overlap with it (including ties); for each
        prediction in that set, if it is unmatched, then match it to the ground-truth G.

        This function implements the RPN assignment case (i) in Sec. 3.1.2 of :paper:`Faster R-CNN`.
        """
        # For each gt, find the prediction with which it has highest quality
        highest_quality_foreach_gt, _ = match_quality_matrix.max(dim=1)
        # Find the highest quality match available, even if it is low, including ties.
        # Note that the matches qualities must be positive due to the use of
        # `torch.nonzero`.
        _, pred_inds_with_highest_quality = nonzero_tuple(match_quality_matrix == highest_quality_foreach_gt[:, None])
        # If an anchor was labeled positive only due to a low-quality match
        # with gt_A, but it has larger overlap with gt_B, it's matched index will still be gt_B.
        # This follows the implementation in Detectron, and is found to have no significant impact.
        match_labels[pred_inds_with_highest_quality] = 1
# from https://github.com/facebookresearch/detectron2/blob/cbbc1ce26473cb2a5cc8f58e8ada9ae14cb41052/detectron2/modeling/sampling.py#L9
def subsample_labels(labels: torch.Tensor, num_samples: int, positive_fraction: float, bg_label: int):
    """
    Return `num_samples` (or fewer, if not enough found) random samples from `labels` which is a mixture of positives &
    negatives. It will try to return as many positives as possible without exceeding `positive_fraction * num_samples`,
    and then try to fill the remaining slots with negatives.

    Args:
        labels (Tensor): (N, ) label vector with values:
            * -1: ignore
            * bg_label: background ("negative") class
            * otherwise: one or more foreground ("positive") classes
        num_samples (int): The total number of labels with value >= 0 to return.
            Values that are not sampled will be filled with -1 (ignore).
        positive_fraction (float): The number of subsampled labels with values > 0
            is `min(num_positives, int(positive_fraction * num_samples))`. The number of negatives sampled is
            `min(num_negatives, num_samples - num_positives_sampled)`. In order words, if there are not enough
            positives, the sample is filled with negatives. If there are also not enough negatives, then as many
            elements are sampled as is possible.
        bg_label (int): label index of background ("negative") class.

    Returns:
        pos_idx, neg_idx (Tensor):
            1D vector of indices. The total length of both is `num_samples` or fewer.
    """
    positive = nonzero_tuple((labels != -1) & (labels != bg_label))[0]
    negative = nonzero_tuple(labels == bg_label)[0]

    # Cap positives at the requested fraction and at what is actually
    # available, then fill the rest of the budget with negatives (also capped).
    num_pos = min(positive.numel(), int(num_samples * positive_fraction))
    num_neg = min(negative.numel(), num_samples - num_pos)

    # Random subset of each group.
    pos_perm = torch.randperm(positive.numel(), device=positive.device)[:num_pos]
    neg_perm = torch.randperm(negative.numel(), device=negative.device)[:num_neg]
    return positive[pos_perm], negative[neg_perm]
def sample_topk_per_gt(pr_inds, gt_inds, iou, k):
    """
    Keep at most the `k` highest-IoU predictions for every matched ground-truth box.

    Args:
        pr_inds (Tensor): indices of matched predictions; returned unchanged
            when `gt_inds` is empty.
        gt_inds (Tensor): ground-truth index matched to each prediction.
        iou (Tensor): [num_gt, num_pred] pairwise IoU matrix used for ranking.
        k (int): maximum number of predictions kept per ground-truth.

    Returns:
        tuple(Tensor, Tensor): filtered (prediction indices, ground-truth indices);
        each ground-truth appears at most min(k, its original match count) times.
    """
    if len(gt_inds) == 0:
        return pr_inds, gt_inds

    # find topk matches for each gt
    unique_gts, counts = gt_inds.unique(return_counts=True)
    # fix: the topk scores were bound to an unused local (`scores`); discard them.
    _, topk_pr_inds = iou[unique_gts].topk(k, dim=1)
    expanded_gts = unique_gts[:, None].repeat(1, k)

    # filter to as many matches that gt has
    pr_inds_out = torch.cat([pr[:c] for c, pr in zip(counts, topk_pr_inds)])
    gt_inds_out = torch.cat([gt[:c] for c, gt in zip(counts, expanded_gts)])
    return pr_inds_out, gt_inds_out
# modified from https://github.com/facebookresearch/detectron2/blob/cbbc1ce26473cb2a5cc8f58e8ada9ae14cb41052/detectron2/modeling/roi_heads/roi_heads.py#L123
class DetaStage2Assigner(nn.Module):
    """
    Second-stage label assigner for DETA (modified from Detectron2's ROI-heads
    sampling): matches the decoder's initial reference boxes to ground truth by
    IoU, subsamples foreground/background proposals, and keeps at most `max_k`
    matched predictions per ground-truth box.
    """

    def __init__(self, num_queries, max_k=4):
        super().__init__()
        self.positive_fraction = 0.25
        self.bg_label = 400  # number > 91 to filter out later
        self.batch_size_per_image = num_queries
        # IoU >= 0.6 -> positive (1), otherwise background (0); low-quality
        # matches are promoted so every gt keeps at least one proposal.
        self.proposal_matcher = DetaMatcher(thresholds=[0.6], labels=[0, 1], allow_low_quality_matches=True)
        self.k = max_k

    def _sample_proposals(self, matched_idxs: torch.Tensor, matched_labels: torch.Tensor, gt_classes: torch.Tensor):
        """
        Based on the matching between N proposals and M groundtruth, sample the proposals and set their classification
        labels.

        Args:
            matched_idxs (Tensor): a vector of length N, each is the best-matched
                gt index in [0, M) for each proposal.
            matched_labels (Tensor): a vector of length N, the matcher's label
                (one of cfg.MODEL.ROI_HEADS.IOU_LABELS) for each proposal.
            gt_classes (Tensor): a vector of length M.

        Returns:
            Tensor: a vector of indices of sampled proposals. Each is in [0, N). Tensor: a vector of the same length,
            the classification label for
                each sampled proposal. Each sample is labeled as either a category in [0, num_classes) or the
                background (num_classes).
        """
        has_gt = gt_classes.numel() > 0
        # Get the corresponding GT for each proposal
        if has_gt:
            gt_classes = gt_classes[matched_idxs]
            # Label unmatched proposals (0 label from matcher) as background (label=num_classes)
            gt_classes[matched_labels == 0] = self.bg_label
            # Label ignore proposals (-1 label)
            gt_classes[matched_labels == -1] = -1
        else:
            # No ground truth: every proposal is background.
            gt_classes = torch.zeros_like(matched_idxs) + self.bg_label

        sampled_fg_idxs, sampled_bg_idxs = subsample_labels(
            gt_classes, self.batch_size_per_image, self.positive_fraction, self.bg_label
        )

        sampled_idxs = torch.cat([sampled_fg_idxs, sampled_bg_idxs], dim=0)
        return sampled_idxs, gt_classes[sampled_idxs]

    def forward(self, outputs, targets, return_cost_matrix=False):
        # COCO categories are from 1 to 90. They set num_classes=91 and apply sigmoid.

        bs = len(targets)
        indices = []
        ious = []
        for b in range(bs):
            # NOTE(review): boxes and initial references appear to be in center
            # (cx, cy, w, h) format — they are converted to corners before the
            # IoU; confirm against the caller.
            iou, _ = box_iou(
                center_to_corners_format(targets[b]["boxes"]),
                center_to_corners_format(outputs["init_reference"][b].detach()),
            )
            matched_idxs, matched_labels = self.proposal_matcher(
                iou
            )  # proposal_id -> highest_iou_gt_id, proposal_id -> [1 if iou > 0.6, 0 ow]
            (
                sampled_idxs,
                sampled_gt_classes,
            ) = self._sample_proposals(  # list of sampled proposal_ids, sampled_id -> [0, num_classes)+[bg_label]
                matched_idxs, matched_labels, targets[b]["labels"]
            )
            # Keep only the foreground samples, then cap matches per gt at self.k.
            pos_pr_inds = sampled_idxs[sampled_gt_classes != self.bg_label]
            pos_gt_inds = matched_idxs[pos_pr_inds]
            pos_pr_inds, pos_gt_inds = self.postprocess_indices(pos_pr_inds, pos_gt_inds, iou)
            indices.append((pos_pr_inds, pos_gt_inds))
            ious.append(iou)
        if return_cost_matrix:
            return indices, ious
        return indices

    def postprocess_indices(self, pr_inds, gt_inds, iou):
        # Keep at most self.k predictions per ground-truth, ranked by IoU.
        return sample_topk_per_gt(pr_inds, gt_inds, iou, self.k)
# modified from https://github.com/facebookresearch/detectron2/blob/cbbc1ce26473cb2a5cc8f58e8ada9ae14cb41052/detectron2/modeling/proposal_generator/rpn.py#L181
class DetaStage1Assigner(nn.Module):
    """
    First-stage (RPN-style) anchor assigner for DETA (modified from Detectron2's
    RPN): matches anchors to ground truth by IoU with a low/high threshold band,
    randomly subsamples 256 anchors per image at 50% positive fraction, and
    keeps at most `max_k` anchors per ground-truth box.
    """

    def __init__(self, t_low=0.3, t_high=0.7, max_k=4):
        super().__init__()
        self.positive_fraction = 0.5
        self.batch_size_per_image = 256
        self.k = max_k
        self.t_low = t_low
        self.t_high = t_high
        # iou < t_low -> background (0); t_low <= iou < t_high -> ignore (-1);
        # iou >= t_high -> foreground (1); the best anchor per gt is promoted.
        self.anchor_matcher = DetaMatcher(
            thresholds=[t_low, t_high], labels=[0, -1, 1], allow_low_quality_matches=True
        )

    def _subsample_labels(self, label):
        """
        Randomly sample a subset of positive and negative examples, and overwrite the label vector to the ignore value
        (-1) for all elements that are not included in the sample.

        Args:
            labels (Tensor): a vector of -1, 0, 1. Will be modified in-place and returned.
        """
        pos_idx, neg_idx = subsample_labels(label, self.batch_size_per_image, self.positive_fraction, 0)
        # Fill with the ignore label (-1), then set positive and negative labels
        label.fill_(-1)
        label.scatter_(0, pos_idx, 1)
        label.scatter_(0, neg_idx, 0)
        return label

    def forward(self, outputs, targets):
        bs = len(targets)
        indices = []
        for b in range(bs):
            anchors = outputs["anchors"][b]
            if len(targets[b]["boxes"]) == 0:
                # No ground truth in this image: emit an empty match pair.
                indices.append(
                    (
                        torch.tensor([], dtype=torch.long, device=anchors.device),
                        torch.tensor([], dtype=torch.long, device=anchors.device),
                    )
                )
                continue
            # NOTE(review): boxes/anchors appear to be in center format here
            # (converted to corners before IoU) — confirm against the caller.
            iou, _ = box_iou(
                center_to_corners_format(targets[b]["boxes"]),
                center_to_corners_format(anchors),
            )
            matched_idxs, matched_labels = self.anchor_matcher(
                iou
            )  # proposal_id -> highest_iou_gt_id, proposal_id -> [1 if iou > 0.7, 0 if iou < 0.3, -1 ow]
            matched_labels = self._subsample_labels(matched_labels)

            all_pr_inds = torch.arange(len(anchors))
            pos_pr_inds = all_pr_inds[matched_labels == 1]
            pos_gt_inds = matched_idxs[pos_pr_inds]
            pos_pr_inds, pos_gt_inds = self.postprocess_indices(pos_pr_inds, pos_gt_inds, iou)
            pos_pr_inds, pos_gt_inds = pos_pr_inds.to(anchors.device), pos_gt_inds.to(anchors.device)
            indices.append((pos_pr_inds, pos_gt_inds))
        return indices

    def postprocess_indices(self, pr_inds, gt_inds, iou):
        # Keep at most self.k anchors per ground-truth, ranked by IoU.
        return sample_topk_per_gt(pr_inds, gt_inds, iou, self.k)
|
2881099/dotnetGen_mysql | 1,264 | Server/Resources/Infrastructure/Extensions/GlobalExtensions.cs | using Newtonsoft.Json;
using System;
using System.Text.RegularExpressions;
/// <summary>
/// Miscellaneous extension helpers: HTML-safe JSON serialization and
/// Unix-epoch / ISO8601 timestamp conversions.
/// </summary>
public static class GlobalExtensions {

	/// <summary>
	/// Serializes <paramref name="obj"/> to JSON and breaks up script tags so the
	/// output can be embedded inside an HTML page without enabling XSS.
	/// </summary>
	/// <param name="html">Html helper used to emit raw markup; when null the JSON string itself is returned.</param>
	/// <param name="obj">Object to serialize.</param>
	/// <returns>Raw HTML content wrapping the JSON, or the plain string when <paramref name="html"/> is null.</returns>
	public static object Json(this Microsoft.AspNetCore.Mvc.Rendering.IHtmlHelper html, object obj) {
		string str = JsonConvert.SerializeObject(obj);
		// Split "<script ...>" / "</script ...>" sequences so inlined JSON cannot close/open script blocks.
		if (!string.IsNullOrEmpty(str)) str = Regex.Replace(str, @"<(/?script[\s>])", "<\"+\"$1", RegexOptions.IgnoreCase);
		if (html == null) return str;
		return html.Raw(str);
	}

	// Unix epoch (1970-01-01), shared by the timestamp helpers below.
	// (Previously declared but unused; both methods rebuilt the epoch inline.)
	static readonly DateTime dt19700101 = new DateTime(1970, 1, 1);

	/// <summary>
	/// Converts the time to UTC and formats it as an ISO8601 string (yyyy-MM-ddTHH:mm:ssZ).
	/// </summary>
	/// <param name="time">Local or UTC time to format.</param>
	/// <returns>ISO8601 UTC string.</returns>
	public static string ToGmtISO8601(this DateTime time) {
		// Invariant culture keeps the digits/separators stable regardless of the host locale.
		return time.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ", System.Globalization.CultureInfo.InvariantCulture);
	}

	/// <summary>
	/// Unix timestamp: whole seconds since 1970-01-01 (UTC).
	/// </summary>
	/// <param name="time">Time to convert.</param>
	/// <returns>Seconds since the Unix epoch.</returns>
	public static long GetTime(this DateTime time) {
		return (long)time.ToUniversalTime().Subtract(dt19700101).TotalSeconds;
	}

	/// <summary>
	/// Unix timestamp: milliseconds since 1970-01-01 (UTC).
	/// </summary>
	/// <param name="time">Time to convert.</param>
	/// <returns>Milliseconds since the Unix epoch.</returns>
	public static long GetTimeMilliseconds(this DateTime time) {
		return (long)time.ToUniversalTime().Subtract(dt19700101).TotalMilliseconds;
	}
}
|
2881099/dotnetGen_sqlserver | 4,696 | Server/Protocol.cs | using System;
using System.Collections.Generic;
using System.Text;
using Model;
namespace Server {
	/// <summary>
	/// Server-side protocol dispatcher: owns a <c>ServerSocket</c>, keeps one
	/// <c>CodeBuild</c> instance per accepted connection, and answers the
	/// "GetDatabases", "GetTablesByDatabase" and "Build" messages.
	/// </summary>
	public class Protocol : IDisposable {

		internal ServerSocket _socket;
		// Per-connection code builders, keyed by the accepted socket id.
		private Dictionary<int, CodeBuild> _builds;
		private object _builds_lock = new object();

		// Private: use Protocol.Create(port). Starts listening immediately.
		private Protocol(int port) {
			_builds = new Dictionary<int, CodeBuild>();
			_socket = new ServerSocket(port);
			_socket.Closed += this.OnClosed;
			_socket.Accepted += this.OnAccepted;
			_socket.Error += this.OnError;
			_socket.Receive += this.OnReceive;
			_socket.Start();
		}

		/// <summary>Drops the per-connection builder when a client disconnects.</summary>
		// NOTE(review): this Remove is not guarded by _builds_lock, while OnReceive
		// mutates the dictionary under the lock — verify thread-safety.
		protected virtual void OnClosed(object sender, ServerSocketClosedEventArgs e) {
			_builds.Remove(e.AcceptSocketId);
		}

		/// <summary>
		/// Dispatches an incoming message to the matching action handler; unknown
		/// or malformed requests are answered with an access-denied message.
		/// </summary>
		protected virtual void OnReceive(object sender, ServerSocketReceiveEventArgs e) {
			switch (e.Messager.Action) {

				case "GetDatabases":
					// Expects a ClientInfo payload; creates (or replaces) this
					// connection's CodeBuild and returns the database list.
					ClientInfo ci = e.Messager.Arg as ClientInfo;
					if (ci == null) {
						e.AcceptSocket.AccessDenied();
						debugAppendLog?.Invoke($"AccessDenied(GetDatabases): 连接信息未提供");
					} else {
						CodeBuild build = new CodeBuild(ci, e.AcceptSocket);
						lock (_builds_lock) {
							_builds.Remove(e.AcceptSocket.Id);
							_builds.Add(e.AcceptSocket.Id, build);
						}

						List<DatabaseInfo> dbs = build.GetDatabases();
						debugAppendLog?.Invoke("GetDatabases: dbs.Length " + dbs.Count);
						// Echo the request id so the client can correlate the reply.
						SocketMessager messager = new SocketMessager(e.Messager.Action, dbs);
						messager.Id = e.Messager.Id;
						e.AcceptSocket.Write(messager);
					}
					break;

				case "GetTablesByDatabase":
					// Expects a database name; requires a prior GetDatabases call
					// to have registered a CodeBuild for this socket.
					string database = string.Concat(e.Messager.Arg);
					if (string.IsNullOrEmpty(database)) {
						e.AcceptSocket.AccessDenied();
						debugAppendLog?.Invoke($"AccessDenied(GetTablesByDatabase): database为空");
					} else {
						CodeBuild build = null;
						// NOTE(review): TryGetValue is read outside _builds_lock.
						if (!_builds.TryGetValue(e.AcceptSocket.Id, out build)) {
							e.AcceptSocket.AccessDenied();
							debugAppendLog?.Invoke($"AccessDenied(GetTablesByDatabase): _builds.TryGetValue(sockId) 未找到,数据错乱了");
						} else {
							List<TableInfo> tables = build.GetTablesByDatabase(database);
							SocketMessager messager = new SocketMessager(e.Messager.Action, tables);
							messager.Id = e.Messager.Id;
							e.AcceptSocket.Write(messager);
						}
					}
					break;

				case "Build":
					// Expects: [0] solution name, [1] isSolution flag, [2] output
					// bitmask string of '0'/'1', [3] optional make-admin flag,
					// [4] optional download-resources flag.
					object[] parms = e.Messager.Arg as object[];
					if (parms.Length < 4) {
						e.AcceptSocket.AccessDenied();
						debugAppendLog?.Invoke($"AccessDenied(Build): 参数错误,params.Length < 4");
					} else {
						string solutionName = string.Concat(parms[0]);
						bool isSolution, isMakeAdmin, isDownloadRes;
						string op10 = string.Concat(parms[2]);
						if (string.IsNullOrEmpty(solutionName) ||
							!bool.TryParse(string.Concat(parms[1]), out isSolution) ||
							string.IsNullOrEmpty(op10)) {
							e.AcceptSocket.AccessDenied();
							debugAppendLog?.Invoke($"AccessDenied(Build): -N为空 or -S未使用 or 生成的表列表为空");
						} else {
							isMakeAdmin = false;
							isDownloadRes = false;
							if (parms.Length >= 4) bool.TryParse(string.Concat(parms[3]), out isMakeAdmin);
							if (parms.Length >= 5) bool.TryParse(string.Concat(parms[4]), out isDownloadRes);

							CodeBuild build = null;
							if (!_builds.TryGetValue(e.AcceptSocket.Id, out build)) {
								e.AcceptSocket.AccessDenied();
								debugAppendLog?.Invoke($"AccessDenied(Build): _builds.TryGetValue(sockId) 未找到,数据错乱了");
							} else {
								// Decode the '1'/'0' bitmask into the per-output toggles.
								List<bool> outputs = new List<bool>();
								char[] cs = op10.ToCharArray();
								foreach (char c in cs) {
									outputs.Add(c == '1');
								}
								build.SetOutput(outputs.ToArray());

								// Reply with the build result, or the exception itself on failure.
								object parm = null;
								try {
									parm = build.Build(solutionName, isSolution, isMakeAdmin, isDownloadRes);
								} catch (Exception ex) {
									parm = ex;
								}
								SocketMessager messager = new SocketMessager(e.Messager.Action, parm);
								messager.Id = e.Messager.Id;
								e.AcceptSocket.Write(messager);
							}
						}
					}
					break;

				default:
					// Unknown action: reject the request.
					e.AcceptSocket.AccessDenied();
					debugAppendLog?.Invoke($"AccessDenied(default): 未实现");
					break;
			}
		}

		/// <summary>Hook invoked for each accepted connection; no-op by default.</summary>
		protected virtual void OnAccepted(object sender, ServerSocketAcceptedEventArgs e) {
		}

		/// <summary>Logs socket errors and mirrors them to <see cref="debugAppendLog"/>.</summary>
		protected virtual void OnError(object sender, ServerSocketErrorEventArgs e) {
			Logger.remotor.Debug("Errors: " + e.Errors, e.Exception);
			debugAppendLog?.Invoke($"OnError: {e.Exception.Message} \r\n {e.Exception.StackTrace}");
		}

		/// <summary>Factory: creates a protocol server listening on <paramref name="port"/>.</summary>
		public static Protocol Create(int port) {
			return new Protocol(port);
		}

		// Optional sink for diagnostic messages (e.g. a UI log window).
		public static Action<string> debugAppendLog;

		#region IDisposable 成员

		public void Dispose() {
			if (_socket != null) {
				_socket.Dispose();
			}
		}

		#endregion
	}
}
|
2881099/dotnetGen_sqlserver | 3,354 | Server/Server.csproj | <?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="14.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.50727</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{EFE1F5D6-AB1F-4FA6-8E10-9B8A197B31C7}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Server</RootNamespace>
<AssemblyName>Server</AssemblyName>
<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
<FileUpgradeFlags>
</FileUpgradeFlags>
<UpgradeBackupLocation>
</UpgradeBackupLocation>
<OldToolsVersion>2.0</OldToolsVersion>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="log4net, Version=1.2.10.0, Culture=neutral, PublicKeyToken=1b44e1d426115821, processorArchitecture=MSIL">
<SpecificVersion>False</SpecificVersion>
<HintPath>..\log4net.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="CodeBuild%28Code%29.cs" />
<Compile Include="CodeBuild%28Const%29.cs" />
<Compile Include="CodeBuild%28DB%29.cs" />
<Compile Include="CodeBuild%28Lib%29.cs" />
<Compile Include="Logger.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DesignTime>True</DesignTime>
<DependentUpon>Resources.resx</DependentUpon>
</Compile>
<Compile Include="Protocol.cs" />
<Compile Include="ServerSocket.cs" />
</ItemGroup>
<ItemGroup>
<None Include="log4net.config" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj">
<Project>{F0054101-9AC9-4E0E-9E78-44EA89FC5C19}</Project>
<Name>Common</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
</EmbeddedResource>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> |
2881099/dotnetGen_sqlserver | 1,261 | Common/Deflate.cs | using System;
using System.IO;
using System.IO.Compression;
using System.Text;
/// <summary>
/// Helpers for DEFLATE compression/decompression of byte buffers and code text.
/// </summary>
public static class Deflate {
	// Header text prepended to C# source payloads (text starting with "using ") before compression.
	public static string cs_head = string.Empty;

	/// <summary>
	/// Inflates a DEFLATE-compressed stream. If the payload is not valid DEFLATE
	/// data, the raw bytes are returned unchanged (best-effort passthrough).
	/// </summary>
	/// <param name="stream">Seekable stream positioned anywhere; read from offset 0.</param>
	/// <returns>The decompressed bytes, or the original bytes on failure.</returns>
	public static byte[] Decompress(Stream stream) {
		try {
			stream.Position = 0;
			using (MemoryStream ms = new MemoryStream()) {
				using (DeflateStream def = new DeflateStream(stream, CompressionMode.Decompress)) {
					byte[] data = new byte[1024];
					int size;
					while ((size = def.Read(data, 0, data.Length)) > 0) {
						ms.Write(data, 0, size);
					}
				}
				return ms.ToArray();
			}
		} catch {
			// Not DEFLATE data: fall back to the original bytes when possible.
			// (The old code cast unconditionally and would throw
			// NullReferenceException for non-MemoryStream inputs; it also had a
			// stray ';' after the catch block.)
			MemoryStream source = stream as MemoryStream;
			return source != null ? source.ToArray() : new byte[0];
		}
	}

	/// <summary>Inflates a DEFLATE-compressed byte array (see <see cref="Decompress(Stream)"/>).</summary>
	public static byte[] Decompress(byte[] bt) {
		return Decompress(new MemoryStream(bt));
	}

	/// <summary>
	/// Compresses UTF-8 text. Text starting with "using " is treated as C# source
	/// and gets <see cref="cs_head"/> prepended first.
	/// </summary>
	public static byte[] Compress(string text) {
		if (text.Trim().StartsWith("using ")) {
			text = Deflate.cs_head + text;
		}
		return Compress(Encoding.UTF8.GetBytes(text));
	}

	/// <summary>Compresses a whole byte array with DEFLATE.</summary>
	public static byte[] Compress(byte[] bt) {
		return Compress(bt, 0, bt.Length);
	}

	/// <summary>Compresses <paramref name="length"/> bytes of <paramref name="bt"/> starting at <paramref name="startIndex"/>.</summary>
	public static byte[] Compress(byte[] bt, int startIndex, int length) {
		using (MemoryStream ms = new MemoryStream()) {
			using (DeflateStream def = new DeflateStream(ms, CompressionMode.Compress)) {
				def.Write(bt, startIndex, length);
			}
			return ms.ToArray();
		}
	}
}
|
2881099/dotnetGen_postgresql | 1,938 | MakeCode/app.config | <?xml version="1.0" encoding="utf-8" ?>
<configuration>
<configSections>
<sectionGroup name="userSettings" type="System.Configuration.UserSettingsGroup, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" >
<section name="MakeCode.Settings" type="System.Configuration.ClientSettingsSection, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" allowExeDefinition="MachineToLocalUser" requirePermission="false" />
<section name="MakeCode.Properties.Settings" type="System.Configuration.ClientSettingsSection, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" allowExeDefinition="MachineToLocalUser" requirePermission="false" />
</sectionGroup>
</configSections>
<userSettings>
<MakeCode.Settings>
<setting name="server" serializeAs="String">
<value>127.0.0.1:38888</value>
</setting>
</MakeCode.Settings>
<MakeCode.Properties.Settings>
<setting name="txtServer_text" serializeAs="String">
<value />
</setting>
<setting name="txtUsername_text" serializeAs="String">
<value />
</setting>
<setting name="txtPassword_text" serializeAs="String">
<value />
</setting>
<setting name="txtSolution_text" serializeAs="String">
<value />
</setting>
<setting name="chkSolution_checked" serializeAs="String">
<value>False</value>
</setting>
<setting name="chkIntegrated_Checked" serializeAs="String">
<value>True</value>
</setting>
<setting name="chkMultiDB_checked" serializeAs="String">
<value>False</value>
</setting>
<setting name="chkWebAdmin_checked" serializeAs="String">
<value>False</value>
</setting>
<setting name="chkDownloadRes_checked" serializeAs="String">
<value>False</value>
</setting>
<setting name="txtPort_text" serializeAs="String">
<value>5432</value>
</setting>
</MakeCode.Properties.Settings>
</userSettings>
</configuration> |
2881099/dotnetGen_postgresql | 7,753 | MakeCode/ClientSocket.cs | using System;
using System.IO;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
public class ClientSocket : BaseSocket, IDisposable {
private bool _isDisposed;
private IPEndPoint _remotePoint;
private TcpClient _tcpClient;
private Thread _thread;
private bool _running;
private int _receives;
private int _errors;
private object _errors_lock = new object();
private object _write_lock = new object();
private Dictionary<int, SyncReceive> _receiveHandlers = new Dictionary<int, SyncReceive>();
private object _receiveHandlers_lock = new object();
private DateTime _lastActive;
public event ClientSocketClosedEventHandler Closed;
public event ClientSocketReceiveEventHandler Receive;
public event ClientSocketErrorEventHandler Error;
public void Connect(string hostname, int port) {
if (this._isDisposed == false && this._running == false) {
this._running = true;
try {
IPAddress[] ips = Dns.GetHostAddresses(hostname);
if (ips.Length == 0) throw new Exception("无法解析“" + hostname + "”");
this._remotePoint = new IPEndPoint(ips[0], port);
this._tcpClient = new TcpClient();
this._tcpClient.Connect(this._remotePoint);
} catch (Exception ex) {
this._running = false;
this.OnError(ex);
this.OnClosed();
return;
}
this._receives = 0;
this._errors = 0;
this._lastActive = DateTime.Now;
this._thread = new Thread(delegate() {
while (this._running) {
try {
NetworkStream ns = this._tcpClient.GetStream();
ns.ReadTimeout = 1000 * 20;
if (ns.DataAvailable) {
SocketMessager messager = base.Read(ns);
if (string.Compare(messager.Action, SocketMessager.SYS_TEST_LINK.Action) == 0) {
} else if (this._receives == 0 &&
string.Compare(messager.Action, SocketMessager.SYS_HELLO_WELCOME.Action) == 0) {
this._receives++;
this.Write(messager);
} else if (string.Compare(messager.Action, SocketMessager.SYS_ACCESS_DENIED.Action) == 0) {
throw new Exception(SocketMessager.SYS_ACCESS_DENIED.Action);
} else {
ClientSocketReceiveEventArgs e = new ClientSocketReceiveEventArgs(this._receives++, messager);
SyncReceive receive = null;
if (this._receiveHandlers.TryGetValue(messager.Id, out receive)) {
new Thread(delegate() {
try {
receive.ReceiveHandler(this, e);
} catch (Exception ex) {
this.OnError(ex);
} finally {
receive.Wait.Set();
}
}).Start();
} else if (this.Receive != null) {
new Thread(delegate() {
this.OnReceive(e);
}).Start();
}
}
this._lastActive = DateTime.Now;
} else {
TimeSpan ts = DateTime.Now - _lastActive;
if (ts.TotalSeconds > 3) {
this.Write(SocketMessager.SYS_TEST_LINK);
}
}
if (!ns.DataAvailable) Thread.CurrentThread.Join(1);
} catch (Exception ex) {
this._running = false;
this.OnError(ex);
}
}
this.Close();
this.OnClosed();
});
this._thread.Start();
}
}
public void Close() {
this._running = false;
if (this._tcpClient != null) {
this._tcpClient.Close();
}
int[] keys = new int[this._receiveHandlers.Count];
try {
this._receiveHandlers.Keys.CopyTo(keys, 0);
} catch {
lock (this._receiveHandlers_lock) {
keys = new int[this._receiveHandlers.Count];
this._receiveHandlers.Keys.CopyTo(keys, 0);
}
}
foreach (int key in keys) {
SyncReceive receiveHandler = null;
if (this._receiveHandlers.TryGetValue(key, out receiveHandler)) {
receiveHandler.Wait.Set();
}
}
lock (this._receiveHandlers_lock) {
this._receiveHandlers.Clear();
}
}
public void Write(SocketMessager messager) {
this.Write(messager, null, TimeSpan.Zero);
}
public void Write(SocketMessager messager, ClientSocketReceiveEventHandler receiveHandler) {
this.Write(messager, receiveHandler, TimeSpan.FromSeconds(20));
}
public void Write(SocketMessager messager, ClientSocketReceiveEventHandler receiveHandler, TimeSpan timeout) {
SyncReceive syncReceive = null;
try {
if (receiveHandler != null) {
syncReceive = new SyncReceive(receiveHandler);
lock (this._receiveHandlers_lock) {
if (!this._receiveHandlers.ContainsKey(messager.Id)) {
this._receiveHandlers.Add(messager.Id, syncReceive);
} else {
this._receiveHandlers[messager.Id] = syncReceive;
}
}
}
lock (_write_lock) {
NetworkStream ns = this._tcpClient.GetStream();
base.Write(ns, messager);
}
this._lastActive = DateTime.Now;
if (syncReceive != null) {
syncReceive.Wait.Reset();
syncReceive.Wait.WaitOne(timeout, false);
syncReceive.Wait.Set();
lock (this._receiveHandlers_lock) {
this._receiveHandlers.Remove(messager.Id);
}
}
} catch (Exception ex) {
this._running = false;
this.OnError(ex);
if (syncReceive != null) {
syncReceive.Wait.Set();
lock (this._receiveHandlers_lock) {
this._receiveHandlers.Remove(messager.Id);
}
}
}
}
protected virtual void OnClosed(EventArgs e) {
if (this.Closed != null) {
new Thread(delegate() {
try {
this.Closed(this, e);
} catch (Exception ex) {
this.OnError(ex);
}
}).Start();
}
}
protected void OnClosed() {
this.OnClosed(new EventArgs());
}
protected virtual void OnReceive(ClientSocketReceiveEventArgs e) {
if (this.Receive != null) {
try {
this.Receive(this, e);
} catch (Exception ex) {
this.OnError(ex);
}
}
}
protected virtual void OnError(ClientSocketErrorEventArgs e) {
if (this.Error != null) {
this.Error(this, e);
}
}
	/// <summary>Wraps an exception in ClientSocketErrorEventArgs together with the running error count.</summary>
	protected void OnError(Exception ex) {
		int errors = 0;
		// _errors is shared across threads; increment under its dedicated lock.
		lock (this._errors_lock) {
			errors = ++this._errors;
		}
		ClientSocketErrorEventArgs e = new ClientSocketErrorEventArgs(ex, errors);
		this.OnError(e);
	}
	/// <summary>True while the background receive loop is supposed to be active.</summary>
	public bool Running {
		get { return this._running; }
	}
class SyncReceive : IDisposable {
private ClientSocketReceiveEventHandler _receiveHandler;
private ManualResetEvent _wait;
public SyncReceive(ClientSocketReceiveEventHandler receiveHandler) {
this._receiveHandler = receiveHandler;
this._wait = new ManualResetEvent(false);
}
public ClientSocketReceiveEventHandler ReceiveHandler {
get { return _receiveHandler; }
}
public ManualResetEvent Wait {
get { return _wait; }
}
#region IDisposable 成员
public void Dispose() {
this._wait.Set();
this._wait.Close();
}
#endregion
}
	#region IDisposable 成员
	public void Dispose() {
		// Mark disposed so background loops will not restart, then close the socket.
		this._isDisposed = true;
		this.Close();
	}
	#endregion
}
/// <summary>Raised after the socket connection has been closed.</summary>
public delegate void ClientSocketClosedEventHandler(object sender, EventArgs e);
/// <summary>Raised when an error occurs on the socket.</summary>
public delegate void ClientSocketErrorEventHandler(object sender, ClientSocketErrorEventArgs e);
/// <summary>Raised when a messager has been received.</summary>
public delegate void ClientSocketReceiveEventHandler(object sender, ClientSocketReceiveEventArgs e);
/// <summary>
/// Payload for the Error event: the exception that occurred plus the owning
/// socket's cumulative error count at the time it was raised.
/// </summary>
public class ClientSocketErrorEventArgs : EventArgs {
	private readonly int _errors;
	private readonly Exception _exception;
	public ClientSocketErrorEventArgs(Exception exception, int errors) {
		_exception = exception;
		_errors = errors;
	}
	/// <summary>Cumulative error count.</summary>
	public int Errors {
		get { return _errors; }
	}
	/// <summary>The exception that triggered the event.</summary>
	public Exception Exception {
		get { return _exception; }
	}
}
/// <summary>
/// Payload for the Receive event: the parsed messager and the cumulative count
/// of messages received on this connection.
/// </summary>
public class ClientSocketReceiveEventArgs : EventArgs {
	private readonly int _receives;
	private readonly SocketMessager _messager;
	public ClientSocketReceiveEventArgs(int receives, SocketMessager messager) {
		_messager = messager;
		_receives = receives;
	}
	/// <summary>Number of messages received so far.</summary>
	public int Receives {
		get { return _receives; }
	}
	/// <summary>The received message.</summary>
	public SocketMessager Messager {
		get { return _messager; }
	}
}
2881099/dotnetGen_postgresql | 4,497 | ServerWinService/ServerWinService.csproj | <?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="14.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.50727</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{551011E1-3310-44F2-A7F1-61F559441F66}</ProjectGuid>
<OutputType>WinExe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>ServerWinService</RootNamespace>
<AssemblyName>ServerWinService</AssemblyName>
<SccProjectName>
</SccProjectName>
<SccLocalPath>
</SccLocalPath>
<SccAuxPath>
</SccAuxPath>
<SccProvider>
</SccProvider>
<FileUpgradeFlags>
</FileUpgradeFlags>
<UpgradeBackupLocation>
</UpgradeBackupLocation>
<OldToolsVersion>3.5</OldToolsVersion>
<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
<PublishUrl>publish\</PublishUrl>
<Install>true</Install>
<InstallFrom>Disk</InstallFrom>
<UpdateEnabled>false</UpdateEnabled>
<UpdateMode>Foreground</UpdateMode>
<UpdateInterval>7</UpdateInterval>
<UpdateIntervalUnits>Days</UpdateIntervalUnits>
<UpdatePeriodically>false</UpdatePeriodically>
<UpdateRequired>false</UpdateRequired>
<MapFileExtensions>true</MapFileExtensions>
<ApplicationRevision>0</ApplicationRevision>
<ApplicationVersion>1.0.0.%2a</ApplicationVersion>
<IsWebBootstrapper>false</IsWebBootstrapper>
<UseApplicationTrust>false</UseApplicationTrust>
<BootstrapperEnabled>true</BootstrapperEnabled>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Configuration.Install" />
<Reference Include="System.Data" />
<Reference Include="System.Runtime.Remoting" />
<Reference Include="System.ServiceProcess" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="Install1.cs">
<SubType>Component</SubType>
</Compile>
<Compile Include="Service1.cs">
<SubType>Component</SubType>
</Compile>
<Compile Include="Service1.Designer.cs">
<DependentUpon>Service1.cs</DependentUpon>
</Compile>
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Settings.Designer.cs">
<DependentUpon>Settings.settings</DependentUpon>
<AutoGen>True</AutoGen>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
</ItemGroup>
<ItemGroup>
<None Include="app.config" />
<None Include="Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
</ItemGroup>
<ItemGroup>
<BootstrapperPackage Include="Microsoft.Net.Framework.3.5.SP1">
<Visible>False</Visible>
<ProductName>.NET Framework 3.5 SP1</ProductName>
<Install>true</Install>
</BootstrapperPackage>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj">
<Project>{f0054101-9ac9-4e0e-9e78-44ea89fc5c19}</Project>
<Name>Common</Name>
</ProjectReference>
<ProjectReference Include="..\Server\Server.csproj">
<Project>{efe1f5d6-ab1f-4fa6-8e10-9b8a197b31c7}</Project>
<Name>Server</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> |
2881099/dotnetGen_sqlserver | 7,245 | Common/BaseSocket.cs | /**********************************************************************************
*
* 此文件代码由 NicPetShop.exe 自动生成,您没有必要修改它或删除它
* NicPetShop.exe 能将数据库的关系映射到 c#,让您使用更方便,您无需要担心它的性能
* NicPetShop.exe 将永久免费给大家使用
*
* Author: Nic
* QQ: 2881099
* Email: kellynic@163.com
* 帮助: http://www.kellynic.com/default.asp?tag=NicPetShop
*
**********************************************************************************/
using System;
using System.IO;
using System.Collections.Generic;
using System.Globalization;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using System.Reflection;
public class BaseSocket {
	/// <summary>
	/// Writes a messager to the stream: the UTF-8 header produced by
	/// SocketMessager.GetCanParseString() followed, when an argument is present,
	/// by its deflate-compressed BinaryFormatter serialization.
	/// </summary>
	protected void Write(Stream stream, SocketMessager messager) {
		MemoryStream ms = new MemoryStream();
		byte[] buff = Encoding.UTF8.GetBytes(messager.GetCanParseString());
		ms.Write(buff, 0, buff.Length);
		if (messager.Arg != null) {
			buff = Deflate.Compress(BaseSocket.Serialize(messager.Arg));
			ms.Write(buff, 0, buff.Length);
		}
		this.Write(stream, ms.ToArray());
		ms.Close();
	}
	/// <summary>
	/// Frames a payload: an 8-character space-padded hex prefix carrying the total
	/// frame length (payload plus the 8 prefix bytes), then the payload itself.
	/// </summary>
	private void Write(Stream stream, byte[] data) {
		MemoryStream ms = new MemoryStream();
		byte[] buff = Encoding.UTF8.GetBytes(Convert.ToString(data.Length + 8, 16).PadRight(8));
		ms.Write(buff, 0, buff.Length);
		ms.Write(data, 0, data.Length);
		buff = ms.ToArray();
		ms.Close();
		stream.Write(buff, 0, buff.Length);
	}
	/// <summary>
	/// Reads one frame and parses it into a messager. Returns null when the length
	/// prefix is not valid hex. Throws EndOfStreamException when the remote side
	/// closes mid-frame; the previous code looped forever on a closed stream
	/// because Stream.Read returns 0 at end of stream.
	/// </summary>
	protected SocketMessager Read(Stream stream) {
		byte[] data = new byte[8];
		int bytes = 0;
		int overs = data.Length;
		string size = string.Empty;
		while (overs > 0) {
			bytes = stream.Read(data, 0, overs);
			if (bytes <= 0) throw new EndOfStreamException("Connection closed while reading the frame header.");
			overs -= bytes;
			size += Encoding.UTF8.GetString(data, 0, bytes);
		}
		if (int.TryParse(size, NumberStyles.HexNumber, null, out overs) == false) {
			return null;
		}
		// The prefix value includes its own 8 bytes; the remainder is the payload.
		overs -= data.Length;
		MemoryStream ms = new MemoryStream();
		data = new Byte[1024];
		while (overs > 0) {
			bytes = stream.Read(data, 0, overs < data.Length ? overs : data.Length);
			if (bytes <= 0) throw new EndOfStreamException("Connection closed while reading the frame body.");
			overs -= bytes;
			ms.Write(data, 0, bytes);
		}
		data = ms.ToArray();
		ms.Close();
		return SocketMessager.Parse(data);
	}
	/// <summary>
	/// Returns the index of the first occurrence of <paramref name="find"/> inside
	/// <paramref name="source"/> at or after <paramref name="startIndex"/>, or -1.
	/// </summary>
	public static int findBytes(byte[] source, byte[] find, int startIndex) {
		if (find == null) return -1;
		if (find.Length == 0) return -1;
		if (source == null) return -1;
		if (source.Length == 0) return -1;
		if (startIndex < 0) startIndex = 0;
		int idx = -1, idx2 = startIndex - 1;
		do {
			// Find the next candidate first byte, then verify the remainder in place.
			idx2 = idx = Array.FindIndex<byte>(source, Math.Min(idx2 + 1, source.Length), delegate(byte b) {
				return b == find[0];
			});
			if (idx2 != -1) {
				for (int a = 1; a < find.Length; a++) {
					if (++idx2 >= source.Length || source[idx2] != find[a]) {
						idx = -1;
						break;
					}
				}
				if (idx != -1) break;
			}
		} while (idx2 != -1);
		return idx;
	}
	/// <summary>Serializes an object with BinaryFormatter.</summary>
	public static byte[] Serialize(object obj) {
		IFormatter formatter = new BinaryFormatter();
		MemoryStream ms = new MemoryStream();
		formatter.Serialize(ms, obj);
		byte[] data = ms.ToArray();
		ms.Close();
		return data;
	}
	/// <summary>
	/// Deserializes a BinaryFormatter payload using TransmissionBinder for type resolution.
	/// SECURITY NOTE(review): BinaryFormatter can execute arbitrary code when fed
	/// attacker-controlled data — only use this with trusted peers.
	/// </summary>
	public static object Deserialize(byte[] stream) {
		IFormatter formatter = new BinaryFormatter();
		formatter.Binder = new TransmissionBinder();
		MemoryStream ms = new MemoryStream(stream);
		object obj = formatter.Deserialize(ms);
		ms.Close();
		return obj;
	}
}
/// <summary>
/// Resolves serialized type names: first against the exact requesting assembly,
/// then against any loaded assembly named "Common" (so messages from a
/// differently-versioned peer still deserialize), finally via Type.GetType.
/// </summary>
internal class TransmissionBinder : SerializationBinder {
	public override Type BindToType(string assemblyName, string typeName) {
		Assembly[] loaded = AppDomain.CurrentDomain.GetAssemblies();
		foreach (Assembly candidate in loaded) {
			if (candidate.FullName == assemblyName) return candidate.GetType(typeName);
		}
		foreach (Assembly candidate in loaded) {
			if (candidate.GetName().Name == "Common") return candidate.GetType(typeName);
		}
		return Type.GetType(typeName);
	}
}
/// <summary>
/// Protocol message exchanged by BaseSocket peers: a tab-separated UTF-8 header
/// line (id, action, permission, timestamp) terminated by CRLF, optionally
/// followed by a deflate-compressed BinaryFormatter payload.
/// </summary>
public class SocketMessager {
	// Process-wide id source; incremented atomically for every new messager.
	private static int _identity;
	// Well-known system messages. SYS_TEST_LINK ("\0") is the keep-alive probe.
	public static readonly SocketMessager SYS_TEST_LINK = new SocketMessager("\0");
	public static readonly SocketMessager SYS_HELLO_WELCOME = new SocketMessager("Hello, Welcome!");
	public static readonly SocketMessager SYS_ACCESS_DENIED = new SocketMessager("Access Denied.");
	private int _id;
	// Set when Id is reassigned after construction (e.g. when building a reply).
	public bool _isChangeId;
	private string _action;
	private string _permission;
	private DateTime _remoteTime;
	private object _arg;
	private Exception _exception;
	public SocketMessager(string action)
		: this(action, null, null) {
	}
	public SocketMessager(string action, object arg)
		: this(action, null, arg) {
	}
	public SocketMessager(string action, string permission, object arg) {
		this._id = Interlocked.Increment(ref _identity);
		this._action = action == null ? string.Empty : action;
		this._permission = permission == null ? string.Empty : permission;
		this._arg = arg;
		this._remoteTime = DateTime.Now;
	}
	public override string ToString() {
		return
			this._remoteTime.ToString("yyyy-MM-dd HH:mm:ss") + "\t" +
			this._id + "\t" +
			this._action.Replace("\t", "\\t") + "\t" +
			this._permission.Replace("\t", "\\t") + "\t" +
			this._arg;
	}
	/// <summary>
	/// Builds the wire header. System messages use abbreviated forms; normal
	/// messages escape backslash, tab and CRLF so the tab/CRLF-delimited header
	/// stays parseable by Parse().
	/// </summary>
	public string GetCanParseString() {
		if (string.Compare(this._action, SocketMessager.SYS_TEST_LINK.Action) == 0) {
			return this.Action;
		} else if (
			string.Compare(this._action, SocketMessager.SYS_HELLO_WELCOME.Action) == 0 ||
			string.Compare(this._action, SocketMessager.SYS_ACCESS_DENIED.Action) == 0) {
			return
				this._id + "\t" +
				this.Action + "\r\n";
		} else {
			return
				this._id + "\t" +
				this._action.Replace("\\", "\\\\").Replace("\t", "\\t").Replace("\r\n", "\\n") + "\t" +
				this._permission.Replace("\\", "\\\\").Replace("\t", "\\t").Replace("\r\n", "\\n") + "\t" +
				this._remoteTime.ToString("yyyy-MM-dd HH:mm:ss") + "\r\n";
		}
	}
	/// <summary>
	/// Parses a frame produced by GetCanParseString plus its optional payload.
	/// Header fields: id, action, permission, timestamp; everything after the
	/// first CRLF is the compressed, serialized argument.
	/// </summary>
	public static SocketMessager Parse(byte[] data) {
		if (data == null) return new SocketMessager("NULL");
		// The single NUL byte is the keep-alive probe.
		if (data.Length == 1 && data[0] == 0) return SocketMessager.SYS_TEST_LINK;
		int idx = BaseSocket.findBytes(data, new byte[] { 13, 10 }, 0);
		string text = Encoding.UTF8.GetString(data, 0, idx);
		string[] loc1 = text.Split(new string[] { "\t" }, 4, StringSplitOptions.None);
		string loc2 = loc1[0];
		// Un-escape action/permission (inverse of GetCanParseString).
		string loc3 = loc1.Length > 1 ? loc1[1].Replace("\\\\", "\\").Replace("\\t", "\t").Replace("\\n", "\r\n") : null;
		string loc4 = loc1.Length > 2 ? loc1[2].Replace("\\\\", "\\").Replace("\\t", "\t").Replace("\\n", "\r\n") : null;
		string loc5 = loc1.Length > 3 ? loc1[3] : null;
		MemoryStream ms = new MemoryStream();
		ms.Write(data, idx + 2, data.Length - idx - 2);
		SocketMessager messager = new SocketMessager(loc3, loc4,
			ms.Length > 0 ? BaseSocket.Deserialize(Deflate.Decompress(ms.ToArray())) : null);
		if (int.TryParse(loc2, out idx)) messager._id = idx;
		if (!string.IsNullOrEmpty(loc5)) DateTime.TryParse(loc5, out messager._remoteTime);
		// A serialized exception payload is also exposed via the Exception property.
		if (messager._arg is Exception) messager._exception = messager._arg as Exception;
		return messager;
	}
	/// <summary>
	/// Message id; the server side uses -, the client side uses +.
	/// </summary>
	public int Id {
		get { return _id; }
		set {
			if (_id != value) {
				_isChangeId = true;
			}
			_id = value;
		}
	}
	public string Action {
		get { return _action; }
	}
	public string Permission {
		get { return _permission; }
	}
	public DateTime RemoteTime {
		get { return _remoteTime; }
	}
	public object Arg {
		get { return _arg; }
	}
	public Exception Exception {
		get { return _exception; }
	}
}
2881099/dotnetGen_mysql | 1,194 | Server/Resources/Infrastructure/Controllers/CustomExceptionFilter.cs | using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
/// <summary>
/// Global MVC exception filter: converts any unhandled action exception into an
/// APIReturn failure result and logs the full (inner) stack trace.
/// </summary>
public class CustomExceptionFilter : Attribute, IExceptionFilter {
	private readonly ILogger _logger;
	private readonly IConfiguration _cfg;
	private readonly IHostingEnvironment _env;
	public CustomExceptionFilter (ILogger<CustomExceptionFilter> logger, IConfiguration cfg, IHostingEnvironment env) {
		_logger = logger;
		_cfg = cfg;
		_env = env;
	}
	public void OnException(ExceptionContext context) {
		var ex = context.Exception;
		// Report the failure to the caller with the exception message.
		context.Result = APIReturn.失败.SetMessage(ex.Message);
		// Append the inner exception details, when present, to the log entry.
		var innerLog = ex.InnerException != null ? $" \r\n{ex.InnerException.Message} \r\n{ex.InnerException.StackTrace}" : "";
		_logger.LogError($"=============错误:{ex.Message} \r\n{ex.StackTrace}{innerLog}");
		// The response has been produced; stop further propagation.
		context.ExceptionHandled = true;
	}
}
2881099/dotnetGen_mysql | 5,159 | Server/Resources/Infrastructure/Controllers/BaseController.cs | using Microsoft.AspNetCore.Cors;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using System;
using System.Collections;
using System.Linq;
using System.Threading.Tasks;
[ServiceFilter(typeof(CustomExceptionFilter)), EnableCors("cors_all")]
public partial class BaseController : Controller {
	public ILogger _logger;
	public ISession Session { get { return HttpContext.Session; } }
	public HttpRequest Req { get { return Request; } }
	public HttpResponse Res { get { return Response; } }
	// Client IP: prefer the reverse-proxy header, fall back to the raw connection address.
	public string Ip => this.Request.Headers["X-Real-IP"].FirstOrDefault() ?? this.Request.HttpContext.Connection.RemoteIpAddress.ToString();
	public IConfiguration Configuration => (IConfiguration) HttpContext.RequestServices.GetService(typeof(IConfiguration));
	//public SysuserInfo LoginUser { get; private set; }
	public BaseController(ILogger logger) { _logger = logger; }
	/// <summary>
	/// Runs before every action: rejects requests whose model binding produced
	/// validation errors; the commented block below would resolve the signed-in user.
	/// </summary>
	public override void OnActionExecuting(ActionExecutingContext context) {
		#region 参数验证
		if (context.ModelState.IsValid == false)
			foreach (var value in context.ModelState.Values)
				if (value.Errors.Any()) {
					context.Result = APIReturn.参数格式不正确.SetMessage($"参数格式不正确:{value.Errors.First().ErrorMessage}");
					return;
				}
		#endregion
		#region 初始化当前登陆账号
		//string username = Session.GetString("login.username");
		//if (!string.IsNullOrEmpty(username)) LoginUser = Sysuser.GetItemByUsername(username);
		//var method = (context.ActionDescriptor as ControllerActionDescriptor).MethodInfo;
		//if (method.GetCustomAttribute<需要登陆Attribute>() != null && LoginUser == null)
		//	context.Result = new RedirectResult("/signin");
		//else if (method.GetCustomAttribute<匿名访问Attribute>() == null && LoginUser == null)
		//	context.Result = new RedirectResult("/signin");
		//ViewBag.user = LoginUser;
		#endregion
		base.OnActionExecuting(context);
	}
	public override void OnActionExecuted(ActionExecutedContext context) {
		base.OnActionExecuted(context);
	}
	#region 角色权限验证
	//public bool sysrole_check(string url) {
	//	url = url.ToLower();
	//	//Response.Write(url + "<br>");
	//	if (url == "/" || url.IndexOf("/default.aspx") == 0) return true;
	//	foreach(var role in this.LoginUser.Obj_sysroles) {
	//		//Response.Write(role.ToString());
	//		foreach(var dir in role.Obj_sysdirs) {
	//			//Response.Write("-----------------" + dir.ToString() + "<br>");
	//			string tmp = dir.Url;
	//			if (tmp.EndsWith("/")) tmp += "default.aspx";
	//			if (url.IndexOf(tmp) == 0) return true;
	//		}
	//	}
	//	return false;
	//}
	#endregion
}
#region 需要登陆、匿名访问
/// <summary>Marker attribute: the action requires a signed-in user (需要登陆).</summary>
public partial class 需要登陆Attribute : Attribute { }
/// <summary>Marker attribute: the action may be accessed anonymously (匿名访问).</summary>
public partial class 匿名访问Attribute : Attribute { }
#endregion
#region APIReturn
/// <summary>
/// Uniform JSON API response (code/message/data/success) that doubles as an MVC
/// ContentResult; serialization happens in ExecuteResult via Jsonp().
/// </summary>
[JsonObject(MemberSerialization.OptIn)]
public partial class APIReturn : ContentResult {
	[JsonProperty("code")] public int Code { get; protected set; }
	[JsonProperty("message")] public string Message { get; protected set; }
	[JsonProperty("data")] public Hashtable Data { get; protected set; } = new Hashtable();
	// Success is derived: code 0 means OK.
	[JsonProperty("success")] public bool Success { get { return this.Code == 0; } }
	public APIReturn() { }
	public APIReturn(int code) { this.SetCode(code); }
	public APIReturn(string message) { this.SetMessage(message); }
	public APIReturn(int code, string message, params object[] data) { this.SetCode(code).SetMessage(message).AppendData(data); }
	public APIReturn SetCode(int value) { this.Code = value; return this; }
	public APIReturn SetMessage(string value) { this.Message = value; return this; }
	// Replaces Data with the given alternating key/value pairs.
	public APIReturn SetData(params object[] value) {
		this.Data.Clear();
		return this.AppendData(value);
	}
	// Appends alternating key/value pairs; null keys are skipped and a trailing
	// key without a value maps to null.
	public APIReturn AppendData(params object[] value) {
		if (value == null || value.Length < 2 || value[0] == null) return this;
		for (int a = 0; a < value.Length; a += 2) {
			if (value[a] == null) continue;
			this.Data[value[a]] = a + 1 < value.Length ? value[a + 1] : null;
		}
		return this;
	}
	#region form 表单 target=iframe 提交回调处理
	// When the request carries a "__callback" form field (iframe form post),
	// wrap the JSON in a <script> invoking the callback on the parent window;
	// otherwise emit plain JSON.
	private void Jsonp(ActionContext context) {
		string __callback = context.HttpContext.Request.HasFormContentType ? context.HttpContext.Request.Form["__callback"].ToString() : null;
		if (string.IsNullOrEmpty(__callback)) {
			this.ContentType = "text/json;charset=utf-8;";
			this.Content = JsonConvert.SerializeObject(this);
		}else {
			this.ContentType = "text/html;charset=utf-8";
			this.Content = $"<script>top.{__callback}({GlobalExtensions.Json(null, this)});</script>";
		}
	}
	public override void ExecuteResult(ActionContext context) {
		Jsonp(context);
		base.ExecuteResult(context);
	}
	public override Task ExecuteResultAsync(ActionContext context) {
		Jsonp(context);
		return base.ExecuteResultAsync(context);
	}
	#endregion
	// Canned responses; the Chinese member names are part of the public API.
	public static APIReturn 成功 { get { return new APIReturn(0, "成功"); } }
	public static APIReturn 失败 { get { return new APIReturn(99, "失败"); } }
	public static APIReturn 记录不存在_或者没有权限 { get { return new APIReturn(98, "记录不存在,或者没有权限"); } }
	public static APIReturn 参数格式不正确 { get { return new APIReturn(97, "参数格式不正确"); } }
}
#endregion
|
2881099/dotnetGen_postgresql | 1,814 | ServerWinService/Settings.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace ServerWinService {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "14.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
public static Settings Default {
get {
return defaultInstance;
}
}
[global::System.Configuration.UserScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("")]
public string cs_head {
get {
return ((string)(this["cs_head"]));
}
set {
this["cs_head"] = value;
}
}
[global::System.Configuration.UserScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("38888")]
public int socket_port {
get {
return ((int)(this["socket_port"]));
}
set {
this["socket_port"] = value;
}
}
}
}
|
27182812/ChatGLM-LLaMA-chinese-insturct | 44,247 | src/transformers/models/deta/image_processing_deta.py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Image processor class for Deformable DETR."""
import pathlib
import warnings
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
import numpy as np
from ...feature_extraction_utils import BatchFeature
from ...image_processing_utils import BaseImageProcessor, get_size_dict
from ...image_transforms import (
PaddingMode,
center_to_corners_format,
corners_to_center_format,
normalize,
pad,
rescale,
resize,
rgb_to_id,
to_channel_dimension_format,
)
from ...image_utils import (
IMAGENET_DEFAULT_MEAN,
IMAGENET_DEFAULT_STD,
ChannelDimension,
ImageInput,
PILImageResampling,
get_image_size,
infer_channel_dimension_format,
is_batched,
to_numpy_array,
valid_coco_detection_annotations,
valid_coco_panoptic_annotations,
valid_images,
)
from ...utils import (
is_flax_available,
is_jax_tensor,
is_tf_available,
is_tf_tensor,
is_torch_available,
is_torch_tensor,
is_torchvision_available,
is_vision_available,
)
from ...utils.generic import ExplicitEnum, TensorType
if is_torch_available():
import torch
from ...pytorch_utils import torch_int_div
if is_torchvision_available():
from torchvision.ops.boxes import batched_nms
if is_vision_available():
import PIL
class AnnotionFormat(ExplicitEnum):
    # Supported annotation formats. NOTE: "Annotion" (sic) is kept misspelled
    # because the class name is part of this module's public API.
    COCO_DETECTION = "coco_detection"
    COCO_PANOPTIC = "coco_panoptic"
SUPPORTED_ANNOTATION_FORMATS = (AnnotionFormat.COCO_DETECTION, AnnotionFormat.COCO_PANOPTIC)
# Copied from transformers.models.detr.image_processing_detr.get_size_with_aspect_ratio
def get_size_with_aspect_ratio(image_size, size, max_size=None) -> Tuple[int, int]:
    """
    Compute the output (height, width) for the given input size.

    The shorter edge is scaled to `size` while preserving aspect ratio; when
    that would push the longer edge beyond `max_size`, `size` is first reduced
    so the longer edge lands exactly on `max_size`.

    Args:
        image_size (`Tuple[int, int]`):
            The input image size as (height, width).
        size (`int`):
            The desired shorter-edge length.
        max_size (`int`, *optional*):
            The maximum allowed longer-edge length.
    """
    height, width = image_size
    if max_size is not None:
        shorter = float(min(height, width))
        longer = float(max(height, width))
        if longer / shorter * size > max_size:
            size = int(round(max_size * shorter / longer))
    # Already at the target shorter-edge length: keep the original size.
    if (height <= width and height == size) or (width <= height and width == size):
        return height, width
    if width < height:
        return (int(size * height / width), size)
    return (size, int(size * width / height))
# Copied from transformers.models.detr.image_processing_detr.get_resize_output_image_size
def get_resize_output_image_size(
    input_image: np.ndarray, size: Union[int, Tuple[int, int], List[int]], max_size: Optional[int] = None
) -> Tuple[int, int]:
    """
    Compute the output (height, width) for `input_image`.

    A tuple/list `size` is returned as-is; an integer `size` is interpreted as
    the shorter-edge target, the other edge following the input aspect ratio
    (capped by `max_size`).
    """
    image_size = get_image_size(input_image)
    if not isinstance(size, (list, tuple)):
        return get_size_with_aspect_ratio(image_size, size, max_size)
    return size
# Copied from transformers.models.detr.image_processing_detr.get_numpy_to_framework_fn
def get_numpy_to_framework_fn(arr) -> Callable:
    """
    Returns a function that converts a numpy array to the framework of the input array.

    Args:
        arr (`np.ndarray`): The array to convert.

    Raises:
        ValueError: if `arr` is not a numpy array or a tensor of an available framework.
    """
    if isinstance(arr, np.ndarray):
        return np.array
    # Framework imports are deferred until the matching availability check
    # passes, so missing optional dependencies never break this module.
    if is_tf_available() and is_tf_tensor(arr):
        import tensorflow as tf
        return tf.convert_to_tensor
    if is_torch_available() and is_torch_tensor(arr):
        import torch
        return torch.tensor
    if is_flax_available() and is_jax_tensor(arr):
        import jax.numpy as jnp
        return jnp.array
    raise ValueError(f"Cannot convert arrays of type {type(arr)}")
# Copied from transformers.models.detr.image_processing_detr.safe_squeeze
def safe_squeeze(arr: np.ndarray, axis: Optional[int] = None) -> np.ndarray:
    """
    Squeezes an array, but only if the axis specified has dim 1.
    """
    if axis is None:
        return arr.squeeze()
    try:
        return arr.squeeze(axis=axis)
    except ValueError:
        # The axis is not of size 1 (or is invalid): squeezing is a no-op.
        return arr
# Copied from transformers.models.detr.image_processing_detr.normalize_annotation
def normalize_annotation(annotation: Dict, image_size: Tuple[int, int]) -> Dict:
    """
    Convert the "boxes" entry of an annotation from absolute corner coordinates
    to relative (center_x, center_y, width, height) normalized by the image
    size; every other entry is passed through unchanged.
    """
    image_height, image_width = image_size
    scale = np.asarray([image_width, image_height, image_width, image_height], dtype=np.float32)
    normalized = {}
    for key, value in annotation.items():
        if key != "boxes":
            normalized[key] = value
        else:
            centered = corners_to_center_format(value)
            centered /= scale
            normalized[key] = centered
    return normalized
# Copied from transformers.models.detr.image_processing_detr.max_across_indices
def max_across_indices(values: Iterable[Any]) -> List[Any]:
    """
    Element-wise maximum across an iterable of equal-length sequences, e.g.
    ``max_across_indices([(1, 5), (3, 2)]) == [3, 5]``.
    """
    return list(map(max, zip(*values)))
# Copied from transformers.models.detr.image_processing_detr.get_max_height_width
def get_max_height_width(images: List[np.ndarray]) -> List[int]:
    """
    Largest height and width over a batch of images, with the channel layout
    inferred from the first image.
    """
    channel_format = infer_channel_dimension_format(images[0])
    if channel_format == ChannelDimension.FIRST:
        _, max_height, max_width = max_across_indices([img.shape for img in images])
    elif channel_format == ChannelDimension.LAST:
        max_height, max_width, _ = max_across_indices([img.shape for img in images])
    else:
        raise ValueError(f"Invalid channel dimension format: {channel_format}")
    return (max_height, max_width)
# Copied from transformers.models.detr.image_processing_detr.make_pixel_mask
def make_pixel_mask(image: np.ndarray, output_size: Tuple[int, int]) -> np.ndarray:
    """
    Build a pixel mask of shape `output_size`: 1 where the (top-left anchored)
    image has valid pixels, 0 over the padded region.

    Args:
        image (`np.ndarray`):
            Image to make the pixel mask for.
        output_size (`Tuple[int, int]`):
            Output size of the mask.
    """
    height, width = get_image_size(image)
    mask = np.zeros(output_size, dtype=np.int64)
    mask[:height, :width] = 1
    return mask
# Copied from transformers.models.detr.image_processing_detr.convert_coco_poly_to_mask
def convert_coco_poly_to_mask(segmentations, height: int, width: int) -> np.ndarray:
    """
    Convert a COCO polygon annotation to a mask.

    Args:
        segmentations (`List[List[float]]`):
            List of polygons, each polygon represented by a list of x-y coordinates.
        height (`int`):
            Height of the mask.
        width (`int`):
            Width of the mask.

    Returns a uint8/bool array of shape (num_polygons, height, width).
    """
    try:
        from pycocotools import mask as coco_mask
    except ImportError:
        raise ImportError("Pycocotools is not installed in your environment.")
    masks = []
    for polygons in segmentations:
        # Rasterize the polygon via run-length encoding, then decode to a bitmap.
        rles = coco_mask.frPyObjects(polygons, height, width)
        mask = coco_mask.decode(rles)
        if len(mask.shape) < 3:
            mask = mask[..., None]
        mask = np.asarray(mask, dtype=np.uint8)
        # Union over the polygon parts of a multi-part segmentation.
        mask = np.any(mask, axis=2)
        masks.append(mask)
    if masks:
        masks = np.stack(masks, axis=0)
    else:
        masks = np.zeros((0, height, width), dtype=np.uint8)
    return masks
# Copied from transformers.models.detr.image_processing_detr.prepare_coco_detection_annotation with DETR->DETA
def prepare_coco_detection_annotation(image, target, return_segmentation_masks: bool = False):
    """
    Convert the target in COCO format into the format expected by DETA.

    Crowd annotations are dropped, boxes are converted from (x, y, w, h) to
    clipped corner format, and degenerate (zero-area) boxes are filtered out
    together with their class/area/iscrowd/keypoint entries.
    """
    image_height, image_width = get_image_size(image)
    image_id = target["image_id"]
    image_id = np.asarray([image_id], dtype=np.int64)
    # Get all COCO annotations for the given image.
    annotations = target["annotations"]
    annotations = [obj for obj in annotations if "iscrowd" not in obj or obj["iscrowd"] == 0]
    classes = [obj["category_id"] for obj in annotations]
    classes = np.asarray(classes, dtype=np.int64)
    # for conversion to coco api
    area = np.asarray([obj["area"] for obj in annotations], dtype=np.float32)
    iscrowd = np.asarray([obj["iscrowd"] if "iscrowd" in obj else 0 for obj in annotations], dtype=np.int64)
    boxes = [obj["bbox"] for obj in annotations]
    # guard against no boxes via resizing
    boxes = np.asarray(boxes, dtype=np.float32).reshape(-1, 4)
    # (x, y, w, h) -> (x0, y0, x1, y1), clipped to the image bounds.
    boxes[:, 2:] += boxes[:, :2]
    boxes[:, 0::2] = boxes[:, 0::2].clip(min=0, max=image_width)
    boxes[:, 1::2] = boxes[:, 1::2].clip(min=0, max=image_height)
    # Keep only boxes with strictly positive width and height.
    keep = (boxes[:, 3] > boxes[:, 1]) & (boxes[:, 2] > boxes[:, 0])
    new_target = {}
    new_target["image_id"] = image_id
    new_target["class_labels"] = classes[keep]
    new_target["boxes"] = boxes[keep]
    new_target["area"] = area[keep]
    new_target["iscrowd"] = iscrowd[keep]
    new_target["orig_size"] = np.asarray([int(image_height), int(image_width)], dtype=np.int64)
    if annotations and "keypoints" in annotations[0]:
        keypoints = [obj["keypoints"] for obj in annotations]
        keypoints = np.asarray(keypoints, dtype=np.float32)
        num_keypoints = keypoints.shape[0]
        # Reshape to (num_annotations * num_points, 3) triples of (x, y, visibility).
        keypoints = keypoints.reshape((-1, 3)) if num_keypoints else keypoints
        new_target["keypoints"] = keypoints[keep]
    if return_segmentation_masks:
        segmentation_masks = [obj["segmentation"] for obj in annotations]
        masks = convert_coco_poly_to_mask(segmentation_masks, image_height, image_width)
        new_target["masks"] = masks[keep]
    return new_target
# Copied from transformers.models.detr.image_processing_detr.masks_to_boxes
def masks_to_boxes(masks: np.ndarray) -> np.ndarray:
    """
    Compute the bounding boxes around the provided panoptic segmentation masks.

    Args:
        masks: binary masks in format `[number_masks, height, width]`.

    Returns:
        boxes in format `[number_masks, 4]` as (x_min, y_min, x_max, y_max).
    """
    if masks.size == 0:
        return np.zeros((0, 4))
    height, width = masks.shape[-2:]
    # Per-pixel coordinate grids of shape (height, width).
    # see https://github.com/pytorch/pytorch/issues/50276
    grid_y, grid_x = np.meshgrid(
        np.arange(0, height, dtype=np.float32),
        np.arange(0, width, dtype=np.float32),
        indexing="ij",
    )
    outside = ~(np.array(masks, dtype=bool))
    num_masks = masks.shape[0]

    # Max over in-mask coordinates; min via masking out-of-mask pixels with a
    # huge fill value so they never win.
    x_weighted = masks * np.expand_dims(grid_x, axis=0)
    x_max = x_weighted.reshape(num_masks, -1).max(-1)
    x_min = np.ma.array(x_weighted, mask=outside).filled(fill_value=1e8).reshape(num_masks, -1).min(-1)

    y_weighted = masks * np.expand_dims(grid_y, axis=0)
    y_max = y_weighted.reshape(num_masks, -1).max(-1)
    y_min = np.ma.array(y_weighted, mask=outside).filled(fill_value=1e8).reshape(num_masks, -1).min(-1)

    return np.stack([x_min, y_min, x_max, y_max], 1)
# Copied from transformers.models.detr.image_processing_detr.prepare_coco_panoptic_annotation with DETR->DETA
def prepare_coco_panoptic_annotation(
    image: np.ndarray, target: Dict, masks_path: Union[str, pathlib.Path], return_masks: bool = True
) -> Dict:
    """
    Prepare a coco panoptic annotation for DETA.

    Converts a COCO-panoptic target (a `segments_info` list plus a panoptic PNG file)
    into the per-image dict of numpy arrays the model expects.
    """
    image_height, image_width = get_image_size(image)
    annotation_path = pathlib.Path(masks_path) / target["file_name"]

    image_id = target["image_id"] if "image_id" in target else target["id"]
    new_target = {
        "image_id": np.asarray([image_id], dtype=np.int64),
        "size": np.asarray([image_height, image_width], dtype=np.int64),
        "orig_size": np.asarray([image_height, image_width], dtype=np.int64),
    }

    if "segments_info" in target:
        segments_info = target["segments_info"]

        # Decode the panoptic PNG into one binary mask per segment id.
        panoptic_map = np.asarray(PIL.Image.open(annotation_path), dtype=np.uint32)
        panoptic_map = rgb_to_id(panoptic_map)
        segment_ids = np.array([info["id"] for info in segments_info])
        binary_masks = (panoptic_map == segment_ids[:, None, None]).astype(np.uint8)

        if return_masks:
            new_target["masks"] = binary_masks
        new_target["boxes"] = masks_to_boxes(binary_masks)
        new_target["class_labels"] = np.array([info["category_id"] for info in segments_info], dtype=np.int64)
        new_target["iscrowd"] = np.asarray([info["iscrowd"] for info in segments_info], dtype=np.int64)
        new_target["area"] = np.asarray([info["area"] for info in segments_info], dtype=np.float32)

    return new_target
# Copied from transformers.models.detr.image_processing_detr.resize_annotation
def resize_annotation(
    annotation: Dict[str, Any],
    orig_size: Tuple[int, int],
    target_size: Tuple[int, int],
    threshold: float = 0.5,
    resample: PILImageResampling = PILImageResampling.NEAREST,
):
    """
    Resizes an annotation to a target size.

    Args:
        annotation (`Dict[str, Any]`):
            The annotation dictionary.
        orig_size (`Tuple[int, int]`):
            The original size of the input image.
        target_size (`Tuple[int, int]`):
            The target size of the image, as returned by the preprocessing `resize` step.
        threshold (`float`, *optional*, defaults to 0.5):
            The threshold used to binarize the segmentation masks.
        resample (`PILImageResampling`, defaults to `PILImageResampling.NEAREST`):
            The resampling filter to use when resizing the masks.
    """
    # Per-dimension scale factors: (height_ratio, width_ratio).
    ratio_height, ratio_width = (
        float(new_dim) / float(old_dim) for new_dim, old_dim in zip(target_size, orig_size)
    )

    new_annotation = {"size": target_size}

    for key, value in annotation.items():
        if key == "boxes":
            # Boxes are (x, y, x, y), so scale x by width ratio and y by height ratio.
            scale = np.asarray([ratio_width, ratio_height, ratio_width, ratio_height], dtype=np.float32)
            new_annotation["boxes"] = value * scale
        elif key == "area":
            new_annotation["area"] = value * (ratio_width * ratio_height)
        elif key == "masks":
            # Resize each mask independently, then re-binarize with the threshold.
            resized = np.array([resize(mask, target_size, resample=resample) for mask in value[:, None]])
            new_annotation["masks"] = resized.astype(np.float32)[:, 0] > threshold
        elif key == "size":
            new_annotation["size"] = target_size
        else:
            new_annotation[key] = value

    return new_annotation
class DetaImageProcessor(BaseImageProcessor):
    r"""
    Constructs a Deformable DETR image processor.
    Args:
        format (`str`, *optional*, defaults to `"coco_detection"`):
            Data format of the annotations. One of "coco_detection" or "coco_panoptic".
        do_resize (`bool`, *optional*, defaults to `True`):
            Controls whether to resize the image's (height, width) dimensions to the specified `size`. Can be
            overridden by the `do_resize` parameter in the `preprocess` method.
        size (`Dict[str, int]` *optional*, defaults to `{"shortest_edge": 800, "longest_edge": 1333}`):
            Size of the image's (height, width) dimensions after resizing. Can be overridden by the `size` parameter in
            the `preprocess` method.
        resample (`PILImageResampling`, *optional*, defaults to `PILImageResampling.BILINEAR`):
            Resampling filter to use if resizing the image.
        do_rescale (`bool`, *optional*, defaults to `True`):
            Controls whether to rescale the image by the specified scale `rescale_factor`. Can be overridden by the
            `do_rescale` parameter in the `preprocess` method.
        rescale_factor (`int` or `float`, *optional*, defaults to `1/255`):
            Scale factor to use if rescaling the image. Can be overridden by the `rescale_factor` parameter in the
            `preprocess` method.
        do_normalize:
            Controls whether to normalize the image. Can be overridden by the `do_normalize` parameter in the
            `preprocess` method.
        image_mean (`float` or `List[float]`, *optional*, defaults to `IMAGENET_DEFAULT_MEAN`):
            Mean values to use when normalizing the image. Can be a single value or a list of values, one for each
            channel. Can be overridden by the `image_mean` parameter in the `preprocess` method.
        image_std (`float` or `List[float]`, *optional*, defaults to `IMAGENET_DEFAULT_STD`):
            Standard deviation values to use when normalizing the image. Can be a single value or a list of values, one
            for each channel. Can be overridden by the `image_std` parameter in the `preprocess` method.
        do_pad (`bool`, *optional*, defaults to `True`):
            Controls whether to pad the image to the largest image in a batch and create a pixel mask. Can be
            overridden by the `do_pad` parameter in the `preprocess` method.
    """

    model_input_names = ["pixel_values", "pixel_mask"]

    def __init__(
        self,
        format: Union[str, AnnotionFormat] = AnnotionFormat.COCO_DETECTION,
        do_resize: bool = True,
        size: Dict[str, int] = None,
        resample: PILImageResampling = PILImageResampling.BILINEAR,
        do_rescale: bool = True,
        rescale_factor: Union[int, float] = 1 / 255,
        do_normalize: bool = True,
        image_mean: Union[float, List[float]] = None,
        image_std: Union[float, List[float]] = None,
        do_pad: bool = True,
        **kwargs,
    ) -> None:
        # Backwards compatibility: `pad_and_return_pixel_mask` is the old name for `do_pad`.
        if "pad_and_return_pixel_mask" in kwargs:
            do_pad = kwargs.pop("pad_and_return_pixel_mask")
        size = size if size is not None else {"shortest_edge": 800, "longest_edge": 1333}
        size = get_size_dict(size, default_to_square=False)
        super().__init__(**kwargs)
        self.format = format
        self.do_resize = do_resize
        self.size = size
        self.resample = resample
        self.do_rescale = do_rescale
        self.rescale_factor = rescale_factor
        self.do_normalize = do_normalize
        self.image_mean = image_mean if image_mean is not None else IMAGENET_DEFAULT_MEAN
        self.image_std = image_std if image_std is not None else IMAGENET_DEFAULT_STD
        self.do_pad = do_pad

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.prepare_annotation with DETR->DETA
    def prepare_annotation(
        self,
        image: np.ndarray,
        target: Dict,
        format: Optional[AnnotionFormat] = None,
        return_segmentation_masks: bool = None,
        masks_path: Optional[Union[str, pathlib.Path]] = None,
    ) -> Dict:
        """
        Prepare an annotation for feeding into DETA model.
        """
        format = format if format is not None else self.format
        if format == AnnotionFormat.COCO_DETECTION:
            return_segmentation_masks = False if return_segmentation_masks is None else return_segmentation_masks
            target = prepare_coco_detection_annotation(image, target, return_segmentation_masks)
        elif format == AnnotionFormat.COCO_PANOPTIC:
            return_segmentation_masks = True if return_segmentation_masks is None else return_segmentation_masks
            target = prepare_coco_panoptic_annotation(
                image, target, masks_path=masks_path, return_masks=return_segmentation_masks
            )
        else:
            raise ValueError(f"Format {format} is not supported.")
        return target

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.prepare
    def prepare(self, image, target, return_segmentation_masks=None, masks_path=None):
        warnings.warn(
            "The `prepare` method is deprecated and will be removed in a future version. "
            "Please use `prepare_annotation` instead. Note: the `prepare_annotation` method "
            "does not return the image anymore.",
        )
        # FIX: the arguments were previously passed positionally in the wrong order
        # (`return_segmentation_masks` landed in the `format` parameter, `masks_path` in
        # `return_segmentation_masks`, and `self.format` in `masks_path`). Pass keywords
        # so each value reaches the parameter it was intended for.
        target = self.prepare_annotation(
            image,
            target,
            format=self.format,
            return_segmentation_masks=return_segmentation_masks,
            masks_path=masks_path,
        )
        return image, target

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.convert_coco_poly_to_mask
    def convert_coco_poly_to_mask(self, *args, **kwargs):
        warnings.warn("The `convert_coco_poly_to_mask` method is deprecated and will be removed in a future version. ")
        return convert_coco_poly_to_mask(*args, **kwargs)

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.prepare_coco_detection
    def prepare_coco_detection(self, *args, **kwargs):
        warnings.warn("The `prepare_coco_detection` method is deprecated and will be removed in a future version. ")
        return prepare_coco_detection_annotation(*args, **kwargs)

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.prepare_coco_panoptic
    def prepare_coco_panoptic(self, *args, **kwargs):
        warnings.warn("The `prepare_coco_panoptic` method is deprecated and will be removed in a future version. ")
        return prepare_coco_panoptic_annotation(*args, **kwargs)

    def resize(
        self,
        image: np.ndarray,
        size: Dict[str, int],
        resample: PILImageResampling = PILImageResampling.BILINEAR,
        data_format: Optional[ChannelDimension] = None,
        **kwargs,
    ) -> np.ndarray:
        """
        Resize the image to the given size. Size can be `min_size` (scalar) or `(height, width)` tuple. If size is an
        int, smaller edge of the image will be matched to this number.
        """
        size = get_size_dict(size, default_to_square=False)
        if "shortest_edge" in size and "longest_edge" in size:
            # Aspect-preserving resize: match the shortest edge, capped by the longest edge.
            size = get_resize_output_image_size(image, size["shortest_edge"], size["longest_edge"])
        elif "height" in size and "width" in size:
            size = (size["height"], size["width"])
        else:
            raise ValueError(
                "Size must contain 'height' and 'width' keys or 'shortest_edge' and 'longest_edge' keys. Got"
                f" {size.keys()}."
            )
        image = resize(image, size=size, resample=resample, data_format=data_format)
        return image

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.resize_annotation
    def resize_annotation(
        self,
        annotation,
        orig_size,
        size,
        resample: PILImageResampling = PILImageResampling.NEAREST,
    ) -> Dict:
        """
        Resize the annotation to match the resized image. If size is an int, smaller edge of the mask will be matched
        to this number.
        """
        return resize_annotation(annotation, orig_size=orig_size, target_size=size, resample=resample)

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.rescale
    def rescale(
        self, image: np.ndarray, rescale_factor: Union[float, int], data_format: Optional[ChannelDimension] = None
    ) -> np.ndarray:
        """
        Rescale the image by the given factor.
        """
        return rescale(image, rescale_factor, data_format=data_format)

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.normalize
    def normalize(
        self,
        image: np.ndarray,
        mean: Union[float, Iterable[float]],
        std: Union[float, Iterable[float]],
        data_format: Optional[ChannelDimension] = None,
    ) -> np.ndarray:
        """
        Normalize the image with the given mean and standard deviation.
        """
        return normalize(image, mean=mean, std=std, data_format=data_format)

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.normalize_annotation
    def normalize_annotation(self, annotation: Dict, image_size: Tuple[int, int]) -> Dict:
        """
        Normalize the boxes in the annotation from `[top_left_x, top_left_y, bottom_right_x, bottom_right_y]` to
        `[center_x, center_y, width, height]` format.
        """
        return normalize_annotation(annotation, image_size=image_size)

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.pad_and_create_pixel_mask
    def pad_and_create_pixel_mask(
        self,
        pixel_values_list: List[ImageInput],
        return_tensors: Optional[Union[str, TensorType]] = None,
        data_format: Optional[ChannelDimension] = None,
    ) -> BatchFeature:
        """
        Pads a batch of images with zeros to the size of largest height and width in the batch and returns their
        corresponding pixel mask.
        Args:
            images (`List[np.ndarray]`):
                Batch of images to pad.
            return_tensors (`str` or `TensorType`, *optional*):
                The type of tensors to return. Can be one of:
                    - Unset: Return a list of `np.ndarray`.
                    - `TensorType.TENSORFLOW` or `'tf'`: Return a batch of type `tf.Tensor`.
                    - `TensorType.PYTORCH` or `'pt'`: Return a batch of type `torch.Tensor`.
                    - `TensorType.NUMPY` or `'np'`: Return a batch of type `np.ndarray`.
                    - `TensorType.JAX` or `'jax'`: Return a batch of type `jax.numpy.ndarray`.
            data_format (`str` or `ChannelDimension`, *optional*):
                The channel dimension format of the image. If not provided, it will be the same as the input image.
        """
        warnings.warn(
            "This method is deprecated and will be removed in v4.27.0. Please use pad instead.", FutureWarning
        )
        # pad expects a list of np.ndarray, but the previous feature extractors expected torch tensors
        images = [to_numpy_array(image) for image in pixel_values_list]
        return self.pad(
            images=images,
            return_pixel_mask=True,
            return_tensors=return_tensors,
            data_format=data_format,
        )

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor._pad_image
    def _pad_image(
        self,
        image: np.ndarray,
        output_size: Tuple[int, int],
        constant_values: Union[float, Iterable[float]] = 0,
        data_format: Optional[ChannelDimension] = None,
    ) -> np.ndarray:
        """
        Pad an image with zeros to the given size.
        """
        input_height, input_width = get_image_size(image)
        output_height, output_width = output_size
        # Padding is applied on the bottom and right edges only.
        pad_bottom = output_height - input_height
        pad_right = output_width - input_width
        padding = ((0, pad_bottom), (0, pad_right))
        padded_image = pad(
            image, padding, mode=PaddingMode.CONSTANT, constant_values=constant_values, data_format=data_format
        )
        return padded_image

    # Copied from transformers.models.detr.image_processing_detr.DetrImageProcessor.pad
    def pad(
        self,
        images: List[np.ndarray],
        constant_values: Union[float, Iterable[float]] = 0,
        return_pixel_mask: bool = True,
        return_tensors: Optional[Union[str, TensorType]] = None,
        data_format: Optional[ChannelDimension] = None,
    ) -> np.ndarray:
        """
        Pads a batch of images to the bottom and right of the image with zeros to the size of largest height and width
        in the batch and optionally returns their corresponding pixel mask.
        Args:
            image (`np.ndarray`):
                Image to pad.
            constant_values (`float` or `Iterable[float]`, *optional*):
                The value to use for the padding if `mode` is `"constant"`.
            return_pixel_mask (`bool`, *optional*, defaults to `True`):
                Whether to return a pixel mask.
            input_channel_dimension (`ChannelDimension`, *optional*):
                The channel dimension format of the image. If not provided, it will be inferred from the input image.
            data_format (`str` or `ChannelDimension`, *optional*):
                The channel dimension format of the image. If not provided, it will be the same as the input image.
        """
        pad_size = get_max_height_width(images)
        padded_images = [
            self._pad_image(image, pad_size, constant_values=constant_values, data_format=data_format)
            for image in images
        ]
        data = {"pixel_values": padded_images}
        if return_pixel_mask:
            masks = [make_pixel_mask(image=image, output_size=pad_size) for image in images]
            data["pixel_mask"] = masks
        return BatchFeature(data=data, tensor_type=return_tensors)

    def preprocess(
        self,
        images: ImageInput,
        annotations: Optional[Union[List[Dict], List[List[Dict]]]] = None,
        return_segmentation_masks: bool = None,
        masks_path: Optional[Union[str, pathlib.Path]] = None,
        do_resize: Optional[bool] = None,
        size: Optional[Dict[str, int]] = None,
        resample=None,  # PILImageResampling
        do_rescale: Optional[bool] = None,
        rescale_factor: Optional[Union[int, float]] = None,
        do_normalize: Optional[bool] = None,
        image_mean: Optional[Union[float, List[float]]] = None,
        image_std: Optional[Union[float, List[float]]] = None,
        do_pad: Optional[bool] = None,
        format: Optional[Union[str, AnnotionFormat]] = None,
        return_tensors: Optional[Union[TensorType, str]] = None,
        data_format: Union[str, ChannelDimension] = ChannelDimension.FIRST,
        **kwargs,
    ) -> BatchFeature:
        """
        Preprocess an image or a batch of images so that it can be used by the model.
        Args:
            images (`ImageInput`):
                Image or batch of images to preprocess.
            annotations (`List[Dict]` or `List[List[Dict]]`, *optional*):
                List of annotations associated with the image or batch of images. If annotionation is for object
                detection, the annotations should be a dictionary with the following keys:
                - "image_id" (`int`): The image id.
                - "annotations" (`List[Dict]`): List of annotations for an image. Each annotation should be a
                  dictionary. An image can have no annotations, in which case the list should be empty.
                If annotionation is for segmentation, the annotations should be a dictionary with the following keys:
                - "image_id" (`int`): The image id.
                - "segments_info" (`List[Dict]`): List of segments for an image. Each segment should be a dictionary.
                  An image can have no segments, in which case the list should be empty.
                - "file_name" (`str`): The file name of the image.
            return_segmentation_masks (`bool`, *optional*, defaults to self.return_segmentation_masks):
                Whether to return segmentation masks.
            masks_path (`str` or `pathlib.Path`, *optional*):
                Path to the directory containing the segmentation masks.
            do_resize (`bool`, *optional*, defaults to self.do_resize):
                Whether to resize the image.
            size (`Dict[str, int]`, *optional*, defaults to self.size):
                Size of the image after resizing.
            resample (`PILImageResampling`, *optional*, defaults to self.resample):
                Resampling filter to use when resizing the image.
            do_rescale (`bool`, *optional*, defaults to self.do_rescale):
                Whether to rescale the image.
            rescale_factor (`float`, *optional*, defaults to self.rescale_factor):
                Rescale factor to use when rescaling the image.
            do_normalize (`bool`, *optional*, defaults to self.do_normalize):
                Whether to normalize the image.
            image_mean (`float` or `List[float]`, *optional*, defaults to self.image_mean):
                Mean to use when normalizing the image.
            image_std (`float` or `List[float]`, *optional*, defaults to self.image_std):
                Standard deviation to use when normalizing the image.
            do_pad (`bool`, *optional*, defaults to self.do_pad):
                Whether to pad the image.
            format (`str` or `AnnotionFormat`, *optional*, defaults to self.format):
                Format of the annotations.
            return_tensors (`str` or `TensorType`, *optional*, defaults to self.return_tensors):
                Type of tensors to return. If `None`, will return the list of images.
            data_format (`str` or `ChannelDimension`, *optional*, defaults to self.data_format):
                The channel dimension format of the image. If not provided, it will be the same as the input image.
        """
        if "pad_and_return_pixel_mask" in kwargs:
            warnings.warn(
                "The `pad_and_return_pixel_mask` argument is deprecated and will be removed in a future version, "
                "use `do_pad` instead.",
                FutureWarning,
            )
            do_pad = kwargs.pop("pad_and_return_pixel_mask")
        # Resolve every parameter against the instance defaults.
        do_resize = self.do_resize if do_resize is None else do_resize
        size = self.size if size is None else size
        size = get_size_dict(size=size, default_to_square=False)
        resample = self.resample if resample is None else resample
        do_rescale = self.do_rescale if do_rescale is None else do_rescale
        rescale_factor = self.rescale_factor if rescale_factor is None else rescale_factor
        do_normalize = self.do_normalize if do_normalize is None else do_normalize
        image_mean = self.image_mean if image_mean is None else image_mean
        image_std = self.image_std if image_std is None else image_std
        do_pad = self.do_pad if do_pad is None else do_pad
        format = self.format if format is None else format
        if do_resize is not None and size is None:
            raise ValueError("Size and max_size must be specified if do_resize is True.")
        if do_rescale is not None and rescale_factor is None:
            raise ValueError("Rescale factor must be specified if do_rescale is True.")
        if do_normalize is not None and (image_mean is None or image_std is None):
            raise ValueError("Image mean and std must be specified if do_normalize is True.")
        if not is_batched(images):
            images = [images]
            annotations = [annotations] if annotations is not None else None
        if annotations is not None and len(images) != len(annotations):
            raise ValueError(
                f"The number of images ({len(images)}) and annotations ({len(annotations)}) do not match."
            )
        if not valid_images(images):
            raise ValueError(
                "Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, "
                "torch.Tensor, tf.Tensor or jax.ndarray."
            )
        format = AnnotionFormat(format)
        if annotations is not None:
            if format == AnnotionFormat.COCO_DETECTION and not valid_coco_detection_annotations(annotations):
                raise ValueError(
                    "Invalid COCO detection annotations. Annotations must a dict (single image) of list of dicts"
                    "(batch of images) with the following keys: `image_id` and `annotations`, with the latter "
                    "being a list of annotations in the COCO format."
                )
            elif format == AnnotionFormat.COCO_PANOPTIC and not valid_coco_panoptic_annotations(annotations):
                raise ValueError(
                    "Invalid COCO panoptic annotations. Annotations must a dict (single image) of list of dicts "
                    "(batch of images) with the following keys: `image_id`, `file_name` and `segments_info`, with "
                    "the latter being a list of annotations in the COCO format."
                )
            elif format not in SUPPORTED_ANNOTATION_FORMATS:
                raise ValueError(
                    f"Unsupported annotation format: {format} must be one of {SUPPORTED_ANNOTATION_FORMATS}"
                )
        if (
            masks_path is not None
            and format == AnnotionFormat.COCO_PANOPTIC
            and not isinstance(masks_path, (pathlib.Path, str))
        ):
            raise ValueError(
                "The path to the directory containing the mask PNG files should be provided as a"
                f" `pathlib.Path` or string object, but is {type(masks_path)} instead."
            )
        # All transformations expect numpy arrays
        images = [to_numpy_array(image) for image in images]
        # prepare (COCO annotations as a list of Dict -> DETR target as a single Dict per image)
        if annotations is not None:
            prepared_images = []
            prepared_annotations = []
            for image, target in zip(images, annotations):
                target = self.prepare_annotation(
                    image, target, format, return_segmentation_masks=return_segmentation_masks, masks_path=masks_path
                )
                prepared_images.append(image)
                prepared_annotations.append(target)
            images = prepared_images
            annotations = prepared_annotations
            del prepared_images, prepared_annotations
        # transformations
        if do_resize:
            if annotations is not None:
                resized_images, resized_annotations = [], []
                for image, target in zip(images, annotations):
                    orig_size = get_image_size(image)
                    resized_image = self.resize(image, size=size, resample=resample)
                    resized_annotation = self.resize_annotation(target, orig_size, get_image_size(resized_image))
                    resized_images.append(resized_image)
                    resized_annotations.append(resized_annotation)
                images = resized_images
                annotations = resized_annotations
                del resized_images, resized_annotations
            else:
                images = [self.resize(image, size=size, resample=resample) for image in images]
        if do_rescale:
            images = [self.rescale(image, rescale_factor) for image in images]
        if do_normalize:
            images = [self.normalize(image, image_mean, image_std) for image in images]
            if annotations is not None:
                annotations = [
                    self.normalize_annotation(annotation, get_image_size(image))
                    for annotation, image in zip(annotations, images)
                ]
        if do_pad:
            # Pads images and returns their mask: {'pixel_values': ..., 'pixel_mask': ...}
            data = self.pad(images, return_pixel_mask=True, data_format=data_format)
        else:
            images = [to_channel_dimension_format(image, data_format) for image in images]
            data = {"pixel_values": images}
        encoded_inputs = BatchFeature(data=data, tensor_type=return_tensors)
        if annotations is not None:
            encoded_inputs["labels"] = [
                BatchFeature(annotation, tensor_type=return_tensors) for annotation in annotations
            ]
        return encoded_inputs

    def post_process_object_detection(
        self,
        outputs,
        threshold: float = 0.5,
        target_sizes: Union[TensorType, List[Tuple]] = None,
        nms_threshold: float = 0.7,
    ):
        """
        Converts the output of [`DetaForObjectDetection`] into final bounding boxes in (top_left_x, top_left_y,
        bottom_right_x, bottom_right_y) format. Only supports PyTorch.
        Args:
            outputs ([`DetrObjectDetectionOutput`]):
                Raw outputs of the model.
            threshold (`float`, *optional*, defaults to 0.5):
                Score threshold to keep object detection predictions.
            target_sizes (`torch.Tensor` or `List[Tuple[int, int]]`, *optional*):
                Tensor of shape `(batch_size, 2)` or list of tuples (`Tuple[int, int]`) containing the target size
                (height, width) of each image in the batch. If left to None, predictions will not be resized.
            nms_threshold (`float`, *optional*, defaults to 0.7):
                NMS threshold.
        Returns:
            `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels and boxes for an image
            in the batch as predicted by the model.
        """
        out_logits, out_bbox = outputs.logits, outputs.pred_boxes
        batch_size, num_queries, num_labels = out_logits.shape
        if target_sizes is not None:
            if len(out_logits) != len(target_sizes):
                raise ValueError(
                    "Make sure that you pass in as many target sizes as the batch dimension of the logits"
                )
        prob = out_logits.sigmoid()
        # Flatten (query, label) pairs so top-k can be taken over all of them at once.
        all_scores = prob.view(batch_size, num_queries * num_labels).to(out_logits.device)
        all_indexes = torch.arange(num_queries * num_labels)[None].repeat(batch_size, 1).to(out_logits.device)
        all_boxes = torch_int_div(all_indexes, out_logits.shape[2])
        all_labels = all_indexes % out_logits.shape[2]
        boxes = center_to_corners_format(out_bbox)
        boxes = torch.gather(boxes, 1, all_boxes.unsqueeze(-1).repeat(1, 1, 4))
        # and from relative [0, 1] to absolute [0, height] coordinates
        if target_sizes is not None:
            if isinstance(target_sizes, List):
                img_h = torch.Tensor([i[0] for i in target_sizes])
                img_w = torch.Tensor([i[1] for i in target_sizes])
            else:
                img_h, img_w = target_sizes.unbind(1)
            scale_fct = torch.stack([img_w, img_h, img_w, img_h], dim=1).to(boxes.device)
            boxes = boxes * scale_fct[:, None, :]
        results = []
        for b in range(batch_size):
            box = boxes[b]
            score = all_scores[b]
            lbls = all_labels[b]
            # Pre-filter to at most 10k candidates before NMS to bound its cost.
            pre_topk = score.topk(min(10000, len(score))).indices
            box = box[pre_topk]
            score = score[pre_topk]
            lbls = lbls[pre_topk]
            # apply NMS, keep at most 100 detections per image
            keep_inds = batched_nms(box, score, lbls, nms_threshold)[:100]
            score = score[keep_inds]
            lbls = lbls[keep_inds]
            box = box[keep_inds]
            results.append(
                {
                    "scores": score[score > threshold],
                    "labels": lbls[score > threshold],
                    "boxes": box[score > threshold],
                }
            )
        return results
|
2881099/dotnetGen_sqlserver | 2,634 | Common/Common.csproj | <?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="14.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.50727</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{F0054101-9AC9-4E0E-9E78-44EA89FC5C19}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Common</RootNamespace>
<AssemblyName>Common</AssemblyName>
<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
<FileUpgradeFlags>
</FileUpgradeFlags>
<UpgradeBackupLocation>
</UpgradeBackupLocation>
<OldToolsVersion>2.0</OldToolsVersion>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="BaseSocket.cs" />
<Compile Include="Deflate.cs" />
<Compile Include="Misc.cs" />
<Compile Include="Model\BuildInfo.cs" />
<Compile Include="Model\ClientInfo.cs" />
<Compile Include="Model\ColumnInfo.cs" />
<Compile Include="Model\DatabaseInfo.cs" />
<Compile Include="Model\DataSort.cs" />
<Compile Include="Model\ForeignKeyInfo.cs" />
<Compile Include="Model\TableInfo.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
</ItemGroup>
<ItemGroup>
<None Include="Model\vssver2.scc" />
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> |
2881099/dotnetGen_sqlserver | 1,261 | GenMs/Deflate.cs | using System;
using System.IO;
using System.IO.Compression;
using System.Text;
/// <summary>
/// Helper methods for DEFLATE compression and decompression of byte buffers.
/// </summary>
public static class Deflate {
	// Optional C# source header that is prepended by Compress(string) to any
	// text that starts with "using " (i.e. a C# code snippet).
	public static string cs_head = string.Empty;

	/// <summary>
	/// Decompresses a DEFLATE stream. Best effort: if the data is not valid
	/// DEFLATE content, the raw bytes of the stream are returned unchanged.
	/// </summary>
	public static byte[] Decompress(Stream stream) {
		try {
			stream.Position = 0;
			using (MemoryStream ms = new MemoryStream()) {
				using (DeflateStream def = new DeflateStream(stream, CompressionMode.Decompress)) {
					byte[] data = new byte[1024];
					int size = 0;
					while ((size = def.Read(data, 0, data.Length)) > 0) {
						ms.Write(data, 0, size);
					}
				}
				return ms.ToArray();
			}
		} catch {
			// FIX: the original blindly cast `stream as MemoryStream` and called
			// ToArray(), throwing NullReferenceException for any other stream type.
			// Keep the best-effort contract by copying the raw bytes instead.
			MemoryStream source = stream as MemoryStream;
			if (source != null) return source.ToArray();
			using (MemoryStream copy = new MemoryStream()) {
				stream.Position = 0;
				byte[] data = new byte[1024];
				int size = 0;
				while ((size = stream.Read(data, 0, data.Length)) > 0) {
					copy.Write(data, 0, size);
				}
				return copy.ToArray();
			}
		}
	}

	/// <summary>Decompresses a DEFLATE-compressed byte array (best effort, see Decompress(Stream)).</summary>
	public static byte[] Decompress(byte[] bt) {
		return Decompress(new MemoryStream(bt));
	}

	/// <summary>
	/// Compresses UTF-8 text. If the text starts with "using " (a C# snippet),
	/// the shared <see cref="cs_head"/> header is prepended first.
	/// </summary>
	public static byte[] Compress(string text) {
		if (text.Trim().StartsWith("using ")) {
			text = Deflate.cs_head + text;
		}
		return Compress(Encoding.UTF8.GetBytes(text));
	}

	/// <summary>Compresses an entire byte array with DEFLATE.</summary>
	public static byte[] Compress(byte[] bt) {
		return Compress(bt, 0, bt.Length);
	}

	/// <summary>Compresses a slice of a byte array with DEFLATE.</summary>
	public static byte[] Compress(byte[] bt, int startIndex, int length) {
		using (MemoryStream ms = new MemoryStream()) {
			// Dispose the DeflateStream before reading the buffer so the final
			// compressed block is flushed into the MemoryStream.
			using (DeflateStream def = new DeflateStream(ms, CompressionMode.Compress)) {
				def.Write(bt, startIndex, length);
			}
			return ms.ToArray();
		}
	}
}
|
2881099/dotnetGen_sqlserver | 25,896 | GenMs/ConsoleApp.cs | using Model;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
namespace GenMs {
public class ConsoleApp {
ClientInfo _client;
ClientSocket _socket;
public string ConnectionString {
get {
string connStr = null;
if (string.IsNullOrEmpty(this._client.Username)) {
connStr = "Data Source={0};Integrated Security=True;Initial Catalog={3}";
} else {
connStr = "Data Source={0};User ID={1};Password={2};Initial Catalog={3}";
}
return string.Format(connStr, this._client.Server, this._client.Username, this._client.Password, this._client.Database);
}
}
public string Server;
public string Username;
public string Password;
public string Database;
public string SolutionName;
public bool IsMakeSolution;
public bool IsMakeWebAdmin;
public bool IsDownloadRes;
public string OutputPath;
// Command-line entry point. Parses arguments, connects to the remote code
// generation server over TCP, writes the generated files under OutputPath and
// optionally scaffolds/builds/runs the produced solution.
// `wait` is signalled whenever this run finishes (help shown, error, or done).
//
// Fixes vs. original:
//  - the StreamReader used to fetch the server address is now disposed;
//  - the Startup.cs injection branch no longer reads `csprojPath` without a
//    null/exists guard (it previously threw when Startup.cs existed but no
//    .csproj file was found in OutputPath).
public ConsoleApp(string[] args, ManualResetEvent wait) {
    this.OutputPath = Directory.GetCurrentDirectory();
    string args0 = args[0].Trim().ToLower();
    if (args[0] == "?" || args0 == "--help" || args0 == "-help") {
        // ---- help banner + usage text, then exit ----
        var bgcolor = Console.BackgroundColor;
        var fgcolor = Console.ForegroundColor;
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.White;
        Console.Write("##");
        Console.Write("#######################################");
        Console.Write("##");
        Console.BackgroundColor = bgcolor;
        Console.ForegroundColor = fgcolor;
        Console.WriteLine("");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.White;
        Console.Write("##");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.DarkRed;
        Console.Write("                                       ");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.White;
        Console.Write("##");
        Console.BackgroundColor = bgcolor;
        Console.ForegroundColor = fgcolor;
        Console.WriteLine("");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.White;
        Console.Write("##");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.DarkGreen;
        Console.Write("    .NETCore 2.1 + SQLServer 生成器    ");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.White;
        Console.Write("##");
        Console.BackgroundColor = bgcolor;
        Console.ForegroundColor = fgcolor;
        Console.WriteLine("");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.White;
        Console.Write("##");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.DarkRed;
        Console.Write("                                       ");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.White;
        Console.Write("##");
        Console.BackgroundColor = bgcolor;
        Console.ForegroundColor = fgcolor;
        Console.WriteLine("");
        Console.BackgroundColor = ConsoleColor.DarkMagenta;
        Console.ForegroundColor = ConsoleColor.White;
        Console.Write("##");
        Console.Write("#######################################");
        Console.Write("##");
        Console.BackgroundColor = bgcolor;
        Console.ForegroundColor = ConsoleColor.DarkMagenta;
        Console.Write(@"
用于快速创建和更新 .NETCore 2.1 + SQLServer 项目,非常合适敏捷开发;
Github: https://github.com/2881099/dotnetgen_sqlserver
");
        Console.ForegroundColor = ConsoleColor.DarkMagenta;
        Console.Write("Example:");
        Console.ForegroundColor = fgcolor;
        Console.WriteLine(@"
> GenMs 127.0.0.1 -U sa -P 123456 -D dyschool -N dyschool -S -A -R
> GenMs 127.0.0.1 -D dyschool -N dyschool -S -A -R //使用windows登陆
	-U SQLServer账号
	-P SQLServer密码
	-D 需要生成的数据库
	-N 字符串,生成代码的解决方案名和命名空间
	-S 生成解决方案,在项目第一次生成时使用
	-A 生成后台管理
	-R 下载资源
	-O 输出路径(默认:当前目录)");
        wait.Set();
        return;
    }
    // ---- argument parsing: first positional arg is the server host ----
    this.Server = args[0];
    for (int a = 1; a < args.Length; a++) {
        switch (args[a]) {
            case "-U":
                if (a + 1 >= args.Length) Console.WriteLine("-U 参数错误");
                else this.Username = args[a + 1];
                a++;
                break;
            case "-P":
                if (a + 1 >= args.Length) Console.WriteLine("-P 参数错误");
                else this.Password = args[a + 1];
                a++;
                break;
            case "-D":
                if (a + 1 >= args.Length) Console.WriteLine("-D 参数错误");
                else this.Database = args[a + 1];
                a++;
                break;
            case "-N":
                if (a + 1 >= args.Length) Console.WriteLine("-N 参数错误");
                else this.SolutionName = args[a + 1];
                a++;
                break;
            case "-O":
                if (a + 1 >= args.Length) Console.WriteLine("-O 参数错误");
                else this.OutputPath = args[a + 1];
                a++;
                break;
            case "-S":
                this.IsMakeSolution = true;
                break;
            case "-A":
                this.IsMakeWebAdmin = true;
                break;
            case "-R":
                this.IsDownloadRes = true;
                break;
        }
    }
    this._client = new ClientInfo(this.Server, this.Username, this.Password);
    // The generation server's address is published at a well-known URL;
    // dispose the reader/response stream once the address has been read.
    string server;
    using (StreamReader sr = new StreamReader(System.Net.WebRequest.Create("https://files.cnblogs.com/files/kellynic/GenMs_server.css").GetResponse().GetResponseStream(), Encoding.UTF8)) {
        server = sr.ReadToEnd()?.Trim();
    }
    //server = "127.0.0.1:29918";
    Uri uri = new Uri("tcp://" + server + "/");
    this._socket = new ClientSocket();
    this._socket.Error += Socket_OnError;
    this._socket.Receive += Socket_OnReceive;
    this._socket.Connect(uri.Host, uri.Port);
    // Give the socket a moment to establish; abort if it failed to start.
    Thread.CurrentThread.Join(TimeSpan.FromSeconds(1));
    if (this._socket.Running == false) {
        wait.Set();
        return;
    }
    WriteLine("正在生成,稍候 …", ConsoleColor.DarkGreen);
    // ---- ask the server for databases/tables, then request the build ----
    SocketMessager messager = new SocketMessager("GetDatabases", this._client);
    this._socket.Write(messager, delegate (object sender2, ClientSocketReceiveEventArgs e2) {
        List<DatabaseInfo> dbs = e2.Messager.Arg as List<DatabaseInfo>;
    });
    this._client.Database = this.Database;
    List<TableInfo> tables = null;
    messager = new SocketMessager("GetTablesByDatabase", this._client.Database);
    this._socket.Write(messager, delegate (object sender2, ClientSocketReceiveEventArgs e2) {
        tables = e2.Messager.Arg as List<TableInfo>;
    });
    if (tables == null) {
        Console.WriteLine("[" + DateTime.Now.ToString("MM-dd HH:mm:ss") + "] 无法读取表");
        this._socket.Close();
        this._socket.Dispose();
        wait.Set();
        return;
    }
    tables.ForEach(a => a.IsOutput = true);
    List<BuildInfo> bs = null;
    // The table selection is encoded as a "0"/"1" bitmap string, one char per table.
    messager = new SocketMessager("Build", new object[] {
        SolutionName,
        IsMakeSolution,
        string.Join("", tables.ConvertAll<string>(delegate(TableInfo table){
            return string.Concat(table.IsOutput ? 1 : 0);
        }).ToArray()),
        IsMakeWebAdmin,
        IsDownloadRes
    });
    this._socket.Write(messager, delegate (object sender2, ClientSocketReceiveEventArgs e2) {
        bs = e2.Messager.Arg as List<BuildInfo>;
        if (e2.Messager.Arg is Exception) throw e2.Messager.Arg as Exception;
    }, TimeSpan.FromSeconds(60 * 5));
    if (bs != null) {
        // ---- write every generated file to disk ----
        foreach (BuildInfo b in bs) {
            string path = Path.Combine(OutputPath, b.Path);
            Directory.CreateDirectory(Path.GetDirectoryName(path));
            string fileName = Path.GetFileName(b.Path);
            string ext = Path.GetExtension(b.Path);
            Encoding encode = Encoding.UTF8;
            // Binary payloads are written verbatim; everything else is deflate-compressed text.
            if (fileName.EndsWith(".rar") || fileName.EndsWith(".zip") || fileName.EndsWith(".dll")) {
                using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write)) {
                    fs.Write(b.Data, 0, b.Data.Length);
                    fs.Close();
                }
                continue;
            }
            byte[] data = Deflate.Decompress(b.Data);
            string content = Encoding.UTF8.GetString(data);
            // Inject the real connection string into config templates
            // (NOTE: comparison is case-sensitive, so "Web.config" is not matched).
            if (string.Compare(fileName, "web.config") == 0) {
                string place = System.Web.HttpUtility.HtmlEncode(this.ConnectionString);
                content = content.Replace("{connectionString}", place);
            }
            if (fileName.EndsWith(".json")) {
                content = content.Replace("{connectionString}", this.ConnectionString);
            }
            if (string.Compare(ext, ".refresh") == 0) {
                encode = Encoding.Unicode;
            }
            using (StreamWriter sw = new StreamWriter(path, false, encode)) {
                sw.Write(content);
                sw.Close();
            }
        }
        var appsettingsPath = Path.Combine(OutputPath, "appsettings.json");
        var appsettingsPathWebHost = Path.Combine(OutputPath, @"src\WebHost\appsettings.json");
        var htmZipPath = Path.Combine(OutputPath, "htm.zip");
        //解压htm.zip
        if (this.IsDownloadRes && File.Exists(htmZipPath)) {
            try {
                System.IO.Compression.ZipFile.ExtractToDirectory(htmZipPath, OutputPath, Encoding.UTF8, true);
            } catch (Exception ex) {
                var color = Console.ForegroundColor;
                Console.ForegroundColor = ConsoleColor.Red;
                Console.WriteLine($"解压 htm.zip 失败:{ex.Message}");
                Console.ForegroundColor = color;
            }
        }
        if (this.IsMakeSolution) {
            // ---- full scaffold: install tooling, build modules, then run WebHost ----
            WriteLine("代码已生成完毕!使用 -S 生成完整项目,正在建立脚手架,大约需要10秒 …", ConsoleColor.DarkGreen);
            var shellret = ShellRun(OutputPath, "gulp -v");
            if (!string.IsNullOrEmpty(shellret.err)) {
                WriteLine("");
                WriteLine(@"正在安装gulp-cli …", ConsoleColor.DarkGreen);
                shellret = ShellRun(OutputPath, "npm install --global gulp-cli");
                if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
                if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
                if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
            }
            WriteLine("");
            WriteLine(@"正在编译Module\Test …", ConsoleColor.DarkGreen);
            shellret = ShellRun(Path.Combine(OutputPath, @"src\Module\Test"), "dotnet build");
            if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
            if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
            if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
            WriteLine("");
            WriteLine(@"正在编译Module\Admin …", ConsoleColor.DarkGreen);
            shellret = ShellRun(Path.Combine(OutputPath, @"src\Module\Admin"), "dotnet build");
            if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
            if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
            if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
            WriteLine("");
            WriteLine("正在安装npm包 …", ConsoleColor.DarkGreen);
            shellret = ShellRun(Path.Combine(OutputPath, @"src\WebHost"), "npm install");
            if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
            if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
            if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
            WriteLine("");
            WriteLine("正在编译WebHost …", ConsoleColor.DarkGreen);
            shellret = ShellRun(Path.Combine(OutputPath, @"src\WebHost"), "dotnet build");
            if (!string.IsNullOrEmpty(shellret.err)) WriteLine(shellret.err, ConsoleColor.Red);
            if (!string.IsNullOrEmpty(shellret.warn)) WriteLine(shellret.warn, ConsoleColor.Yellow);
            if (!string.IsNullOrEmpty(shellret.info)) WriteLine(shellret.info, ConsoleColor.DarkGray);
            WriteLine("");
            WriteLine($"脚手架建立完成。", ConsoleColor.DarkGreen);
            WriteLine($"{Path.Combine(OutputPath, @"src\WebHost")} 目执行 dotnet run", ConsoleColor.DarkYellow);
            WriteLine("");
            // Launch the freshly built WebHost and block until it exits.
            var pro = new System.Diagnostics.Process();
            pro.StartInfo = new System.Diagnostics.ProcessStartInfo("dotnet", "run --urls=http://0.0.0.0:5000") {
                WorkingDirectory = Path.Combine(OutputPath, @"src\WebHost"),
                EnvironmentVariables = { ["ASPNETCORE_ENVIRONMENT"] = "Development" }
            };
            pro.Start();
            pro.WaitForExit();
        }
        //如果三个选项为false,并且 src\WebHost\appsettings.json 不存在,则在当前目录使用 appsettings.json
        if (this.IsDownloadRes == false && this.IsMakeSolution == false && this.IsMakeWebAdmin == false && File.Exists(appsettingsPathWebHost) == false) {
            // ---- "update-only" mode: patch appsettings.json in place, preserving any user values ----
            var appsettings = Newtonsoft.Json.JsonConvert.DeserializeObject(File.Exists(appsettingsPath) ? File.ReadAllText(appsettingsPath) : "{}") as JToken;
            var oldtxt = appsettings.ToString();
            if (appsettings["ConnectionStrings"] == null) appsettings["ConnectionStrings"] = new JObject();
            if (appsettings["ConnectionStrings"][$"{this.SolutionName}_mssql"] == null) appsettings["ConnectionStrings"][$"{this.SolutionName}_mssql"] = this.ConnectionString + ";Pooling=true;Max Pool Size=100";
            if (appsettings["ConnectionStrings"]["redis1"] == null) appsettings["ConnectionStrings"]["redis1"] = $"127.0.0.1:6379,password=,defaultDatabase=13,poolsize=10,ssl=false,writeBuffer=20480,prefix={this.SolutionName}";
            if (appsettings["ConnectionStrings"]["redis2"] == null) appsettings["ConnectionStrings"]["redis2"] = $"127.0.0.1:6379,password=,defaultDatabase=13,poolsize=10,ssl=false,writeBuffer=20480,prefix={this.SolutionName}";
            if (appsettings[$"{this.SolutionName}_BLL_ITEM_CACHE"] == null) appsettings[$"{this.SolutionName}_BLL_ITEM_CACHE"] = JToken.FromObject(new {
                Timeout = 180
            });
            if (appsettings["Logging"] == null) appsettings["Logging"] = new JObject();
            if (appsettings["Logging"]["IncludeScopes"] == null) appsettings["Logging"]["IncludeScopes"] = false;
            if (appsettings["Logging"]["LogLevel"] == null) appsettings["Logging"]["LogLevel"] = new JObject();
            if (appsettings["Logging"]["LogLevel"]["Default"] == null) appsettings["Logging"]["LogLevel"]["Default"] = "Debug";
            if (appsettings["Logging"]["LogLevel"]["System"] == null) appsettings["Logging"]["LogLevel"]["System"] = "Information";
            if (appsettings["Logging"]["LogLevel"]["Microsoft"] == null) appsettings["Logging"]["LogLevel"]["Microsoft"] = "Information";
            var newtxt = appsettings.ToString();
            if (newtxt != oldtxt) File.WriteAllText(appsettingsPath, newtxt, Encoding.UTF8);
            //增加当前目录 .csproj nuguet 引用 <PackageReference Include="dng.Mssql" Version="" />
            string csprojPath = Directory.GetFiles(OutputPath, "*.csproj").FirstOrDefault();
            if (!string.IsNullOrEmpty(csprojPath) && File.Exists(csprojPath)) {
                if (Regex.IsMatch(File.ReadAllText(csprojPath), @"dng\.Mssql""\s+Version=""", RegexOptions.IgnoreCase) == false) {
                    System.Diagnostics.Process pro = new System.Diagnostics.Process();
                    pro.StartInfo = new System.Diagnostics.ProcessStartInfo("dotnet", "add package dng.Mssql") {
                        WorkingDirectory = OutputPath
                    };
                    pro.Start();
                    pro.WaitForExit();
                }
                if (Regex.IsMatch(File.ReadAllText(csprojPath), @"CSRedisCore""\s+Version=""", RegexOptions.IgnoreCase) == false) {
                    System.Diagnostics.Process pro = new System.Diagnostics.Process();
                    pro.StartInfo = new System.Diagnostics.ProcessStartInfo("dotnet", "add package CSRedisCore") {
                        WorkingDirectory = OutputPath
                    };
                    pro.Start();
                    pro.WaitForExit();
                }
            }
            //向startup.cs注入代码
            string startupPath = Path.Combine(OutputPath, "Startup.cs");
            if (!string.IsNullOrEmpty(startupPath) && File.Exists(startupPath)) {
                //web项目才需要 Caching.CSRedis
                // FIX: guard csprojPath here too — it can be null/missing even when Startup.cs exists.
                if (!string.IsNullOrEmpty(csprojPath) && File.Exists(csprojPath) && Regex.IsMatch(File.ReadAllText(csprojPath), @"Caching.CSRedis""\s+Version=""", RegexOptions.IgnoreCase) == false) {
                    System.Diagnostics.Process pro = new System.Diagnostics.Process();
                    pro.StartInfo = new System.Diagnostics.ProcessStartInfo("dotnet", "add package Caching.CSRedis") {
                        WorkingDirectory = OutputPath
                    };
                    pro.Start();
                    pro.WaitForExit();
                }
                bool isChanged = false;
                var startupCode = File.ReadAllText(startupPath);
                // Ensure the usings that the injected code depends on are present.
                if (Regex.IsMatch(startupCode, @"using\s+Microsoft\.Extensions\.Caching\.Distributed;") == false) {
                    isChanged = true;
                    startupCode = "using Microsoft.Extensions.Caching.Distributed;\r\n" + startupCode;
                }
                if (Regex.IsMatch(startupCode, @"using\s+Microsoft\.Extensions\.Logging;") == false) {
                    isChanged = true;
                    startupCode = "using Microsoft.Extensions.Logging;\r\n" + startupCode;
                }
                if (Regex.IsMatch(startupCode, @"using\s+Microsoft\.Extensions\.Configuration;") == false) {
                    isChanged = true;
                    startupCode = "using Microsoft.Extensions.Configuration;\r\n" + startupCode;
                }
                var servicesName = "services";
                // Inject RedisHelper initialization into ConfigureServices (once).
                if (startupCode.IndexOf("RedisHelper.Initialization") == -1) {
                    startupCode = Regex.Replace(startupCode, @"[\t ]+public\s+void\s+ConfigureServices\s*\(\s*IServiceCollection\s+(\w+)[^\{]+\{", m => {
                        isChanged = true;
                        var connStr1 = @"Configuration[""ConnectionStrings:redis2""]";
                        var connStr2 = @"Configuration[""ConnectionStrings:redis1""]";
                        if (File.Exists(appsettingsPath) == false) {
                            connStr1 = $"127.0.0.1:6379,password=,defaultDatabase=13,poolsize=50,ssl=false,writeBuffer=20480,prefix={this.SolutionName}";
                            connStr2 = $"127.0.0.1:6379,password=,defaultDatabase=13,poolsize=50,ssl=false,writeBuffer=20480,prefix={this.SolutionName}";
                        }
                        return m.Groups[0].Value + $@"
//单redis节点模式,如需开启集群负载,请将注释去掉并做相应配置
RedisHelper.Initialization(
csredis: new CSRedis.CSRedisClient(//null,
//{connStr1},
{connStr2}));
{servicesName = m.Groups[1].Value}.AddSingleton<IDistributedCache>(new Microsoft.Extensions.Caching.Redis.CSRedisCache(RedisHelper.Instance));
";
                    }, RegexOptions.Multiline);
                }
                // Ensure a Configuration property exists so the injected code can read settings.
                if (Regex.IsMatch(startupCode, @"\s+IConfiguration(Root)?\s+Configuration(;|\s+\{)") == false) {
                    startupCode = Regex.Replace(startupCode, @"[\t ]+public\s+void\s+ConfigureServices\s*\(\s*IServiceCollection\s+(\w+)[^\{]+\{", m => {
                        isChanged = true;
                        return $@"
public IConfiguration Configuration {{ get; set; }}
{m.Groups[0].Value}
Configuration = {servicesName = m.Groups[1].Value}.BuildServiceProvider().GetService<IConfiguration>();";
                    }, RegexOptions.Multiline);
                }
                // Inject SqlHelper initialization into Configure (once), adding any
                // missing ILoggerFactory / IApplicationBuilder parameters as needed.
                if (startupCode.IndexOf(this.SolutionName + ".BLL.SqlHelper.Initialization") == -1) {
                    startupCode = Regex.Replace(startupCode, @"([\t ]+public\s+void\s+Configure\s*\()([^\{]+)\{", m => {
                        isChanged = true;
                        var str1 = m.Groups[1].Value;
                        var str2 = m.Groups[2].Value;
                        var loggerFactory = Regex.Match(str2, @"\bILoggerFactory\s+(\w+)");
                        if (loggerFactory.Success == false) str2 = "ILoggerFactory loggerFactory, " + str2;
                        loggerFactory = Regex.Match(str2, @"\bILoggerFactory\s+(\w+)");
                        var appName = Regex.Match(str2, @"\bIApplicationBuilder\s+(\w+)");
                        if (appName.Success == false) str2 = "IApplicationBuilder app, " + str2;
                        appName = Regex.Match(str2, @"\bIApplicationBuilder\s+(\w+)");
                        var connStr = $@"Configuration[""ConnectionStrings:{this.SolutionName}_mssql""]";
                        if (File.Exists(appsettingsPath) == false) {
                            connStr = $"{this.ConnectionString};Pooling=true;Maximum Pool Size=100";
                        }
                        return str1 + str2 + $@"{{
{this.SolutionName}.BLL.SqlHelper.Initialization({appName.Groups[1].Value}.ApplicationServices.GetService<IDistributedCache>(), Configuration.GetSection(""{this.SolutionName}_BLL_ITEM_CACHE""),
{connStr}, /* 此参数可以配置【从数据库】 */ null, {loggerFactory.Groups[1].Value}.CreateLogger(""{this.SolutionName}_DAL_sqlhelper""));
";
                    }, RegexOptions.Multiline);
                }
                if (isChanged) File.WriteAllText(startupPath, startupCode);
            }
        }
        // Drop a convenience .bat that re-runs this exact generation (unless a
        // project-level "update db only" bat already exists).
        if (File.Exists(Path.Combine(OutputPath, "GenMs只更新db.bat")) == false) {
            var batPath = Path.Combine(OutputPath, $"GenMs_{this.SolutionName}_{this.Server}_{this.Database}.bat");
            if (File.Exists(batPath) == false)
                if (string.IsNullOrEmpty(this.Username))
                    File.WriteAllText(batPath, $@"
GenMs {this.Server} -D {this.Database} -N {this.SolutionName}");
                else File.WriteAllText(batPath, $@"
GenMs {this.Server} -U {this.Username} -P {this.Password} -D {this.Database} -N {this.SolutionName}");
        }
    }
    this._socket.Close();
    this._socket.Dispose();
    GC.Collect();
    ConsoleColor fc = Console.ForegroundColor;
    Console.ForegroundColor = ConsoleColor.Green;
    Console.WriteLine("[" + DateTime.Now.ToString("MM-dd HH:mm:ss") + "] The code files be maked in \"" + OutputPath + "\", please check.");
    Console.ForegroundColor = fc;
    wait.Set();
}
// Logs socket failures to the console with a timestamp.
private void Socket_OnError(object sender, ClientSocketErrorEventArgs e) {
    var stamp = DateTime.Now.ToString("MM-dd HH:mm:ss");
    Console.WriteLine("[" + stamp + "] " + e.Exception.Message);
}
// Handles requests pushed by the generation server: executes the SQL locally
// against the target database and replies with the result, echoing the
// incoming messager Id so the server can correlate the response.
private void Socket_OnReceive(object sender, ClientSocketReceiveEventArgs e) {
    string action = e.Messager.Action;
    if (action == "ExecuteDataSet") {
        object[][] ds = null;
        try {
            ds = ConsoleApp.ExecuteDataSet(this.ConnectionString, e.Messager.Arg.ToString());
        } catch (Exception ex) {
            // Report the failure locally; a null result is still sent back.
            this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
        }
        SocketMessager reply = new SocketMessager(action, ds);
        reply.Id = e.Messager.Id;
        this._socket.Write(reply);
    } else if (action == "ExecuteNonQuery") {
        int affected = 0;
        try {
            affected = ConsoleApp.ExecuteNonQuery(this.ConnectionString, e.Messager.Arg.ToString());
        } catch (Exception ex) {
            this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
        }
        SocketMessager reply = new SocketMessager(action, affected);
        reply.Id = e.Messager.Id;
        this._socket.Write(reply);
    } else {
        Console.WriteLine("[" + DateTime.Now.ToString("MM-dd HH:mm:ss") + "] " + "您当前使用的版本未能实现功能!");
    }
}
// Executes a non-query SQL statement and returns the number of affected rows.
// Fix: the SqlCommand is now disposed; the previous manual catch block that
// cleared parameters and closed the connection before rethrowing is redundant
// once both objects are in using blocks (exceptions still propagate unchanged).
// NOTE(review): cmdText arrives pre-built from the server — it is not
// parameterized here, so callers must not feed it untrusted input.
public static int ExecuteNonQuery(string connectionString, string cmdText) {
    using (SqlConnection conn = new SqlConnection(connectionString))
    using (SqlCommand cmd = new SqlCommand(cmdText, conn)) {
        conn.Open();
        return cmd.ExecuteNonQuery();
    }
}
// Executes a query and returns all rows of the (single) result set as a
// jagged array: one object[] per row, one element per column.
// Fix: SqlCommand is now disposed; the original closed the connection and
// cleared parameters twice (both in the catch and after it) — the using
// blocks make that cleanup unconditional and exception-safe.
public static object[][] ExecuteDataSet(string connectionString, string cmdText) {
    List<object[]> rows = new List<object[]>();
    using (SqlConnection conn = new SqlConnection(connectionString))
    using (SqlCommand cmd = new SqlCommand(cmdText, conn)) {
        conn.Open();
        using (var dr = cmd.ExecuteReader()) {
            while (dr.Read()) {
                object[] vals = new object[dr.FieldCount];
                dr.GetValues(vals);
                rows.Add(vals);
            }
        }
    }
    return rows.ToArray();
}
// Runs one or more commands through a hidden cmd.exe in the given working
// directory and returns the trimmed output, classified as (info, warn, err):
// when stdout is non-empty, stderr is treated as warnings; when stdout is
// empty, stderr is treated as errors. Windows-only (spawns cmd.exe).
// NOTE(review): stdout is read to end before stderr; if a command fills the
// stderr pipe buffer first this can deadlock — confirm acceptable for the
// short tool commands used here.
public static (string info, string warn, string err) ShellRun(string cddir, params string[] bat) {
    if (bat == null || bat.Any() == false) return ("", "", "");
    var proc = new System.Diagnostics.Process();
    proc.StartInfo = new System.Diagnostics.ProcessStartInfo {
        CreateNoWindow = true,
        FileName = "cmd.exe",
        UseShellExecute = false,           // required for stream redirection
        RedirectStandardError = true,
        RedirectStandardInput = true,
        RedirectStandardOutput = true,
        WorkingDirectory = cddir
    };
    proc.Start();
    // Feed each command line to the shell, then "exit" so cmd terminates.
    foreach (var cmd in bat)
        proc.StandardInput.WriteLine(cmd);
    proc.StandardInput.WriteLine("exit");
    var outStr = proc.StandardOutput.ReadToEnd();
    var errStr = proc.StandardError.ReadToEnd();
    proc.Close();
    // Strip the echoed prompt line containing the first command...
    var idx = outStr.IndexOf($">{bat[0]}");
    if (idx != -1) {
        idx = outStr.IndexOf("\n", idx);
        if (idx != -1) outStr = outStr.Substring(idx + 1);
    }
    // ...and everything from the echoed ">exit" prompt to the end.
    idx = outStr.LastIndexOf(">exit");
    if (idx != -1) {
        idx = outStr.LastIndexOf("\n", idx);
        if (idx != -1) outStr = outStr.Remove(idx);
    }
    outStr = outStr.Trim();
    if (outStr == "") outStr = null;
    if (errStr == "") errStr = null;
    return (outStr, string.IsNullOrEmpty(outStr) ? null : errStr, string.IsNullOrEmpty(outStr) ? errStr : null);
}
// Colored console output with a trailing newline; delegates to Write so the
// color save/restore logic lives in one place.
public static void WriteLine(string text, ConsoleColor? foregroundColor = null, ConsoleColor? backgroundColor = null) {
    Write(text + "\r\n", foregroundColor, backgroundColor);
}
// Writes text to the console, temporarily applying the requested colors and
// restoring the previous ones afterwards. Null color arguments leave the
// corresponding console color untouched.
public static void Write(string text, ConsoleColor? foregroundColor = null, ConsoleColor? backgroundColor = null) {
    ConsoleColor savedBackground = Console.BackgroundColor;
    ConsoleColor savedForeground = Console.ForegroundColor;
    if (backgroundColor.HasValue) Console.BackgroundColor = backgroundColor.Value;
    if (foregroundColor.HasValue) Console.ForegroundColor = foregroundColor.Value;
    Console.Write(text);
    if (backgroundColor.HasValue) Console.BackgroundColor = savedBackground;
    if (foregroundColor.HasValue) Console.ForegroundColor = savedForeground;
}
}
}
|
2881099/dotnetGen_sqlserver | 1,847 | ServerWinForm/Form1.Designer.cs | namespace ServerWinForm
{
partial class Form1
{
    /// <summary>
    /// Required designer variable.
    /// </summary>
    private System.ComponentModel.IContainer components = null;
    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
    protected override void Dispose(bool disposing)
    {
        if (disposing && (components != null))
        {
            components.Dispose();
        }
        base.Dispose(disposing);
    }
    #region Windows Form Designer generated code
    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        this.textBox1 = new System.Windows.Forms.TextBox();
        this.SuspendLayout();
        //
        // textBox1
        //
        this.textBox1.Dock = System.Windows.Forms.DockStyle.Fill;
        this.textBox1.Location = new System.Drawing.Point(12, 12);
        this.textBox1.Multiline = true;
        this.textBox1.Name = "textBox1";
        this.textBox1.Size = new System.Drawing.Size(509, 306);
        this.textBox1.TabIndex = 0;
        //
        // Form1
        //
        this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
        this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
        this.ClientSize = new System.Drawing.Size(533, 330);
        this.Controls.Add(this.textBox1);
        this.Margin = new System.Windows.Forms.Padding(2);
        this.Name = "Form1";
        this.Padding = new System.Windows.Forms.Padding(12);
        this.Text = "Form1";
        this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.Form1_FormClosing);
        this.Load += new System.EventHandler(this.Form1_Load);
        this.ResumeLayout(false);
        this.PerformLayout();
    }
    #endregion
    // Log output textbox filling the whole form.
    private System.Windows.Forms.TextBox textBox1;
}
}
|
2881099/dotnetGen_sqlserver | 1,567 | ServerWinForm/Form1.cs | using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
namespace ServerWinForm
{
// Main window of the code-generation server host: starts the TCP protocol
// listener on load and shows its log output in a textbox.
public partial class Form1 : Form
{
    public Form1()
    {
        InitializeComponent();
    }
    // Server protocol instance; created on load, disposed when the form closes.
    Server.Protocol _prol;
    private void Form1_Load(object sender, EventArgs e)
    {
        // Push the configured C# header into the compression helper, start
        // listening on the configured port, and route server log lines into
        // the textbox via the thread-safe append helper.
        Deflate.cs_head = Settings.Default.cs_head;
        _prol = Server.Protocol.Create(Settings.Default.socket_port);
        Server.Protocol.debugAppendLog = log => safeAppendText(this.textBox1, log);
    }
    private void Form1_FormClosing(object sender, FormClosingEventArgs e)
    {
        _prol.Dispose();
    }
    delegate void SafeSetText(Component c, string text);
    // Sets a component's Text from any thread, marshalling to the UI thread
    // via Invoke when required. Supports ToolStripItem and Control targets.
    protected void safeSetText(Component c, string text) {
        if (this.InvokeRequired)
            this.Invoke(new SafeSetText(safeSetText), new object[] { c, text });
        else {
            if (c is ToolStripItem) {
                ToolStripItem o = c as ToolStripItem;
                if (o == null) return;
                o.Text = text;
            } else if (c is Control) {
                Control o = c as Control;
                if (o == null) return;
                o.Text = text;
            }
        }
    }
    // Appends a timestamped log line to a TextBox from any thread, marshalling
    // to the UI thread when required; non-TextBox components are ignored.
    protected void safeAppendText(Component c, string text) {
        if (this.InvokeRequired)
            this.Invoke(new SafeSetText(safeAppendText), new object[] { c, text });
        else {
            TextBox o = c as TextBox;
            if (o == null) return;
            o.AppendText(DateTime.Now.ToString("MM:ss ") + text + "\r\n");
            o.ScrollToCaret();
        }
    }
}
} |
2881099/dotnetGen_sqlserver | 1,811 | ServerWinForm/Settings.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace ServerWinForm {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "15.7.0.0")]
// Strongly-typed user settings (auto-generated from Settings.settings;
// manual edits to this file are overwritten on regeneration).
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
    private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
    public static Settings Default {
        get {
            return defaultInstance;
        }
    }
    [global::System.Configuration.UserScopedSettingAttribute()]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Configuration.DefaultSettingValueAttribute("")]
    // C# header text assigned to Deflate.cs_head on startup (see Form1_Load).
    public string cs_head {
        get {
            return ((string)(this["cs_head"]));
        }
        set {
            this["cs_head"] = value;
        }
    }
    [global::System.Configuration.UserScopedSettingAttribute()]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Configuration.DefaultSettingValueAttribute("29918")]
    // TCP port passed to Server.Protocol.Create on startup; default 29918.
    public int socket_port {
        get {
            return ((int)(this["socket_port"]));
        }
        set {
            this["socket_port"] = value;
        }
    }
}
}
|
2881099/dotnetGen_sqlserver | 4,228 | ServerWinForm/ServerWinForm.csproj | <?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="14.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.50727</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{4622A792-72AA-4519-94D7-E00A90609394}</ProjectGuid>
<OutputType>WinExe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>ServerWinForm</RootNamespace>
<AssemblyName>ServerWinForm</AssemblyName>
<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
<FileUpgradeFlags>
</FileUpgradeFlags>
<UpgradeBackupLocation>
</UpgradeBackupLocation>
<OldToolsVersion>2.0</OldToolsVersion>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Data" />
<Reference Include="System.Deployment" />
<Reference Include="System.Drawing" />
<Reference Include="System.Windows.Forms" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="Form1.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="Form1.Designer.cs">
<DependentUpon>Form1.cs</DependentUpon>
</Compile>
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<EmbeddedResource Include="Form1.resx">
<SubType>Designer</SubType>
<DependentUpon>Form1.cs</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
<DesignTime>True</DesignTime>
</Compile>
<None Include="app.config" />
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<None Include="Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
<Compile Include="Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
<DependentUpon>Settings.settings</DependentUpon>
</Compile>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj">
<Project>{F0054101-9AC9-4E0E-9E78-44EA89FC5C19}</Project>
<Name>Common</Name>
</ProjectReference>
<ProjectReference Include="..\Server\Server.csproj">
<Project>{EFE1F5D6-AB1F-4FA6-8E10-9B8A197B31C7}</Project>
<Name>Server</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> |
2881099/dotnetGen_postgresql | 1,248 | ServerWinService/app.config | <?xml version="1.0" encoding="utf-8" ?>
<configuration>
<configSections>
<sectionGroup name="userSettings" type="System.Configuration.UserSettingsGroup, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" >
<section name="ServerWinService.Settings" type="System.Configuration.ClientSettingsSection, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" allowExeDefinition="MachineToLocalUser" requirePermission="false" />
<section name="ServerWinForm.Settings" type="System.Configuration.ClientSettingsSection, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" allowExeDefinition="MachineToLocalUser" requirePermission="false" />
</sectionGroup>
</configSections>
<userSettings>
<ServerWinService.Settings>
<setting name="cs_head" serializeAs="String">
<value />
</setting>
<setting name="socket_port" serializeAs="String">
<value>38888</value>
</setting>
</ServerWinService.Settings>
<ServerWinForm.Settings>
<setting name="cs_head" serializeAs="String">
<value />
</setting>
<setting name="socket_port" serializeAs="String">
<value>28888</value>
</setting>
</ServerWinForm.Settings>
</userSettings>
</configuration> |
27182812/ChatGLM-LLaMA-chinese-insturct | 11,932 | src/transformers/models/deta/configuration_deta.py | # coding=utf-8
# Copyright 2022 SenseTime and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" DETA model configuration"""
import copy
from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ..auto import CONFIG_MAPPING
logger = logging.get_logger(__name__)

# Maps pretrained model identifiers to the URL of their hosted config.json.
DETA_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "ut/deta": "https://huggingface.co/ut/deta/resolve/main/config.json",
}
class DetaConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`DetaModel`]. It is used to instantiate a DETA
    model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
    defaults will yield a similar configuration to that of the DETA
    [SenseTime/deformable-detr](https://huggingface.co/SenseTime/deformable-detr) architecture.

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        backbone_config (`PretrainedConfig` or `dict`, *optional*, defaults to `ResNetConfig()`):
            The configuration of the backbone model.
        num_queries (`int`, *optional*, defaults to 900):
            Number of object queries, i.e. detection slots. This is the maximal number of objects [`DetaModel`] can
            detect in a single image. In case `two_stage` is set to `True`, we use `two_stage_num_proposals` instead.
        max_position_embeddings (`int`, *optional*, defaults to 2048):
            The maximum sequence length that this model might ever be used with.
        d_model (`int`, *optional*, defaults to 256):
            Dimension of the layers.
        encoder_layers (`int`, *optional*, defaults to 6):
            Number of encoder layers.
        decoder_layers (`int`, *optional*, defaults to 6):
            Number of decoder layers.
        encoder_attention_heads (`int`, *optional*, defaults to 8):
            Number of attention heads for each attention layer in the Transformer encoder.
        decoder_attention_heads (`int`, *optional*, defaults to 8):
            Number of attention heads for each attention layer in the Transformer decoder.
        decoder_ffn_dim (`int`, *optional*, defaults to 1024):
            Dimension of the "intermediate" (often named feed-forward) layer in decoder.
        encoder_ffn_dim (`int`, *optional*, defaults to 2048):
            Dimension of the "intermediate" (often named feed-forward) layer in encoder.
        activation_function (`str` or `function`, *optional*, defaults to `"relu"`):
            The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`,
            `"relu"`, `"silu"` and `"gelu_new"` are supported.
        dropout (`float`, *optional*, defaults to 0.1):
            The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
        attention_dropout (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the attention probabilities.
        activation_dropout (`float`, *optional*, defaults to 0.0):
            The dropout ratio for activations inside the fully connected layer.
        init_std (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        init_xavier_std (`float`, *optional*, defaults to 1):
            The scaling factor used for the Xavier initialization gain in the HM Attention map module.
        encoder_layerdrop: (`float`, *optional*, defaults to 0.0):
            The LayerDrop probability for the encoder. See the [LayerDrop paper](see https://arxiv.org/abs/1909.11556)
            for more details.
        auxiliary_loss (`bool`, *optional*, defaults to `False`):
            Whether auxiliary decoding losses (loss at each decoder layer) are to be used.
        position_embedding_type (`str`, *optional*, defaults to `"sine"`):
            Type of position embeddings to be used on top of the image features. One of `"sine"` or `"learned"`.
        class_cost (`float`, *optional*, defaults to 1):
            Relative weight of the classification error in the Hungarian matching cost.
        bbox_cost (`float`, *optional*, defaults to 5):
            Relative weight of the L1 error of the bounding box coordinates in the Hungarian matching cost.
        giou_cost (`float`, *optional*, defaults to 2):
            Relative weight of the generalized IoU loss of the bounding box in the Hungarian matching cost.
        mask_loss_coefficient (`float`, *optional*, defaults to 1):
            Relative weight of the Focal loss in the panoptic segmentation loss.
        dice_loss_coefficient (`float`, *optional*, defaults to 1):
            Relative weight of the DICE/F-1 loss in the panoptic segmentation loss.
        bbox_loss_coefficient (`float`, *optional*, defaults to 5):
            Relative weight of the L1 bounding box loss in the object detection loss.
        giou_loss_coefficient (`float`, *optional*, defaults to 2):
            Relative weight of the generalized IoU loss in the object detection loss.
        eos_coefficient (`float`, *optional*, defaults to 0.1):
            Relative classification weight of the 'no-object' class in the object detection loss.
        num_feature_levels (`int`, *optional*, defaults to 5):
            The number of input feature levels.
        encoder_n_points (`int`, *optional*, defaults to 4):
            The number of sampled keys in each feature level for each attention head in the encoder.
        decoder_n_points (`int`, *optional*, defaults to 4):
            The number of sampled keys in each feature level for each attention head in the decoder.
        two_stage (`bool`, *optional*, defaults to `True`):
            Whether to apply a two-stage deformable DETR, where the region proposals are also generated by a variant of
            DETA, which are further fed into the decoder for iterative bounding box refinement.
        two_stage_num_proposals (`int`, *optional*, defaults to 300):
            The number of region proposals to be generated, in case `two_stage` is set to `True`.
        with_box_refine (`bool`, *optional*, defaults to `True`):
            Whether to apply iterative bounding box refinement, where each decoder layer refines the bounding boxes
            based on the predictions from the previous layer.
        assign_first_stage (`bool`, *optional*, defaults to `True`):
            Stored on the config for use by the modeling code; presumably enables DETA's IoU-based assignment in the
            first (proposal) stage — TODO confirm against the modeling implementation.
        focal_alpha (`float`, *optional*, defaults to 0.25):
            Alpha parameter in the focal loss.

    Examples:

    ```python
    >>> from transformers import DetaConfig, DetaModel

    >>> # Initializing a DETA SenseTime/deformable-detr style configuration
    >>> configuration = DetaConfig()

    >>> # Initializing a model (with random weights) from the SenseTime/deformable-detr style configuration
    >>> model = DetaModel(configuration)

    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""

    model_type = "deta"
    # Expose the generic `hidden_size` / `num_attention_heads` names used across
    # the transformers library as aliases of the DETA-specific attributes.
    attribute_map = {
        "hidden_size": "d_model",
        "num_attention_heads": "encoder_attention_heads",
    }

    def __init__(
        self,
        backbone_config=None,
        num_queries=900,
        max_position_embeddings=2048,
        encoder_layers=6,
        encoder_ffn_dim=2048,
        encoder_attention_heads=8,
        decoder_layers=6,
        decoder_ffn_dim=1024,
        decoder_attention_heads=8,
        encoder_layerdrop=0.0,
        is_encoder_decoder=True,
        activation_function="relu",
        d_model=256,
        dropout=0.1,
        attention_dropout=0.0,
        activation_dropout=0.0,
        init_std=0.02,
        init_xavier_std=1.0,
        return_intermediate=True,
        auxiliary_loss=False,
        position_embedding_type="sine",
        num_feature_levels=5,
        encoder_n_points=4,
        decoder_n_points=4,
        two_stage=True,
        two_stage_num_proposals=300,
        with_box_refine=True,
        assign_first_stage=True,
        class_cost=1,
        bbox_cost=5,
        giou_cost=2,
        mask_loss_coefficient=1,
        dice_loss_coefficient=1,
        bbox_loss_coefficient=5,
        giou_loss_coefficient=2,
        eos_coefficient=0.1,
        focal_alpha=0.25,
        **kwargs,
    ):
        # NOTE(review): `return_intermediate` is accepted but never stored on the
        # instance, so it is silently dropped — confirm whether it should be
        # persisted like the other arguments.
        if backbone_config is None:
            logger.info("`backbone_config` is `None`. Initializing the config with the default `ResNet` backbone.")
            backbone_config = CONFIG_MAPPING["resnet"](out_features=["stage2", "stage3", "stage4"])
        else:
            # Accept a plain dict (e.g. from a serialized config) and rebuild the
            # proper backbone config class from its "model_type" entry.
            if isinstance(backbone_config, dict):
                backbone_model_type = backbone_config.pop("model_type")
                config_class = CONFIG_MAPPING[backbone_model_type]
                backbone_config = config_class.from_dict(backbone_config)

        self.backbone_config = backbone_config
        self.num_queries = num_queries
        self.max_position_embeddings = max_position_embeddings
        self.d_model = d_model
        self.encoder_ffn_dim = encoder_ffn_dim
        self.encoder_layers = encoder_layers
        self.encoder_attention_heads = encoder_attention_heads
        self.decoder_ffn_dim = decoder_ffn_dim
        self.decoder_layers = decoder_layers
        self.decoder_attention_heads = decoder_attention_heads
        self.dropout = dropout
        self.attention_dropout = attention_dropout
        self.activation_dropout = activation_dropout
        self.activation_function = activation_function
        self.init_std = init_std
        self.init_xavier_std = init_xavier_std
        self.encoder_layerdrop = encoder_layerdrop
        self.auxiliary_loss = auxiliary_loss
        self.position_embedding_type = position_embedding_type
        # deformable attributes
        self.num_feature_levels = num_feature_levels
        self.encoder_n_points = encoder_n_points
        self.decoder_n_points = decoder_n_points
        self.two_stage = two_stage
        self.two_stage_num_proposals = two_stage_num_proposals
        self.with_box_refine = with_box_refine
        self.assign_first_stage = assign_first_stage
        if two_stage is True and with_box_refine is False:
            raise ValueError("If two_stage is True, with_box_refine must be True.")
        # Hungarian matcher
        self.class_cost = class_cost
        self.bbox_cost = bbox_cost
        self.giou_cost = giou_cost
        # Loss coefficients
        self.mask_loss_coefficient = mask_loss_coefficient
        self.dice_loss_coefficient = dice_loss_coefficient
        self.bbox_loss_coefficient = bbox_loss_coefficient
        self.giou_loss_coefficient = giou_loss_coefficient
        self.eos_coefficient = eos_coefficient
        self.focal_alpha = focal_alpha
        super().__init__(is_encoder_decoder=is_encoder_decoder, **kwargs)

    @property
    def num_attention_heads(self) -> int:
        # Alias kept for API compatibility with other transformer configs.
        return self.encoder_attention_heads

    @property
    def hidden_size(self) -> int:
        # Alias kept for API compatibility with other transformer configs.
        return self.d_model

    def to_dict(self):
        """
        Serializes this instance to a Python dictionary. Overrides the default [`~PretrainedConfig.to_dict`] so that
        the nested backbone config is serialized as well.

        Returns:
            `Dict[str, any]`: Dictionary of all the attributes that make up this configuration instance.
        """
        output = copy.deepcopy(self.__dict__)
        output["backbone_config"] = self.backbone_config.to_dict()
        output["model_type"] = self.__class__.model_type
        return output
|
2881099/dotnetGen_mysql | 1,815 | Common/Model/ForeignKeyInfo.cs | using System;
using System.Text;
using System.Collections.Generic;
namespace Model {
[Serializable]
/// <summary>
/// Describes a foreign-key relationship between a table and the table it references,
/// including the participating columns on both sides.
/// NOTE(review): the class is [Serializable]; private field names are part of the
/// binary serialization contract, so do not rename them.
/// </summary>
public class ForeignKeyInfo {
    private TableInfo _table;
    private List<ColumnInfo> _columns = new List<ColumnInfo>();
    private TableInfo _referencedTable;
    private List<ColumnInfo> _referencedColumns = new List<ColumnInfo>();
    private string _referencedDBName;
    private string _referencedTableName;
    private List<string> _referencedColumnNames = new List<string>();
    private bool _referencedIsPrimaryKey;

    /// <summary>Creates a foreign key linking <paramref name="table"/> to <paramref name="referencedTable"/>.</summary>
    public ForeignKeyInfo(TableInfo table, TableInfo referencedTable) {
        _table = table;
        _referencedTable = referencedTable;
    }

    /// <summary>
    /// Creates a foreign key whose referenced side is known only by name
    /// (database/solution name and table name), not by a resolved TableInfo.
    /// </summary>
    public ForeignKeyInfo(string referencedSln, string referencedTableName, bool referencedIsPK) {
        _referencedDBName = referencedSln;
        _referencedTableName = referencedTableName;
        _referencedIsPrimaryKey = referencedIsPK;
    }

    /// <summary>The table that owns the foreign key.</summary>
    public TableInfo Table {
        get { return _table; }
        set { _table = value; }
    }

    /// <summary>Columns of the owning table that participate in the key.</summary>
    public List<ColumnInfo> Columns {
        get { return _columns; }
        set { _columns = value; }
    }

    /// <summary>The table referenced by the foreign key (may be null when only names are known).</summary>
    public TableInfo ReferencedTable {
        get { return _referencedTable; }
        set { _referencedTable = value; }
    }

    /// <summary>Columns of the referenced table matched by the key.</summary>
    public List<ColumnInfo> ReferencedColumns {
        get { return _referencedColumns; }
        set { _referencedColumns = value; }
    }

    /// <summary>Name of the database (or solution) containing the referenced table.</summary>
    public string ReferencedDBName {
        get { return _referencedDBName; }
        set { _referencedDBName = value; }
    }

    /// <summary>Name of the referenced table.</summary>
    public string ReferencedTableName {
        get { return _referencedTableName; }
        set { _referencedTableName = value; }
    }

    /// <summary>Names of the referenced columns, used when ReferencedColumns is not resolved.</summary>
    public List<string> ReferencedColumnNames {
        get { return _referencedColumnNames; }
        set { _referencedColumnNames = value; }
    }

    /// <summary>True when the referenced columns form the referenced table's primary key.</summary>
    public bool ReferencedIsPrimaryKey {
        get { return _referencedIsPrimaryKey; }
        set { _referencedIsPrimaryKey = value; }
    }
}
}
|
2881099/dotnetGen_mysql | 1,644 | Common/Model/ColumnInfo.cs | using System;
using System.Collections.Generic;
using MySql.Data.MySqlClient;
using System.Text;
namespace Model {
[Serializable]
/// <summary>
/// Metadata for a single database column: name, MySQL type, length, nullability,
/// identity/clustered/primary-key flags and sort order.
/// NOTE(review): [Serializable] — private field names are part of the binary
/// serialization contract; do not rename them.
/// </summary>
public class ColumnInfo {
    private string _name;
    private MySqlDbType _type;
    private long _length;
    private string _sqlType;
    private DataSort _orderby;
    private bool _isNullable;
    private bool _isIdentity;
    private bool _isClustered;
    private bool _isPrimaryKey;

    /// <summary>Parameterless constructor, required for serialization.</summary>
    public ColumnInfo() { }

    /// <summary>Creates a fully-populated column description.</summary>
    public ColumnInfo(string name, MySqlDbType type, long length, string sqlType, DataSort orderby, bool isNullable, bool isIdentity, bool isClustered, bool isPrimaryKey) {
        _name = name;
        _type = type;
        _length = length;
        _sqlType = sqlType;
        _orderby = orderby;
        _isNullable = isNullable;
        _isIdentity = isIdentity;
        _isClustered = isClustered;
        _isPrimaryKey = isPrimaryKey;
    }

    /// <summary>Column name.</summary>
    public string Name {
        get { return _name; }
        set { _name = value; }
    }

    /// <summary>ADO.NET MySQL type of the column.</summary>
    public MySqlDbType Type {
        get { return _type; }
        set { _type = value; }
    }

    /// <summary>Declared length/precision of the column.</summary>
    public long Length {
        get { return _length; }
        set { _length = value; }
    }

    /// <summary>Raw SQL type string as declared in the schema.</summary>
    public string SqlType {
        get { return _sqlType; }
        set { _sqlType = value; }
    }

    /// <summary>Sort direction associated with the column (see DataSort).</summary>
    public DataSort Orderby {
        get { return _orderby; }
        set { _orderby = value; }
    }

    /// <summary>True when the column accepts NULL.</summary>
    public bool IsNullable {
        get { return _isNullable; }
        set { _isNullable = value; }
    }

    /// <summary>True when the column is auto-increment/identity.</summary>
    public bool IsIdentity {
        get { return _isIdentity; }
        set { _isIdentity = value; }
    }

    /// <summary>True when the column belongs to the clustered index.</summary>
    public bool IsClustered {
        get { return _isClustered; }
        set { _isClustered = value; }
    }

    /// <summary>True when the column is part of the primary key.</summary>
    public bool IsPrimaryKey {
        get { return _isPrimaryKey; }
        set { _isPrimaryKey = value; }
    }
}
}
|
2881099/dotnetGen_mysql | 2,670 | Common/Model/TableInfo.cs | using System;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections.Generic;
namespace Model {
[Serializable]
/// <summary>
/// Metadata for a database table: identity, owner/schema, columns, indexes,
/// foreign keys and key classifications, plus helpers to derive code-friendly names.
/// NOTE(review): [Serializable] — private field names are part of the binary
/// serialization contract; do not rename them.
/// </summary>
public class TableInfo {
    private string _id;
    private string _owner;
    private string _name;
    private List<ColumnInfo> _columns = new List<ColumnInfo>();
    private List<List<ColumnInfo>> _uniques = new List<List<ColumnInfo>>();
    private List<List<ColumnInfo>> _indexes = new List<List<ColumnInfo>>();
    private List<ForeignKeyInfo> _foreignKeys = new List<ForeignKeyInfo>();
    private List<ColumnInfo> _identitys = new List<ColumnInfo>();
    private List<ColumnInfo> _clustereds = new List<ColumnInfo>();
    private List<ColumnInfo> _primaryKeys = new List<ColumnInfo>();
    private string _Type;
    private bool _IsOutput;

    /// <summary>Creates a table description with its id, owner/schema, name and type.</summary>
    public TableInfo(string id, string owner, string name, string type) {
        _id = id;
        _owner = owner;
        _name = name;
        _Type = type;
    }

    /// <summary>
    /// Derives a valid class name from a (possibly dotted) table name:
    /// a leading dot is stripped; otherwise only the FIRST dot is replaced by an
    /// underscore (the counter keeps later dots intact); a leading underscore is
    /// prepended when the result does not start with a letter.
    /// </summary>
    public static string GetClassName(string name) {
        int rr = 0;
        string n = name.StartsWith(".") ? name.Substring(1) : Regex.Replace(name, @"\.", delegate(Match m) {
            if (rr++ > 0) return m.Groups[0].Value;
            return "_";
        });
        return char.IsLetter(n, 0) ? n : string.Concat("_", n);
    }

    /// <summary>Returns the part of the name after the first dot, or the whole name if there is none.</summary>
    public static string GetEntryName(string name) {
        int idx = name.IndexOf('.');
        return idx == -1 ? name : name.Substring(idx + 1);
    }

    /// <summary>Table identifier.</summary>
    public string Id {
        get { return _id; }
    }

    /// <summary>Owner/schema of the table.</summary>
    public string Owner {
        get { return _owner; }
    }

    /// <summary>Table name (without owner).</summary>
    public string Name {
        get { return _name; }
    }

    /// <summary>Code-friendly class name derived from "owner.name" (owner lower-cased).</summary>
    public string ClassName {
        get {
            return GetClassName(_owner.ToLower() + "." + _name);
        }
    }

    /// <summary>"owner.name", or just the name when there is no owner.</summary>
    public string FullName {
        get { return string.IsNullOrEmpty(_owner) ? _name : string.Format("{0}.{1}", _owner, _name); }
    }

    /// <summary>Table type string as provided at construction.</summary>
    public string Type {
        get { return _Type; }
    }

    /// <summary>All columns of the table.</summary>
    public List<ColumnInfo> Columns {
        get { return _columns; }
    }

    /// <summary>Unique constraints, each as the list of columns it covers.</summary>
    public List<List<ColumnInfo>> Uniques {
        get {
            // NOTE(review): empty null-guard — _uniques is initialized inline and never
            // null, so this branch is dead; looks like an unfinished lazy-init. Confirm.
            if (_uniques == null) {
            }
            return _uniques;
        }
    }

    /// <summary>Non-unique indexes, each as the list of columns it covers.</summary>
    public List<List<ColumnInfo>> Indexes {
        get {
            // NOTE(review): dead null-guard, same as Uniques.
            if (_indexes == null) {
            }
            return _indexes;
        }
    }

    /// <summary>Foreign keys declared on the table.</summary>
    public List<ForeignKeyInfo> ForeignKeys {
        get {
            // NOTE(review): dead null-guard, same as Uniques.
            if (_foreignKeys == null) {
            }
            return _foreignKeys;
        }
    }

    /// <summary>Columns forming the primary key.</summary>
    public List<ColumnInfo> PrimaryKeys {
        get {
            // NOTE(review): dead null-guard, same as Uniques.
            if (_primaryKeys == null) {
            }
            return _primaryKeys;
        }
    }

    /// <summary>Columns in the clustered index.</summary>
    public List<ColumnInfo> Clustereds {
        get {
            // NOTE(review): dead null-guard, same as Uniques.
            if (_clustereds == null) {
            }
            return _clustereds;
        }
    }

    /// <summary>Identity (auto-increment) columns.</summary>
    public List<ColumnInfo> Identitys {
        get {
            // NOTE(review): dead null-guard, same as Uniques.
            if (_identitys == null) {
            }
            return _identitys;
        }
    }

    /// <summary>Whether code generation should emit output for this table.</summary>
    public bool IsOutput {
        get { return _IsOutput; }
        set { _IsOutput = value; }
    }
}
}
|
27182812/ChatGLM-LLaMA-chinese-insturct | 19,028 | src/transformers/models/deta/convert_deta_swin_to_pytorch.py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert DETA checkpoints from the original repository.
URL: https://github.com/jozhang97/DETA/tree/master"""
import argparse
import json
from pathlib import Path
import requests
import torch
from huggingface_hub import cached_download, hf_hub_download, hf_hub_url
from PIL import Image
from transformers import DetaConfig, DetaForObjectDetection, DetaImageProcessor, SwinConfig
from transformers.utils import logging
logging.set_verbosity_info()
logger = logging.get_logger(__name__)
def get_deta_config(model_name):
    """
    Build the `DetaConfig` (Swin-Large backbone) for a given checkpoint name.

    Args:
        model_name (`str`): Checkpoint name; a name containing "o365" selects
            Objects365 labels (366 classes), anything else COCO labels (91).

    Returns:
        `DetaConfig`: Config with backbone, detection-head hyperparameters and
        id2label / label2id mappings populated.
    """
    backbone_config = SwinConfig(
        embed_dim=192,
        depths=(2, 2, 18, 2),
        num_heads=(6, 12, 24, 48),
        window_size=12,
        out_features=["stage2", "stage3", "stage4"],
    )

    config = DetaConfig(
        backbone_config=backbone_config,
        num_queries=900,
        encoder_ffn_dim=2048,
        decoder_ffn_dim=2048,
        num_feature_levels=5,
        assign_first_stage=True,
        with_box_refine=True,
        two_stage=True,
    )

    # set labels
    repo_id = "huggingface/label-files"
    if "o365" in model_name:
        num_labels = 366
        filename = "object365-id2label.json"
    else:
        num_labels = 91
        filename = "coco-detection-id2label.json"

    config.num_labels = num_labels
    # Use a context manager so the label-file handle is closed deterministically
    # (the previous `json.load(open(...))` leaked the file object).
    with open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r") as f:
        id2label = json.load(f)
    id2label = {int(k): v for k, v in id2label.items()}
    config.id2label = id2label
    config.label2id = {v: k for k, v in id2label.items()}

    return config
# here we list all keys to be renamed (original name on the left, our name on the right)
def create_rename_keys(config):
    """
    Build the list of `(original_name, hf_name)` parameter renames for a DETA
    checkpoint: Swin backbone stem and stages, the backbone output norms, and
    the deformable transformer encoder/decoder layers. The exact strings are
    the contract — they must match both checkpoints' key layouts.
    """
    rename_keys = []

    # stem
    # fmt: off
    rename_keys.append(("backbone.0.body.patch_embed.proj.weight", "model.backbone.model.embeddings.patch_embeddings.projection.weight"))
    rename_keys.append(("backbone.0.body.patch_embed.proj.bias", "model.backbone.model.embeddings.patch_embeddings.projection.bias"))
    rename_keys.append(("backbone.0.body.patch_embed.norm.weight", "model.backbone.model.embeddings.norm.weight"))
    rename_keys.append(("backbone.0.body.patch_embed.norm.bias", "model.backbone.model.embeddings.norm.bias"))
    # stages
    for i in range(len(config.backbone_config.depths)):
        for j in range(config.backbone_config.depths[i]):
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.norm1.weight", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.layernorm_before.weight"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.norm1.bias", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.layernorm_before.bias"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.attn.relative_position_bias_table", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.attention.self.relative_position_bias_table"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.attn.relative_position_index", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.attention.self.relative_position_index"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.attn.proj.weight", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.attention.output.dense.weight"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.attn.proj.bias", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.attention.output.dense.bias"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.norm2.weight", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.layernorm_after.weight"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.norm2.bias", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.layernorm_after.bias"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.mlp.fc1.weight", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.intermediate.dense.weight"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.mlp.fc1.bias", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.intermediate.dense.bias"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.mlp.fc2.weight", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.output.dense.weight"))
            rename_keys.append((f"backbone.0.body.layers.{i}.blocks.{j}.mlp.fc2.bias", f"model.backbone.model.encoder.layers.{i}.blocks.{j}.output.dense.bias"))

        # only the first three stages have a patch-merging (downsample) layer
        if i < 3:
            rename_keys.append((f"backbone.0.body.layers.{i}.downsample.reduction.weight", f"model.backbone.model.encoder.layers.{i}.downsample.reduction.weight"))
            rename_keys.append((f"backbone.0.body.layers.{i}.downsample.norm.weight", f"model.backbone.model.encoder.layers.{i}.downsample.norm.weight"))
            rename_keys.append((f"backbone.0.body.layers.{i}.downsample.norm.bias", f"model.backbone.model.encoder.layers.{i}.downsample.norm.bias"))

    rename_keys.append(("backbone.0.body.norm1.weight", "model.backbone.model.hidden_states_norms.stage2.weight"))
    rename_keys.append(("backbone.0.body.norm1.bias", "model.backbone.model.hidden_states_norms.stage2.bias"))
    rename_keys.append(("backbone.0.body.norm2.weight", "model.backbone.model.hidden_states_norms.stage3.weight"))
    rename_keys.append(("backbone.0.body.norm2.bias", "model.backbone.model.hidden_states_norms.stage3.bias"))
    rename_keys.append(("backbone.0.body.norm3.weight", "model.backbone.model.hidden_states_norms.stage4.weight"))
    rename_keys.append(("backbone.0.body.norm3.bias", "model.backbone.model.hidden_states_norms.stage4.bias"))

    # transformer encoder
    for i in range(config.encoder_layers):
        rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.sampling_offsets.weight", f"model.encoder.layers.{i}.self_attn.sampling_offsets.weight"))
        rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.sampling_offsets.bias", f"model.encoder.layers.{i}.self_attn.sampling_offsets.bias"))
        rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.attention_weights.weight", f"model.encoder.layers.{i}.self_attn.attention_weights.weight"))
        rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.attention_weights.bias", f"model.encoder.layers.{i}.self_attn.attention_weights.bias"))
        rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.value_proj.weight", f"model.encoder.layers.{i}.self_attn.value_proj.weight"))
        rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.value_proj.bias", f"model.encoder.layers.{i}.self_attn.value_proj.bias"))
        rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.output_proj.weight", f"model.encoder.layers.{i}.self_attn.output_proj.weight"))
        rename_keys.append((f"transformer.encoder.layers.{i}.self_attn.output_proj.bias", f"model.encoder.layers.{i}.self_attn.output_proj.bias"))
        rename_keys.append((f"transformer.encoder.layers.{i}.norm1.weight", f"model.encoder.layers.{i}.self_attn_layer_norm.weight"))
        rename_keys.append((f"transformer.encoder.layers.{i}.norm1.bias", f"model.encoder.layers.{i}.self_attn_layer_norm.bias"))
        rename_keys.append((f"transformer.encoder.layers.{i}.linear1.weight", f"model.encoder.layers.{i}.fc1.weight"))
        rename_keys.append((f"transformer.encoder.layers.{i}.linear1.bias", f"model.encoder.layers.{i}.fc1.bias"))
        rename_keys.append((f"transformer.encoder.layers.{i}.linear2.weight", f"model.encoder.layers.{i}.fc2.weight"))
        rename_keys.append((f"transformer.encoder.layers.{i}.linear2.bias", f"model.encoder.layers.{i}.fc2.bias"))
        rename_keys.append((f"transformer.encoder.layers.{i}.norm2.weight", f"model.encoder.layers.{i}.final_layer_norm.weight"))
        rename_keys.append((f"transformer.encoder.layers.{i}.norm2.bias", f"model.encoder.layers.{i}.final_layer_norm.bias"))

    # transformer decoder
    for i in range(config.decoder_layers):
        rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.sampling_offsets.weight", f"model.decoder.layers.{i}.encoder_attn.sampling_offsets.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.sampling_offsets.bias", f"model.decoder.layers.{i}.encoder_attn.sampling_offsets.bias"))
        rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.attention_weights.weight", f"model.decoder.layers.{i}.encoder_attn.attention_weights.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.attention_weights.bias", f"model.decoder.layers.{i}.encoder_attn.attention_weights.bias"))
        rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.value_proj.weight", f"model.decoder.layers.{i}.encoder_attn.value_proj.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.value_proj.bias", f"model.decoder.layers.{i}.encoder_attn.value_proj.bias"))
        rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.output_proj.weight", f"model.decoder.layers.{i}.encoder_attn.output_proj.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.cross_attn.output_proj.bias", f"model.decoder.layers.{i}.encoder_attn.output_proj.bias"))
        rename_keys.append((f"transformer.decoder.layers.{i}.norm1.weight", f"model.decoder.layers.{i}.encoder_attn_layer_norm.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.norm1.bias", f"model.decoder.layers.{i}.encoder_attn_layer_norm.bias"))
        rename_keys.append((f"transformer.decoder.layers.{i}.self_attn.out_proj.weight", f"model.decoder.layers.{i}.self_attn.out_proj.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.self_attn.out_proj.bias", f"model.decoder.layers.{i}.self_attn.out_proj.bias"))
        rename_keys.append((f"transformer.decoder.layers.{i}.norm2.weight", f"model.decoder.layers.{i}.self_attn_layer_norm.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.norm2.bias", f"model.decoder.layers.{i}.self_attn_layer_norm.bias"))
        rename_keys.append((f"transformer.decoder.layers.{i}.linear1.weight", f"model.decoder.layers.{i}.fc1.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.linear1.bias", f"model.decoder.layers.{i}.fc1.bias"))
        rename_keys.append((f"transformer.decoder.layers.{i}.linear2.weight", f"model.decoder.layers.{i}.fc2.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.linear2.bias", f"model.decoder.layers.{i}.fc2.bias"))
        rename_keys.append((f"transformer.decoder.layers.{i}.norm3.weight", f"model.decoder.layers.{i}.final_layer_norm.weight"))
        rename_keys.append((f"transformer.decoder.layers.{i}.norm3.bias", f"model.decoder.layers.{i}.final_layer_norm.bias"))
    # fmt: on

    return rename_keys
def rename_key(dct, old, new):
    """Move the value stored under key `old` to key `new` (in place)."""
    dct[new] = dct.pop(old)
# we split up the matrix of each encoder layer into queries, keys and values
def read_in_swin_q_k_v(state_dict, backbone_config):
    """
    Split each Swin block's fused `qkv` projection into separate query/key/value
    tensors under the HF naming scheme. Mutates `state_dict` in place: the fused
    `attn.qkv.{weight,bias}` entries are popped and replaced by
    `attention.self.{query,key,value}.{weight,bias}`.
    """
    # Hidden size doubles at every Swin stage.
    stage_dims = [int(backbone_config.embed_dim * 2**stage) for stage in range(len(backbone_config.depths))]
    for stage, depth in enumerate(backbone_config.depths):
        dim = stage_dims[stage]
        for block in range(depth):
            src = f"backbone.0.body.layers.{stage}.blocks.{block}.attn.qkv"
            dst = f"model.backbone.model.encoder.layers.{stage}.blocks.{block}.attention.self"
            # The original checkpoint stores q, k and v stacked in one matrix/bias.
            fused_weight = state_dict.pop(f"{src}.weight")
            fused_bias = state_dict.pop(f"{src}.bias")
            state_dict[f"{dst}.query.weight"] = fused_weight[:dim, :]
            state_dict[f"{dst}.query.bias"] = fused_bias[:dim]
            state_dict[f"{dst}.key.weight"] = fused_weight[dim : dim * 2, :]
            state_dict[f"{dst}.key.bias"] = fused_bias[dim : dim * 2]
            state_dict[f"{dst}.value.weight"] = fused_weight[-dim:, :]
            state_dict[f"{dst}.value.bias"] = fused_bias[-dim:]
def read_in_decoder_q_k_v(state_dict, config):
    """
    Split each decoder self-attention's fused `in_proj` weight/bias into
    separate q/k/v projections under the HF naming scheme. Mutates
    `state_dict` in place.
    """
    hidden_size = config.d_model
    for layer in range(config.decoder_layers):
        # The original checkpoint stacks q, k and v into a single in_proj tensor.
        fused_weight = state_dict.pop(f"transformer.decoder.layers.{layer}.self_attn.in_proj_weight")
        fused_bias = state_dict.pop(f"transformer.decoder.layers.{layer}.self_attn.in_proj_bias")
        prefix = f"model.decoder.layers.{layer}.self_attn"
        state_dict[f"{prefix}.q_proj.weight"] = fused_weight[:hidden_size, :]
        state_dict[f"{prefix}.q_proj.bias"] = fused_bias[:hidden_size]
        state_dict[f"{prefix}.k_proj.weight"] = fused_weight[hidden_size : hidden_size * 2, :]
        state_dict[f"{prefix}.k_proj.bias"] = fused_bias[hidden_size : hidden_size * 2]
        state_dict[f"{prefix}.v_proj.weight"] = fused_weight[-hidden_size:, :]
        state_dict[f"{prefix}.v_proj.bias"] = fused_bias[-hidden_size:]
# We will verify our results on an image of cute cats
def prepare_img():
    """Download and return the standard COCO cats image used to sanity-check conversions."""
    image_url = "http://images.cocodataset.org/val2017/000000039769.jpg"
    return Image.open(requests.get(image_url, stream=True).raw)
@torch.no_grad()
def convert_deta_checkpoint(model_name, pytorch_dump_folder_path, push_to_hub):
    """
    Copy/paste/tweak model's weights to our DETA structure.

    Downloads the original checkpoint from the hub, renames/splits its
    parameters into the HF `DetaForObjectDetection` layout, verifies the
    converted model on a test image against hard-coded expected outputs, and
    optionally saves and/or pushes the result.

    Args:
        model_name: "deta-swin-large" or "deta-swin-large-o365".
        pytorch_dump_folder_path: If truthy, directory to save model+processor to.
        push_to_hub: If True, push converted model and processor to the hub.
    """
    # load config
    config = get_deta_config(model_name)

    # load original state dict
    if model_name == "deta-swin-large":
        checkpoint_path = hf_hub_download(repo_id="nielsr/deta-checkpoints", filename="adet_swin_ft.pth")
    elif model_name == "deta-swin-large-o365":
        checkpoint_path = hf_hub_download(repo_id="jozhang97/deta-swin-l-o365", filename="deta_swin_pt_o365.pth")
    else:
        raise ValueError(f"Model name {model_name} not supported")
    state_dict = torch.load(checkpoint_path, map_location="cpu")["model"]

    # original state dict (printed for debugging/inspection)
    for name, param in state_dict.items():
        print(name, param.shape)

    # rename keys
    rename_keys = create_rename_keys(config)
    for src, dest in rename_keys:
        rename_key(state_dict, src, dest)
    read_in_swin_q_k_v(state_dict, config.backbone_config)
    read_in_decoder_q_k_v(state_dict, config)

    # fix some prefixes (iterate over a copy since keys are popped during the loop)
    for key in state_dict.copy().keys():
        if "transformer.decoder.class_embed" in key or "transformer.decoder.bbox_embed" in key:
            val = state_dict.pop(key)
            state_dict[key.replace("transformer.decoder", "model.decoder")] = val
        if "input_proj" in key:
            val = state_dict.pop(key)
            state_dict["model." + key] = val
        if "level_embed" in key or "pos_trans" in key or "pix_trans" in key or "enc_output" in key:
            val = state_dict.pop(key)
            state_dict[key.replace("transformer", "model")] = val

    # finally, create HuggingFace model and load state dict
    model = DetaForObjectDetection(config)
    model.load_state_dict(state_dict)
    model.eval()
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model.to(device)

    # load image processor
    processor = DetaImageProcessor(format="coco_detection")

    # verify our conversion on image
    img = prepare_img()
    encoding = processor(images=img, return_tensors="pt")
    pixel_values = encoding["pixel_values"]
    outputs = model(pixel_values.to(device))

    # verify logits against reference values recorded from the original implementation
    print("Logits:", outputs.logits[0, :3, :3])
    print("Boxes:", outputs.pred_boxes[0, :3, :3])
    if model_name == "deta-swin-large":
        expected_logits = torch.tensor(
            [[-7.6308, -2.8485, -5.3737], [-7.2037, -4.5505, -4.8027], [-7.2943, -4.2611, -4.6617]]
        )
        expected_boxes = torch.tensor([[0.4987, 0.4969, 0.9999], [0.2549, 0.5498, 0.4805], [0.5498, 0.2757, 0.0569]])
    elif model_name == "deta-swin-large-o365":
        expected_logits = torch.tensor(
            [[-8.0122, -3.5720, -4.9717], [-8.1547, -3.6886, -4.6389], [-7.6610, -3.6194, -5.0134]]
        )
        expected_boxes = torch.tensor([[0.2523, 0.5549, 0.4881], [0.7715, 0.4149, 0.4601], [0.5503, 0.2753, 0.0575]])
    assert torch.allclose(outputs.logits[0, :3, :3], expected_logits.to(device), atol=1e-4)
    assert torch.allclose(outputs.pred_boxes[0, :3, :3], expected_boxes.to(device), atol=1e-4)
    print("Everything ok!")

    if pytorch_dump_folder_path:
        # Save model and processor
        logger.info(f"Saving PyTorch model and processor to {pytorch_dump_folder_path}...")
        Path(pytorch_dump_folder_path).mkdir(exist_ok=True)
        model.save_pretrained(pytorch_dump_folder_path)
        processor.save_pretrained(pytorch_dump_folder_path)

    # Push to hub
    if push_to_hub:
        print("Pushing model and processor to hub...")
        model.push_to_hub(f"jozhang97/{model_name}")
        processor.push_to_hub(f"jozhang97/{model_name}")
if __name__ == "__main__":
    # CLI entry point: parse the checkpoint name, optional output directory and
    # hub flag, then run the conversion.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--model_name",
        type=str,
        default="deta-swin-large",
        choices=["deta-swin-large", "deta-swin-large-o365"],
        help="Name of the model you'd like to convert.",
    )
    parser.add_argument(
        "--pytorch_dump_folder_path",
        default=None,
        type=str,
        help="Path to the folder to output PyTorch model.",
    )
    parser.add_argument(
        "--push_to_hub", action="store_true", help="Whether or not to push the converted model to the 🤗 hub."
    )
    args = parser.parse_args()
    convert_deta_checkpoint(args.model_name, args.pytorch_dump_folder_path, args.push_to_hub)
|
2881099/dotnetGen_sqlserver | 5,985 | MakeCode/FrmView.designer.cs | namespace MakeCode {
	partial class FrmView {
		/// <summary>
		/// Required designer variable.
		/// </summary>
		private System.ComponentModel.IContainer components = null;
		/// <summary>
		/// Clean up any resources being used.
		/// </summary>
		/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
		protected override void Dispose(bool disposing) {
			if (disposing && (components != null)) {
				components.Dispose();
			}
			base.Dispose(disposing);
		}
		#region Windows Form Designer generated code
		/// <summary>
		/// Required method for Designer support - do not modify
		/// the contents of this method with the code editor.
		/// </summary>
		private void InitializeComponent() {
			System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle1 = new System.Windows.Forms.DataGridViewCellStyle();
			System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle2 = new System.Windows.Forms.DataGridViewCellStyle();
			this.btnOk = new System.Windows.Forms.Button();
			this.dgvGridview = new System.Windows.Forms.DataGridView();
			this.dgvColIcon = new System.Windows.Forms.DataGridViewImageColumn();
			this.dgvColName = new System.Windows.Forms.DataGridViewTextBoxColumn();
			this.dgvColDBType = new System.Windows.Forms.DataGridViewTextBoxColumn();
			this.dgvColAllowDBNull = new System.Windows.Forms.DataGridViewCheckBoxColumn();
			this.dgvColView = new System.Windows.Forms.DataGridViewLinkColumn();
			((System.ComponentModel.ISupportInitialize)(this.dgvGridview)).BeginInit();
			this.SuspendLayout();
			//
			// btnOk
			//
			this.btnOk.DialogResult = System.Windows.Forms.DialogResult.Cancel;
			this.btnOk.Location = new System.Drawing.Point(262, 372);
			this.btnOk.Name = "btnOk";
			this.btnOk.Size = new System.Drawing.Size(62, 21);
			this.btnOk.TabIndex = 1;
			this.btnOk.Text = "Ok";
			this.btnOk.UseVisualStyleBackColor = true;
			this.btnOk.Click += new System.EventHandler(this.btnOk_Click);
			//
			// dgvGridview
			//
			this.dgvGridview.AllowUserToAddRows = false;
			this.dgvGridview.AllowUserToResizeRows = false;
			this.dgvGridview.BackgroundColor = System.Drawing.SystemColors.ActiveCaptionText;
			this.dgvGridview.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize;
			this.dgvGridview.Columns.AddRange(new System.Windows.Forms.DataGridViewColumn[] {
            this.dgvColIcon,
            this.dgvColName,
            this.dgvColDBType,
            this.dgvColAllowDBNull,
            this.dgvColView});
			this.dgvGridview.Location = new System.Drawing.Point(12, 12);
			this.dgvGridview.Name = "dgvGridview";
			this.dgvGridview.ReadOnly = true;
			this.dgvGridview.RowHeadersWidthSizeMode = System.Windows.Forms.DataGridViewRowHeadersWidthSizeMode.DisableResizing;
			this.dgvGridview.RowTemplate.Height = 23;
			this.dgvGridview.Size = new System.Drawing.Size(562, 352);
			this.dgvGridview.TabIndex = 0;
			//
			// dgvColIcon
			//
			this.dgvColIcon.AutoSizeMode = System.Windows.Forms.DataGridViewAutoSizeColumnMode.ColumnHeader;
			dataGridViewCellStyle1.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleCenter;
			dataGridViewCellStyle1.NullValue = null;
			dataGridViewCellStyle1.SelectionBackColor = System.Drawing.Color.White;
			this.dgvColIcon.DefaultCellStyle = dataGridViewCellStyle1;
			this.dgvColIcon.HeaderText = " ";
			this.dgvColIcon.Name = "dgvColIcon";
			this.dgvColIcon.ReadOnly = true;
			this.dgvColIcon.Width = 21;
			//
			// dgvColName
			//
			this.dgvColName.HeaderText = "Name";
			this.dgvColName.Name = "dgvColName";
			this.dgvColName.ReadOnly = true;
			this.dgvColName.Width = 210;
			//
			// dgvColDBType
			//
			this.dgvColDBType.HeaderText = "SqlType";
			this.dgvColDBType.Name = "dgvColDBType";
			this.dgvColDBType.ReadOnly = true;
			this.dgvColDBType.Width = 130;
			//
			// dgvColAllowDBNull
			//
			this.dgvColAllowDBNull.HeaderText = "AllowDBNull";
			this.dgvColAllowDBNull.Name = "dgvColAllowDBNull";
			this.dgvColAllowDBNull.ReadOnly = true;
			this.dgvColAllowDBNull.Resizable = System.Windows.Forms.DataGridViewTriState.True;
			this.dgvColAllowDBNull.SortMode = System.Windows.Forms.DataGridViewColumnSortMode.Automatic;
			this.dgvColAllowDBNull.Width = 80;
			//
			// dgvColView
			//
			dataGridViewCellStyle2.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleCenter;
			dataGridViewCellStyle2.SelectionBackColor = System.Drawing.Color.White;
			this.dgvColView.DefaultCellStyle = dataGridViewCellStyle2;
			this.dgvColView.HeaderText = "Relation";
			this.dgvColView.Name = "dgvColView";
			this.dgvColView.ReadOnly = true;
			this.dgvColView.Resizable = System.Windows.Forms.DataGridViewTriState.True;
			this.dgvColView.SortMode = System.Windows.Forms.DataGridViewColumnSortMode.Automatic;
			this.dgvColView.Text = "View";
			this.dgvColView.Width = 60;
			//
			// FrmView
			//
			this.AcceptButton = this.btnOk;
			this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
			this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
			this.CancelButton = this.btnOk;
			this.ClientSize = new System.Drawing.Size(586, 405);
			this.Controls.Add(this.dgvGridview);
			this.Controls.Add(this.btnOk);
			this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
			this.MaximizeBox = false;
			this.Name = "FrmView";
			this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
			this.Text = "靓仔的 c# 代码生成器 (http://www.kellynic.com/)";
			((System.ComponentModel.ISupportInitialize)(this.dgvGridview)).EndInit();
			this.ResumeLayout(false);
		}
		#endregion
		private System.Windows.Forms.Button btnOk;
		public System.Windows.Forms.DataGridView dgvGridview;
		private System.Windows.Forms.DataGridViewImageColumn dgvColIcon;
		private System.Windows.Forms.DataGridViewTextBoxColumn dgvColName;
		private System.Windows.Forms.DataGridViewTextBoxColumn dgvColDBType;
		private System.Windows.Forms.DataGridViewCheckBoxColumn dgvColAllowDBNull;
		private System.Windows.Forms.DataGridViewLinkColumn dgvColView;
	}
} |
2881099/dotnetGen_sqlserver | 4,556 | MakeCode/MakeCode.csproj | <?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="14.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.50727</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{5FDB8603-9878-49E3-9570-04734FDF1FDE}</ProjectGuid>
<OutputType>WinExe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>MakeCode</RootNamespace>
<AssemblyName>MakeCode</AssemblyName>
<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
<FileUpgradeFlags>
</FileUpgradeFlags>
<UpgradeBackupLocation>
</UpgradeBackupLocation>
<OldToolsVersion>2.0</OldToolsVersion>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Data" />
<Reference Include="System.Deployment" />
<Reference Include="System.Drawing" />
<Reference Include="System.Web" />
<Reference Include="System.Windows.Forms" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="ClientSocket.cs" />
<Compile Include="FrmMain.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="FrmMain.designer.cs">
<DependentUpon>FrmMain.cs</DependentUpon>
</Compile>
<Compile Include="FrmView.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="FrmView.designer.cs">
<DependentUpon>FrmView.cs</DependentUpon>
</Compile>
<Compile Include="Lib.cs" />
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<EmbeddedResource Include="FrmMain.resx">
<DependentUpon>FrmMain.cs</DependentUpon>
<SubType>Designer</SubType>
</EmbeddedResource>
<EmbeddedResource Include="FrmView.resx">
<DependentUpon>FrmView.cs</DependentUpon>
<SubType>Designer</SubType>
</EmbeddedResource>
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
<DesignTime>True</DesignTime>
</Compile>
<None Include="app.config" />
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<None Include="Properties\vssver2.scc" />
<None Include="Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
<Compile Include="Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
<DependentUpon>Settings.settings</DependentUpon>
</Compile>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj">
<Project>{F0054101-9AC9-4E0E-9E78-44EA89FC5C19}</Project>
<Name>Common</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> |
27182812/ChatGLM-LLaMA-chinese-insturct | 9,306 | src/transformers/models/mctct/configuration_mctct.py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""M-CTC-T model configuration"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
logger = logging.get_logger(__name__)
MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
"speechbrain/m-ctc-t-large": "https://huggingface.co/speechbrain/m-ctc-t-large/resolve/main/config.json",
# See all M-CTC-T models at https://huggingface.co/models?filter=mctct
}
class MCTCTConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`MCTCTModel`]. It is used to instantiate an
    M-CTC-T model according to the specified arguments, defining the model architecture. Instantiating a configuration
    with the defaults will yield a similar configuration to that of the M-CTC-T
    [speechbrain/m-ctc-t-large](https://huggingface.co/speechbrain/m-ctc-t-large) architecture.
    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.
    Args:
        vocab_size (`int`, *optional*, defaults to 8065):
            Vocabulary size of the M-CTC-T model. Defines the number of different tokens that can be represented by the
            `inputs_ids` passed when calling [`MCTCTModel`].
        hidden_size (`int`, *optional*, defaults to 1536):
            Dimension of the encoder layers and the pooler layer.
        num_hidden_layers (`int`, *optional*, defaults to 36):
            Number of hidden layers in the Transformer encoder.
        intermediate_size (`int`, *optional*, defaults to 6144):
            Dimension of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder.
        num_attention_heads (`int`, *optional*, defaults to 4):
            Number of attention heads for each attention layer in the Transformer encoder.
        attention_head_dim (`int`, *optional*, defaults to 384):
            Dimensions of each attention head for each attention layer in the Transformer encoder.
        max_position_embeddings (`int`, *optional*, defaults to 920):
            The maximum sequence length that this model might ever be used with (after log-mel spectrogram extraction).
        layer_norm_eps (`float`, *optional*, defaults to 1e-5):
            The epsilon used by the layer normalization layers.
        layerdrop (`float`, *optional*, defaults to 0.3):
            The probability of dropping an encoder layer during training. The default 0.3 value is used in the original
            implementation.
        hidden_act (`str` or `function`, *optional*, defaults to `"relu"`):
            The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`,
            `"relu"`, `"selu"` and `"gelu_new"` are supported.
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        hidden_dropout_prob (`float`, *optional*, defaults to 0.3):
            The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
        attention_probs_dropout_prob (`float`, *optional*, defaults to 0.3):
            The dropout ratio for the attention probabilities.
        pad_token_id (`int`, *optional*, defaults to 1):
            The tokenizer index of the pad token.
        bos_token_id (`int`, *optional*, defaults to 0):
            The tokenizer index of the bos token.
        eos_token_id (`int`, *optional*, defaults to 2):
            The tokenizer index of the eos token.
        conv_glu_dim (`int`, *optional*, defaults to 1):
            The dimension of the output of the `Conv1dSubsampler` layer in which GLU is applied on. Though the original
            Flashlight code uses the value of 2, here it's adapted to 1 due to transposition differences.
        conv_dropout (`float`, *optional*, defaults to 0.3):
            The probability of randomly dropping the `Conv1dSubsampler` layer during training.
        num_conv_layers (`int`, *optional*, defaults to 1):
            Number of convolution layers before applying transformer encoder layers.
        conv_kernel (`List[int]`, *optional*, defaults to `[7]`):
            The kernel size of the 1D convolution applied before transformer layers. `len(conv_kernel)` must be equal
            to `num_conv_layers`.
        conv_stride (`List[int]`, *optional*, defaults to `[3]`):
            The stride length of the 1D convolution applied before transformer layers. `len(conv_stride)` must be equal
            to `num_conv_layers`.
        input_feat_per_channel (`int`, *optional*, defaults to 80):
            Feature dimensions of the channels of the input to the Conv1D layer.
        input_channels (`int`, *optional*, defaults to 1):
            Number of input channels of the input to the Conv1D layer.
        conv_channels (`List[int]`, *optional*, defaults to None):
            Channel sizes of intermediate Conv1D layers.
        ctc_loss_reduction (`str`, *optional*, defaults to `"sum"`):
            Specifies the reduction to apply to the output of `torch.nn.CTCLoss`. Only relevant when training an
            instance of [`MCTCTForCTC`].
        ctc_zero_infinity (`bool`, *optional*, defaults to `False`):
            Whether to zero infinite losses and the associated gradients of `torch.nn.CTCLoss`. Infinite losses mainly
            occur when the inputs are too short to be aligned to the targets. Only relevant when training an instance
            of [`MCTCTForCTC`].
    Example:
    ```python
    >>> from transformers import MCTCTConfig, MCTCTModel
    >>> # Initializing a M-CTC-T mctct-large style configuration
    >>> configuration = MCTCTConfig()
    >>> # Initializing a model (with random weights) from the mctct-large style configuration
    >>> model = MCTCTModel(configuration)
    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""
    model_type = "mctct"
    def __init__(
        self,
        vocab_size=8065,
        hidden_size=1536,
        num_hidden_layers=36,
        intermediate_size=6144,
        num_attention_heads=4,
        attention_head_dim=384,
        max_position_embeddings=920,
        layer_norm_eps=1e-5,
        layerdrop=0.3,
        hidden_act="relu",
        initializer_range=0.02,
        hidden_dropout_prob=0.3,
        attention_probs_dropout_prob=0.3,
        pad_token_id=1,
        bos_token_id=0,
        eos_token_id=2,
        conv_glu_dim=1,
        conv_dropout=0.3,
        num_conv_layers=1,
        conv_kernel=(7,),
        conv_stride=(3,),
        input_feat_per_channel=80,
        input_channels=1,
        conv_channels=None,
        ctc_loss_reduction="sum",
        ctc_zero_infinity=False,
        **kwargs,
    ):
        # Special token ids are forwarded explicitly so PretrainedConfig registers them.
        super().__init__(**kwargs, pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id)
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.intermediate_size = intermediate_size
        self.num_attention_heads = num_attention_heads
        self.attention_head_dim = attention_head_dim
        self.max_position_embeddings = max_position_embeddings
        self.layer_norm_eps = layer_norm_eps
        self.layerdrop = layerdrop
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.pad_token_id = pad_token_id
        self.bos_token_id = bos_token_id
        self.eos_token_id = eos_token_id
        self.conv_glu_dim = conv_glu_dim
        self.conv_dropout = conv_dropout
        self.num_conv_layers = num_conv_layers
        self.input_feat_per_channel = input_feat_per_channel
        self.input_channels = input_channels
        self.conv_channels = conv_channels
        self.ctc_loss_reduction = ctc_loss_reduction
        self.ctc_zero_infinity = ctc_zero_infinity
        # prevents config testing fail with exporting to json
        self.conv_kernel = list(conv_kernel)
        self.conv_stride = list(conv_stride)
        if len(self.conv_kernel) != self.num_conv_layers:
            raise ValueError(
                "Configuration for convolutional module is incorrect. "
                "It is required that `len(config.conv_kernel)` == `config.num_conv_layers` "
                f"but is `len(config.conv_kernel) = {len(self.conv_kernel)}`, "
                f"`config.num_conv_layers = {self.num_conv_layers}`."
            )
|
2881099/dotnetGen_mysql | 898 | Common/Properties/AssemblyInfo.cs | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// 有关程序集的常规信息通过下列属性集
// 控制。更改这些属性值可修改
// 与程序集关联的信息。
[assembly: AssemblyTitle("Common")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Common")]
[assembly: AssemblyCopyright("版权所有 (C) 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// 将 ComVisible 设置为 false 使此程序集中的类型
// 对 COM 组件不可见。如果需要从 COM 访问此程序集中的类型,
// 则将该类型上的 ComVisible 属性设置为 true。
[assembly: ComVisible(false)]
// 如果此项目向 COM 公开,则下列 GUID 用于类型库的 ID
[assembly: Guid("b3e3991f-30e6-4edf-ad0b-8a24b747de76")]
// 程序集的版本信息由下面四个值组成:
//
// 主版本
// 次版本
// 内部版本号
// 修订号
//
// 可以指定所有这些值,也可以使用“修订号”和“内部版本号”的默认值,
// 方法是按如下所示使用“*”:
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
2881099/dotnetGen_postgresql | 898 | Server/Properties/AssemblyInfo.cs | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// 有关程序集的常规信息通过下列属性集
// 控制。更改这些属性值可修改
// 与程序集关联的信息。
[assembly: AssemblyTitle("Server")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Server")]
[assembly: AssemblyCopyright("版权所有 (C) 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// 将 ComVisible 设置为 false 使此程序集中的类型
// 对 COM 组件不可见。如果需要从 COM 访问此程序集中的类型,
// 则将该类型上的 ComVisible 属性设置为 true。
[assembly: ComVisible(false)]
// 如果此项目向 COM 公开,则下列 GUID 用于类型库的 ID
[assembly: Guid("1963fe13-0957-46b8-b20c-593eb34a5897")]
// 程序集的版本信息由下面四个值组成:
//
// 主版本
// 次版本
// 内部版本号
// 修订号
//
// 可以指定所有这些值,也可以使用“修订号”和“内部版本号”的默认值,
// 方法是按如下所示使用“*”:
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
27182812/ChatGLM-LLaMA-chinese-insturct | 5,928 | src/transformers/models/mctct/processing_mctct.py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Speech processor class for M-CTC-T
"""
import warnings
from contextlib import contextmanager
from ...processing_utils import ProcessorMixin
class MCTCTProcessor(ProcessorMixin):
    r"""
    Constructs a MCTCT processor which wraps a MCTCT feature extractor and a MCTCT tokenizer into a single processor.
    [`MCTCTProcessor`] offers all the functionalities of [`MCTCTFeatureExtractor`] and [`AutoTokenizer`]. See the
    [`~MCTCTProcessor.__call__`] and [`~MCTCTProcessor.decode`] for more information.
    Args:
        feature_extractor (`MCTCTFeatureExtractor`):
            An instance of [`MCTCTFeatureExtractor`]. The feature extractor is a required input.
        tokenizer (`AutoTokenizer`):
            An instance of [`AutoTokenizer`]. The tokenizer is a required input.
    """
    feature_extractor_class = "MCTCTFeatureExtractor"
    tokenizer_class = "AutoTokenizer"
    def __init__(self, feature_extractor, tokenizer):
        super().__init__(feature_extractor, tokenizer)
        # current_processor is swapped to the tokenizer inside as_target_processor().
        self.current_processor = self.feature_extractor
        self._in_target_context_manager = False
    def __call__(self, *args, **kwargs):
        """
        When used in normal mode, this method forwards all its arguments to MCTCTFeatureExtractor's
        [`~MCTCTFeatureExtractor.__call__`] and returns its output. If used in the context
        [`~MCTCTProcessor.as_target_processor`] this method forwards all its arguments to AutoTokenizer's
        [`~AutoTokenizer.__call__`]. Please refer to the docstring of the above two methods for more information.
        """
        # For backward compatibility
        if self._in_target_context_manager:
            return self.current_processor(*args, **kwargs)
        if "raw_speech" in kwargs:
            warnings.warn("Using `raw_speech` as a keyword argument is deprecated. Use `audio` instead.")
            audio = kwargs.pop("raw_speech")
        else:
            audio = kwargs.pop("audio", None)
        sampling_rate = kwargs.pop("sampling_rate", None)
        text = kwargs.pop("text", None)
        # A single positional argument is treated as the audio input.
        if len(args) > 0:
            audio = args[0]
            args = args[1:]
        if audio is None and text is None:
            raise ValueError("You need to specify either an `audio` or `text` input to process.")
        if audio is not None:
            inputs = self.feature_extractor(audio, *args, sampling_rate=sampling_rate, **kwargs)
        if text is not None:
            encodings = self.tokenizer(text, **kwargs)
        if text is None:
            return inputs
        elif audio is None:
            return encodings
        else:
            # Both modalities present: tokenized text becomes the CTC labels.
            inputs["labels"] = encodings["input_ids"]
            return inputs
    def batch_decode(self, *args, **kwargs):
        """
        This method forwards all its arguments to AutoTokenizer's [`~PreTrainedTokenizer.batch_decode`]. Please refer
        to the docstring of this method for more information.
        """
        return self.tokenizer.batch_decode(*args, **kwargs)
    def pad(self, *args, **kwargs):
        """
        When used in normal mode, this method forwards all its arguments to MCTCTFeatureExtractor's
        [`~MCTCTFeatureExtractor.pad`] and returns its output. If used in the context
        [`~MCTCTProcessor.as_target_processor`] this method forwards all its arguments to PreTrainedTokenizer's
        [`~PreTrainedTokenizer.pad`]. Please refer to the docstring of the above two methods for more information.
        """
        # For backward compatibility
        if self._in_target_context_manager:
            return self.current_processor.pad(*args, **kwargs)
        input_features = kwargs.pop("input_features", None)
        labels = kwargs.pop("labels", None)
        if len(args) > 0:
            input_features = args[0]
            args = args[1:]
        if input_features is not None:
            input_features = self.feature_extractor.pad(input_features, *args, **kwargs)
        if labels is not None:
            labels = self.tokenizer.pad(labels, **kwargs)
        if labels is None:
            return input_features
        elif input_features is None:
            return labels
        else:
            input_features["labels"] = labels["input_ids"]
            return input_features
    def decode(self, *args, **kwargs):
        """
        This method forwards all its arguments to AutoTokenizer's [`~PreTrainedTokenizer.decode`]. Please refer to the
        docstring of this method for more information.
        """
        return self.tokenizer.decode(*args, **kwargs)
    @contextmanager
    def as_target_processor(self):
        """
        Temporarily sets the tokenizer for processing the input. Useful for encoding the labels when fine-tuning MCTCT.
        """
        warnings.warn(
            "`as_target_processor` is deprecated and will be removed in v5 of Transformers. You can process your "
            "labels by using the argument `text` of the regular `__call__` method (either in the same call as "
            "your audio inputs, or in a separate call."
        )
        # NOTE(review): no try/finally here — an exception inside the `with` body would
        # leave the processor stuck in target mode; upstream code has the same shape.
        self._in_target_context_manager = True
        self.current_processor = self.tokenizer
        yield
        self.current_processor = self.feature_extractor
        self._in_target_context_manager = False
|
2881099/dotnetGen_sqlserver | 13,252 | MakeCode/FrmMain.cs | using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Windows.Forms;
using Model;
namespace MakeCode {
public partial class FrmMain : Form {
		/// <summary>Initializes the main window and its designer-generated controls.</summary>
		public FrmMain() {
			InitializeComponent();
		}
		// Connection settings (server/user/password/database) captured from the UI.
		private ClientInfo _client;
		// Persistent socket to the code-generation server (opened in FrmMain_Load).
		private ClientSocket _socket;
		// Tables of the currently selected database; bound to dgvGridview.
		public List<TableInfo> _tables = new List<TableInfo>();
		/// <summary>
		/// Builds a SQL Server connection string from the captured login info. Uses
		/// integrated security when the checkbox is ticked, otherwise username/password.
		/// NOTE(review): values are interpolated without escaping; a password containing
		/// ';' or '"' would break the connection string — confirm whether that matters here.
		/// </summary>
		public string ConnectionString {
			get {
				string connStr = null;
				if (this.chkIntegrated.Checked) {
					connStr = "Data Source={0};Integrated Security=True;Initial Catalog={3}";
				} else {
					connStr = "Data Source={0};User ID={1};Password={2};Initial Catalog={3}";
				}
				return string.Format(connStr, this._client.Server, this._client.Username, this._client.Password, this._client.Database);
			}
		}
		/// <summary>
		/// Rebuilds the table grid: creates the "Name" link column and the "Ins Sel"
		/// checkbox column at runtime, binds <see cref="_tables"/> to the grid, then
		/// re-evaluates whether the Build button should be enabled.
		/// </summary>
		private void BindGridView() {
			DataGridViewLinkColumn dgvColName = new DataGridViewLinkColumn();
			dgvColName.Name = "dgvColName";
			dgvColName.DefaultCellStyle.SelectionBackColor = System.Drawing.Color.White;
			dgvColName.DataPropertyName = "FullName";
			dgvColName.HeaderText = "Name";
			dgvColName.DisplayIndex = 1;
			dgvColName.Width = 206;
			DataGridViewCheckBoxColumn dgvColIsOutput = new DataGridViewCheckBoxColumn();
			dgvColIsOutput.Name = "dgvColIsOutput";
			dgvColIsOutput.DefaultCellStyle.SelectionBackColor = System.Drawing.Color.White;
			dgvColIsOutput.DefaultCellStyle.Alignment = DataGridViewContentAlignment.MiddleCenter;
			dgvColIsOutput.DataPropertyName = "IsOutput";
			dgvColIsOutput.HeaderText = "Ins Sel";
			dgvColIsOutput.DisplayIndex = 2;
			dgvColIsOutput.Width = 60;
			// Columns are defined manually, so stop the grid from generating its own.
			this.dgvGridview.AutoGenerateColumns = false;
			this.dgvGridview.DataSource = null;
			this.dgvGridview.Columns.Clear();
			this.dgvGridview.Columns.AddRange(new DataGridViewColumn[]{
				dgvColName,
				dgvColIsOutput
			});
			dgvGridview.DataSource = _tables;
			// Reuse the solution-name handler to refresh the Build button state.
			txtProject_TextChanged(this, EventArgs.Empty);
		}
		/// <summary>
		/// Opens the TCP connection to the code-generation server configured in
		/// Settings.Default.server and hooks its callbacks; the socket is disposed
		/// together with the form.
		/// </summary>
		private void FrmMain_Load(object sender, EventArgs e) {
			// Parse "host:port" by prefixing a pseudo scheme so Uri does the splitting.
			Uri uri = new Uri("tcp://" + Settings.Default.server + "/");
			this._socket = new ClientSocket();
			this._socket.Error += Socket_OnError;
			this._socket.Receive += Socket_OnReceive;
			this._socket.Connect(uri.Host, uri.Port);
			this.Closed += delegate(object sender2, EventArgs e2) {
				this._socket.Dispose();
			};
		}
		/// <summary>Shows socket/server errors to the user; the stack trace was deliberately dropped.</summary>
		private void Socket_OnError(object sender, ClientSocketErrorEventArgs e) {
			//Lib.Msgbox(e.Exception.Message + "\r\n\r\n" + e.Exception.StackTrace, MessageBoxIcon.Error);
			Lib.Msgbox(e.Exception.Message, MessageBoxIcon.Error);
		}
private void Socket_OnReceive(object sender, ClientSocketReceiveEventArgs e) {
SocketMessager messager = null;
switch (e.Messager.Action) {
case "ExecuteDataSet":
string sql = e.Messager.Arg.ToString();
DataSet ds = null;
try {
ds = ExecuteDataSet(sql);
} catch(Exception ex) {
this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
}
messager = new SocketMessager(e.Messager.Action, ds);
messager.Id = e.Messager.Id;
this._socket.Write(messager);
break;
case "ExecuteNonQuery":
string sql2 = e.Messager.Arg.ToString();
int val = 0;
try {
val = ExecuteNonQuery(sql2);
} catch (Exception ex) {
this.Socket_OnError(this, new ClientSocketErrorEventArgs(ex, 0));
}
messager = new SocketMessager(e.Messager.Action, val);
messager.Id = e.Messager.Id;
this._socket.Write(messager);
break;
default:
Lib.Msgbox("ǰʹõİ汾δʵֹܣ");
break;
}
}
public int ExecuteNonQuery(string cmdText) {
int val = 0;
using (SqlConnection conn = new SqlConnection(this.ConnectionString)) {
SqlCommand cmd = new SqlCommand(cmdText, conn);
try {
if (cmd.Connection.State != ConnectionState.Open) cmd.Connection.Open();
val = cmd.ExecuteNonQuery();
} catch {
cmd.Parameters.Clear();
cmd.Connection.Close();
throw;
}
cmd.Connection.Close();
cmd.Parameters.Clear();
}
return val;
}
public DataSet ExecuteDataSet(string cmdText) {
DataSet ds = new DataSet();
using (SqlConnection conn = new SqlConnection(this.ConnectionString)) {
SqlCommand cmd = new SqlCommand(cmdText, conn);
SqlDataAdapter sda = new SqlDataAdapter(cmd);
try {
if (cmd.Connection.State != ConnectionState.Open) cmd.Connection.Open();
sda.Fill(ds);
} catch {
cmd.Parameters.Clear();
cmd.Connection.Close();
throw;
}
cmd.Connection.Close();
cmd.Parameters.Clear();
}
return ds;
}
		/// <summary>
		/// Toggles the connection state; the button text doubles as the state flag.
		/// "Connect": sends the login info to the server, fetches the database list and
		/// fills the combo box. Otherwise ("DisConnect"): resets the UI to its initial state.
		/// </summary>
		private void btnConnect_Click(object sender, EventArgs e) {
			this.btnConnect.Enabled = false;
			if (this.btnConnect.Text == "Connect") {
				this._client = new ClientInfo(this.txtServer.Text, this.txtUsername.Text, this.txtPassword.Text);
				List<DatabaseInfo> dbs = null;
				SocketMessager messager = new SocketMessager("GetDatabases", this._client);
				// Write() invokes the callback with the server's reply (synchronous from our view).
				this._socket.Write(messager, delegate(object sender2, ClientSocketReceiveEventArgs e2) {
					dbs = e2.Messager.Arg as List<DatabaseInfo>;
				});
				// No database list arrived (error already reported via Socket_OnError).
				if (dbs == null) {
					this.btnConnect.Enabled = true;
					return;
				}
				this.cmbDatabase.DisplayMember = "Name";
				this.cmbDatabase.DataSource = dbs;
				if (this.cmbDatabase.Items.Count > 0) {
					this.cmbDatabase.SelectedIndex = 0;
					this.cmbDatabase.Enabled = true;
				}
				this.txtServer.Enabled = false;
				this.chkIntegrated.Enabled = false;
				this.chkIntegrated_CheckedChanged(sender, e);
			} else {
				this.txtSolution.Clear();
				this.cmbDatabase.DataSource = null;
				this.cmbDatabase.Enabled = false;
				this.btnBuild.Enabled = false;
				this.txtServer.Enabled = true;
				this.chkIntegrated.Enabled = true;
				this.chkIntegrated_CheckedChanged(sender, e);
				this.dgvGridview.DataSource = null;
			}
			this.btnConnect.Text = this.btnConnect.Text == "Connect" ? "DisConnect" : "Connect";
			this.btnConnect.Enabled = true;
		}
		/// <summary>
		/// Fetches the table list of the newly selected database from the server and
		/// rebinds the grid. Skipped while a connect/disconnect toggle is in progress.
		/// </summary>
		private void cmbDatabase_SelectedIndexChanged(object sender, EventArgs e) {
			if (this.btnConnect.Text == "DisConnect" && this.btnConnect.Enabled == false) return;
			this._client.Database = this.cmbDatabase.Text;
			List<TableInfo> tables = null;
			SocketMessager messager = new SocketMessager("GetTablesByDatabase", this._client.Database);
			this._socket.Write(messager, delegate(object sender2, ClientSocketReceiveEventArgs e2) {
				tables = e2.Messager.Arg as List<TableInfo>;
			});
			this._tables = tables;
			this.BindGridView();
		}
		/// <summary>
		/// Runs the code generation: if no table is ticked, simulates a header click to
		/// select all eligible tables; asks for an output folder; sends the build request
		/// to the server and writes every returned file to disk. Archives/dlls are written
		/// raw; other payloads are Deflate-compressed text. web.config gets the real
		/// connection string injected and *.refresh files are written as UTF-16.
		/// </summary>
		private void btnBuild_Click(object sender, EventArgs e) {
			// Nothing selected yet: fake a left-click on the "Ins Sel" header to select all.
			if (this._tables.Find(delegate(TableInfo table) {
				return table.IsOutput;
			}) == null) {
				DataGridViewCellMouseEventArgs e2 = new DataGridViewCellMouseEventArgs(1, -1, 1, 1, new MouseEventArgs(MouseButtons.Left, 1, 1, 1, 1));
				this.dgvGridview_ColumnHeaderMouseClick(this, e2);
			}
			FolderBrowserDialog fbd = new FolderBrowserDialog();
			if (fbd.ShowDialog() != DialogResult.OK) return;
			string selectedPath = fbd.SelectedPath;
			List<BuildInfo> bs = null;
			// The table selection travels as a "0101..." bitmap string, one digit per table.
			SocketMessager messager = new SocketMessager("Build", new object[] {
				this.txtSolution.Text,
				this.chkSolution.Checked,
				string.Join("", this._tables.ConvertAll<string>(delegate(TableInfo table){
					return string.Concat(table.IsOutput ? 1 : 0);
				}).ToArray()),
				this.chkWebAdmin.Checked,
				this.chkDownloadRes.Checked
			});
			// Code generation can be slow, hence the 5-minute reply timeout.
			this._socket.Write(messager, delegate(object sender2, ClientSocketReceiveEventArgs e2) {
				bs = e2.Messager.Arg as List<BuildInfo>;
				if (e2.Messager.Arg is Exception) throw e2.Messager.Arg as Exception;
			}, TimeSpan.FromSeconds(60 * 5));
			if (bs == null) return;
			foreach (BuildInfo b in bs) {
				string path = Path.Combine(selectedPath, b.Path);
				Directory.CreateDirectory(Path.GetDirectoryName(path));
				string fileName = Path.GetFileName(b.Path);
				string ext = Path.GetExtension(b.Path);
				Encoding encode = Encoding.UTF8;
				// Binary payloads are stored uncompressed — write them verbatim.
				if (fileName.EndsWith(".rar") || fileName.EndsWith(".zip") || fileName.EndsWith(".dll")) {
					using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write)) {
						fs.Write(b.Data, 0, b.Data.Length);
						fs.Close();
					}
					continue;
				}
				byte[] data = Deflate.Decompress(b.Data);
				string content = Encoding.UTF8.GetString(data);
				if (string.Compare(fileName, "web.config") == 0) {
					// Connection string is HTML-encoded because it lands inside an XML attribute.
					string place = System.Web.HttpUtility.HtmlEncode(this.ConnectionString);
					content = content.Replace("{connectionString}", place);
				}
				//if (string.Compare(fileName, "procedure.sql") == 0) {
				//	this.ExecuteNonQuery(content);
				//}
				// Visual Studio expects *.refresh files in UTF-16.
				if (string.Compare(ext, ".refresh") == 0) {
					encode = Encoding.Unicode;
				}
				using (StreamWriter sw = new StreamWriter(path, false, encode)) {
					sw.Write(content);
					sw.Close();
				}
			}
			GC.Collect();
			Lib.Msgbox("The code files be maked in \"" + selectedPath + "\", please check.");
			//System.Diagnostics.Process.Start("iexplore.exe", "http://www.penzz.com/");
		}
private void txtProject_TextChanged(object sender, EventArgs e) {
this.btnBuild.Enabled = this._tables != null && this._tables.Count > 0 && this.txtSolution.Text != string.Empty;
}
private void dgvGridview_ColumnHeaderMouseClick(object sender, DataGridViewCellMouseEventArgs e) {
if (e.Button == MouseButtons.Left && e.ColumnIndex == 1 && this._tables != null) {
foreach (TableInfo table in _tables) table.IsOutput = !table.IsOutput &&
(table.Columns.Count > 0 && table.FullName != "dbo.sysdiagrams" || table.Type == "P");
this.BindGridView();
}
}
	private void dgvGridview_CellContentClick(object sender, DataGridViewCellEventArgs e) {
		// Opens a structure-view dialog for the clicked table (main grid) or for the
		// referenced table of a foreign key (inside an already-open view dialog).
		DataGridView dgv = sender as DataGridView;
		if (dgv != null) {
			// The same handler serves both the main form's grid and nested FrmView grids.
			bool isFrmMain = dgv.FindForm() is FrmMain;
			if (e.RowIndex >= 0) {
				DataGridViewColumn column = dgv.Columns[e.ColumnIndex];
				DataGridViewRow row = dgv.Rows[e.RowIndex];
				if (isFrmMain && column.Name == "dgvColName" || column.Name == "dgvColView") {
					string pdgvColName = string.Concat(row.Cells["dgvColName"].Value);
					string dgvColValue = string.Concat(column.Name == "dgvColView" ? row.Cells["dgvColView"].Value : null);
					// In FrmMain the clicked name IS the table; in a view dialog the target
					// table's full name was stashed in the "view" cell's Tag (see below).
					string viewTable = isFrmMain ? pdgvColName : row.Cells["dgvColView"].Tag.ToString();
					string name = isFrmMain ? pdgvColName : dgv.Tag.ToString();
					if (dgvColValue == "FK-x") {
						// NOTE(review): this message is mojibake (the original Chinese text was
						// decoded with the wrong code page) — restore it from source control.
						Lib.Msgbox("ⲿĿֱɺ鿴룡");
						return;
					}
					TableInfo table = _tables.Find(delegate(TableInfo table1) {
						return viewTable == table1.FullName;
					});
					if (table == null) return;
					FrmView frmView = new FrmView();
					frmView.Text = isFrmMain ? (name + " - view") :
						(name + "." + pdgvColName + " - " + table.FullName + " - relation view");
					frmView.dgvGridview.Tag = viewTable;
					foreach (ColumnInfo c1 in table.Columns) {
						string viewText = null;
						object image = c1.IsPrimaryKey ? this.imageList1.Images["PrimaryKey.ico"] : null;
						// FindAll is used for its side effects only: for each FK that contains
						// this column it overwrites viewTable/viewText and picks the key icon.
						table.ForeignKeys.FindAll(delegate(ForeignKeyInfo fk) {
							ColumnInfo c2 = fk.Columns.Find(delegate(ColumnInfo c3) {
								return c3.Name == c1.Name;
							});
							if (c2 != null) {
								if (fk.ReferencedTable != null) {
									viewTable = fk.ReferencedTable.FullName;
									viewText = "View";
								} else {
									// Referenced table was not loaded; mark the link as broken.
									viewTable = fk.ReferencedTableName;
									viewText = "FK-x";
								}
								if (image == null) image = imageList1.Images["Key.ico"];
							}
							return c2 != null;
						});
						frmView.dgvGridview.Rows.Add(new object[] { image, c1.Name, c1.SqlType, c1.IsNullable, viewText });
						// Stash the FK target on the cell so a click inside FrmView can navigate to it.
						if (viewText != null) frmView.dgvGridview.Rows[frmView.dgvGridview.Rows.Count - 1].Cells["dgvColView"].Tag = viewTable;
					}
					// Re-use this handler so view dialogs can be nested.
					frmView.dgvGridview.CellContentClick += dgvGridview_CellContentClick;
					frmView.ShowDialog();
					frmView.Dispose();
				}
			}
		}
	}
private void dgvGridview_CellValueChanged(object sender, DataGridViewCellEventArgs e) {
if (e.RowIndex >= 0) {
DataGridViewColumn column = ((DataGridView)sender).Columns[e.ColumnIndex];
DataGridViewRow row = ((DataGridView)sender).Rows[e.RowIndex];
if (column.Name == "dgvColIsOutput") {
txtProject_TextChanged(sender, e);
}
}
}
	private void FrmMain_FormClosing(object sender, FormClosingEventArgs e) {
		// Persist user settings (connection fields, options) when the main window closes.
		Properties.Settings.Default.Save();
	}
private void chkIntegrated_CheckedChanged(object sender, EventArgs e) {
this.txtUsername.Enabled = this.txtServer.Enabled && !this.chkIntegrated.Checked;
this.txtPassword.Enabled = this.txtServer.Enabled && !this.chkIntegrated.Checked;
}
	private void dgvGridview_DataError(object sender, DataGridViewDataErrorEventArgs e) {
		// Swallow grid data errors so formatting/commit problems never surface as dialogs.
		e.Cancel = true;
	}
private void dgvGridview_CellFormatting(object sender, DataGridViewCellFormattingEventArgs e) {
if (e.ColumnIndex == 0 && this._tables != null) {
switch (this._tables[e.RowIndex].Type) {
case "P":
e.CellStyle.BackColor = ColorTranslator.FromHtml("#CDEDFC");
break;
}
}
}
}
}
|
2881099/dotnetGen_postgresql | 14,089 | Server/Properties/Resources.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace Server.Properties {
using System;
    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "15.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {
        private static global::System.Resources.ResourceManager resourceMan;
        private static global::System.Globalization.CultureInfo resourceCulture;
        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }
        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                if (object.ReferenceEquals(resourceMan, null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Server.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }
        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups that use this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to
        ///rem dotnet restore
        ///
        ///rem cd src/Module/Admin && dotnet build && cd ../../../
        ///rem cd src/Module/Order && dotnet build && cd ../../../
        ///rem cd src/Module/Search && dotnet build && cd ../../../
        ///
        ///dotnet build
        ///
        ///rem cd src/WebHost && npm install && npm install --global gulp-cli && gulp copy-module
        ///cd src/WebHost && gulp copy-module && cd ../../
        ///
        ///echo "Then type 'dotnet run' in src/WebHost to start the app."
        ///
        ///pause.
        /// </summary>
        internal static string _build_bat {
            get {
                return ResourceManager.GetString("_build_bat", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to ###############################################################################
        ///# Set default behavior to automatically normalize line endings.
        ///###############################################################################
        ///* text=auto
        ///
        ///###############################################################################
        ///# Set default behavior for command prompt diff.
        ///#
        ///# This is need for earlier builds of msysgit that does not have it on by
        ///# default for csharp files.
        ///# Note: This is only used by comma [rest of string was truncated]";.
        /// </summary>
        internal static string _gitattributes {
            get {
                return ResourceManager.GetString("_gitattributes", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to ## Ignore Visual Studio temporary files, build results, and
        ///## files generated by popular Visual Studio add-ons.
        ///
        ///# User-specific files
        ///*.suo
        ///*.user
        ///*.userosscache
        ///*.sln.docstates
        ///
        ///# User-specific files (MonoDevelop/Xamarin Studio)
        ///*.userprefs
        ///
        ///# Build results
        ///[Dd]ebug/
        ///[Dd]ebugPublic/
        ///[Rr]elease/
        ///[Rr]eleases/
        ///[Xx]64/
        ///[Xx]86/
        ///[Bb]uild/
        ///bld/
        ///[Bb]in/
        ///[Oo]bj/
        ///
        ///# Visual Studio 2015 cache/options directory
        ///.vs/
        ///# Uncomment if you have tasks that create the project's static files in wwwr [rest of string was truncated]";.
        /// </summary>
        internal static string _gitignore {
            get {
                return ResourceManager.GetString("_gitignore", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized resource of type System.Byte[].
        /// </summary>
        internal static byte[] htm_zip {
            get {
                object obj = ResourceManager.GetObject("htm_zip", resourceCulture);
                return ((byte[])(obj));
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to using Microsoft.AspNetCore.Cors;
        ///using Microsoft.AspNetCore.Http;
        ///using Microsoft.AspNetCore.Mvc;
        ///using Microsoft.AspNetCore.Mvc.Filters;
        ///using Microsoft.Extensions.Logging;
        ///using Newtonsoft.Json;
        ///using System;
        ///using System.Collections;
        ///using System.Linq;
        ///using System.Threading.Tasks;
        ///
        ///[ServiceFilter(typeof(CustomExceptionFilter)), EnableCors("cors_all")]
        ///public partial class BaseController : Controller {
        ///	public ILogger _logger;
        ///	public ISession Session { get { return HttpContext.Session; } } [rest of string was truncated]";.
        /// </summary>
        internal static string Infrastructure_Controllers_BaseController_cs {
            get {
                return ResourceManager.GetString("Infrastructure_Controllers_BaseController_cs", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to using Microsoft.AspNetCore.Hosting;
        ///using Microsoft.AspNetCore.Http;
        ///using Microsoft.AspNetCore.Mvc;
        ///using Microsoft.AspNetCore.Mvc.Filters;
        ///using Microsoft.Extensions.Configuration;
        ///using Microsoft.Extensions.Logging;
        ///using System;
        ///using System.Collections.Generic;
        ///using System.Security.Cryptography;
        ///using System.Text;
        ///using System.Threading.Tasks;
        ///
        ///public class CustomExceptionFilter : Attribute, IExceptionFilter {
        ///	private ILogger _logger = null;
        ///	private IConfiguration _cfg = null;
        ///	privat [rest of string was truncated]";.
        /// </summary>
        internal static string Infrastructure_Controllers_CustomExceptionFilter_cs {
            get {
                return ResourceManager.GetString("Infrastructure_Controllers_CustomExceptionFilter_cs", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to using Newtonsoft.Json;
        ///using System;
        ///using System.Text.RegularExpressions;
        ///
        ///public static class GlobalExtensions {
        ///	public static object Json(this Microsoft.AspNetCore.Mvc.Rendering.IHtmlHelper html, object obj) {
        ///		string str = JsonConvert.SerializeObject(obj);
        ///		if (!string.IsNullOrEmpty(str)) str = Regex.Replace(str, @"<(/?script[\s>])", "<\"+\"$1", RegexOptions.IgnoreCase);
        ///		if (html == null) return str;
        ///		return html.Raw(str);
        ///	}
        ///
        ///	/// <summary>
        ///	/// 转格林时间,并以ISO8601格式化字符串
        ///	/// </summary> [rest of string was truncated]";.
        /// </summary>
        internal static string Infrastructure_Extensions_GlobalExtensions_cs {
            get {
                return ResourceManager.GetString("Infrastructure_Extensions_GlobalExtensions_cs", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to using Microsoft.AspNetCore.Builder;
        ///using Microsoft.Extensions.DependencyInjection;
        ///
        ///public interface IModuleInitializer {
        ///	void Init(IApplicationBuilder services);
        ///}.
        /// </summary>
        internal static string Infrastructure_ModuleBasic_IModuleInitializer_cs {
            get {
                return ResourceManager.GetString("Infrastructure_ModuleBasic_IModuleInitializer_cs", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to using System.Linq;
        ///using System.Reflection;
        ///
        ///public class ModuleInfo {
        ///	public string Name { get; set; }
        ///
        ///	public Assembly Assembly { get; set; }
        ///
        ///	public string ShortName {
        ///		get {
        ///			return Name.Split('.').Last();
        ///		}
        ///	}
        ///
        ///	public string Path { get; set; }
        ///}.
        /// </summary>
        internal static string Infrastructure_ModuleBasic_ModuleInfo_cs {
            get {
                return ResourceManager.GetString("Infrastructure_ModuleBasic_ModuleInfo_cs", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to using Microsoft.AspNetCore.Mvc.Razor;
        ///using System.Collections.Generic;
        ///using System.Linq;
        ///
        ///public class ModuleViewLocationExpander : IViewLocationExpander {
        ///	private const string _moduleKey = "module";
        ///
        ///	public IEnumerable<string> ExpandViewLocations(ViewLocationExpanderContext context, IEnumerable<string> viewLocations) {
        ///		if (context.Values.ContainsKey(_moduleKey)) {
        ///			var module = context.Values[_moduleKey];
        ///			if (!string.IsNullOrWhiteSpace(module)) {
        ///				var moduleViewLocations = new stri [rest of string was truncated]";.
        /// </summary>
        internal static string Infrastructure_ModuleBasic_ModuleViewLocationExpander_cs {
            get {
                return ResourceManager.GetString("Infrastructure_ModuleBasic_ModuleViewLocationExpander_cs", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to [Mm]odule/
        ///wwwroot/[Mm]odule/.
        /// </summary>
        internal static string WebHost_gitignore {
            get {
                return ResourceManager.GetString("WebHost_gitignore", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to "use strict";
        ///
        ///var gulp = require('gulp'),
        ///	clean = require('gulp-clean'),
        ///	glob = require("glob");
        ///
        ///var paths = {
        ///	devModule: "../Module/",
        ///	hostModule: "./Module/",
        ///	hostWwwrootModules: "./wwwroot/module/"
        ///};
        ///
        ///var modules = loadModules();
        ///
        ///gulp.task('clean-module', function () {
        ///	return gulp.src([paths.hostModule + '*', paths.hostWwwrootModules + '*'], { read: false })
        ///		.pipe(clean());
        ///});
        ///
        ///gulp.task('copy-module', ['clean-module'], function () {
        ///	modules.forEach(f [rest of string was truncated]";.
        /// </summary>
        internal static string WebHost_gulpfile_js {
            get {
                return ResourceManager.GetString("WebHost_gulpfile_js", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to {
        ///	"version": "1.0.0",
        ///	"name": "aaa",
        ///	"private": true,
        ///	"devDependencies": {
        ///		"gulp": "3.9.1",
        ///		"gulp-clean": "0.3.2",
        ///		"glob": "7.1.1"
        ///	}
        ///}.
        /// </summary>
        internal static string WebHost_package_json {
            get {
                return ResourceManager.GetString("WebHost_package_json", resourceCulture);
            }
        }
        /// <summary>
        ///   Looks up a localized string similar to <?xml version="1.0" encoding="utf-8"?>
        ///<configuration>
        ///
        ///	<!--
        ///		Configure your application settings in appsettings.json. Learn more at http://go.microsoft.com/fwlink/?LinkId=786380
        ///	-->
        ///
        ///	<system.webServer>
        ///		<handlers>
        ///			<add name="aspNetCore" path="*" verb="*" modules="AspNetCoreModule" resourceType="Unspecified"/>
        ///		</handlers>
        ///		<aspNetCore processPath="%LAUNCHER_PATH%" arguments="%LAUNCHER_ARGS%" stdoutLogEnabled="false" stdoutLogFile=".\logs\stdout" forwardWindowsAuthToken="f [rest of string was truncated]";.
        /// </summary>
        internal static string WebHost_web_config {
            get {
                return ResourceManager.GetString("WebHost_web_config", resourceCulture);
            }
        }
    }
}
|
27182812/ChatGLM-LLaMA-chinese-insturct | 34,053 | src/transformers/models/mctct/modeling_mctct.py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch M-CTC-T model."""
import math
import random
from typing import Optional, Tuple, Union
import torch
import torch.utils.checkpoint
from torch import nn
from ...activations import ACT2FN
from ...deepspeed import is_deepspeed_zero3_enabled
from ...file_utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward
from ...modeling_outputs import BaseModelOutput, CausalLMOutput
from ...modeling_utils import (
PreTrainedModel,
apply_chunking_to_forward,
find_pruneable_heads_and_indices,
prune_linear_layer,
)
from ...pytorch_utils import is_torch_less_than_1_9
from ...utils import logging
from .configuration_mctct import MCTCTConfig
logger = logging.get_logger(__name__)
# MCTCT uses 1.9+ APIs (e.g. torch.div(..., rounding_mode="trunc") below), so warn on old torch.
if is_torch_less_than_1_9:
    logger.warning(
        f"You are using torch=={torch.__version__}, but torch>=1.9.0 is required to use MCTCTModel. Please upgrade"
        " torch."
    )
# Offset of the hidden-states entry in model output tuples — presumably consumed further
# down the file (usage not visible in this chunk); TODO confirm.
_HIDDEN_STATES_START_POSITION = 1
_CONFIG_FOR_DOC = "MCTCTConfig"
# Base docstring
_CHECKPOINT_FOR_DOC = "speechbrain/m-ctc-t-large"
_EXPECTED_OUTPUT_SHAPE = [1, 195, 1536]
# CTC docstring
_CTC_EXPECTED_OUTPUT = '"Mr. Quilter is the apostle of the middle classes, and we\'re glad to welcome his gospel."'
_CTC_EXPECTED_LOSS = 1885.65
# Hub checkpoints compatible with this architecture.
MCTCT_PRETRAINED_MODEL_ARCHIVE_LIST = [
    "speechbrain/m-ctc-t-large",
    # See all M-CTC-T models at https://huggingface.co/models?filter=mctct
]
# Copied from transformers.models.bart.modeling_bart._expand_mask
def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):
    """
    Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
    """
    bsz, src_len = mask.size()
    tgt_len = tgt_len if tgt_len is not None else src_len
    # Broadcast the per-token mask over a singleton head dim and tgt_len query positions.
    expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype)
    # Flip semantics to an additive bias: 1 (attend) -> 0.0, 0 (padding) -> dtype's most negative value.
    inverted_mask = 1.0 - expanded_mask
    return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min)
class MCTCTConv1dSubsampler(nn.Module):
    """
    Convolutional subsampler: a stack of 1D convolution (along temporal dimension) followed by non-linear activation
    via gated linear units (https://arxiv.org/abs/1911.08460)

    Input is `(batch, time, input_feat_per_channel * input_channels)`; output is
    `(batch, time', hidden_size)` where `time'` is the conv-subsampled length.
    """

    def __init__(self, config):
        super().__init__()
        self.config = config
        self.glu_dim = config.conv_glu_dim
        self.dropout = nn.Dropout(config.conv_dropout)
        self.num_layers = config.num_conv_layers
        self.in_channels = config.input_feat_per_channel * config.input_channels
        if self.num_layers > 1:
            if config.conv_channels is None:
                raise ValueError(
                    "Need to specify `conv_channels` configuration in `MCTCTConfig` to use multiple convolution"
                    " layers."
                )
            self.mid_channels = config.conv_channels
        else:
            self.mid_channels = None
        self.out_channels = config.hidden_size * 2  # considering GLU halving
        self.kernel_size = config.conv_kernel
        self.stride = config.conv_stride
        # NOTE: MCTCT by construction only uses one convolution kernel. I've made this flexible to allow for
        # multiple layers of convolutions, but not sure if this model definition should just restrict it
        # to one layer. This becomes especially relevant when considering the padding like line 1 of forward().
        #
        # FIX: each conv layer's output is halved by the GLU in forward(), so layer i > 0 must
        # consume `mid_channels[i - 1] // 2` input channels. The previous expression
        # (`self.mid_channels[i]`) made every `num_conv_layers > 1` configuration fail at
        # runtime with a channel mismatch; the default single-layer path is unaffected.
        self.conv_layers = nn.ModuleList(
            nn.Conv1d(
                self.in_channels if i == 0 else self.mid_channels[i - 1] // 2,
                self.mid_channels[i] if i < self.num_layers - 1 else self.out_channels,
                kernel_size=k,
                stride=self.stride[i],
                padding="valid",
            )
            for i, k in enumerate(self.kernel_size)
        )

    def forward(self, input_features):
        # NOTE: in reference to the NOTE in __init__, right now it just calculates padding as if
        # there will be just one conv layer.
        padding = sum([size // 2 for size in self.kernel_size])  # (7, 7) -> (3, 3)
        input_features = torch.nn.functional.pad(input_features, (0, 0, padding, padding), "constant", 0)
        hidden_states = input_features.transpose(1, 2).contiguous()  # -> Batch x Frame x Time
        for conv in self.conv_layers:
            hidden_states = conv(hidden_states)
            # GLU halves the channel dim, recovering each layer's nominal width.
            hidden_states = nn.functional.glu(hidden_states, dim=self.glu_dim)
            hidden_states = self.dropout(hidden_states)
        hidden_states = hidden_states.transpose(1, 2).contiguous()  # -> Batch x Time x Frame
        return hidden_states
class MCTCTEmbeddings(nn.Module):
    """Construct the embeddings from word, position and token_type embeddings."""
    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
        # NOTE(review): position_embeddings is created (so it appears in checkpoints) but is
        # never applied in forward() below — confirm this matches the reference implementation.
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)
        # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
        # any TensorFlow checkpoint file
        # self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.LayerNorm = MCTCTLayerNorm()
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # position_ids (1, len position emb) is contiguous in memory and exported when serialized
        self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))
        self.register_buffer(
            "token_type_ids",
            torch.zeros(self.position_ids.size(), dtype=torch.long, device=self.position_ids.device),
            persistent=False,
        )
    def forward(
        self, input_features=None, token_type_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0
    ):
        input_shape = input_features.size() if input_features is not None else inputs_embeds.size()[:-1]
        seq_length = input_shape[1]
        # position_ids are resolved here but never used afterwards (see NOTE in __init__).
        if position_ids is None:
            position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length]
        # Setting the token_type_ids to the registered buffer in constructor where it is all zeros, which usually occurs
        # when its auto-generated, registered buffer helps users when tracing the model without passing token_type_ids, solves
        # issue #5664
        if token_type_ids is None:
            if hasattr(self, "token_type_ids"):
                buffered_token_type_ids = self.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(input_shape[0], seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device)
        if inputs_embeds is None:
            inputs_embeds = self.word_embeddings(input_features)
        token_type_embeddings = self.token_type_embeddings(token_type_ids)
        # Only word + token-type embeddings are summed, then scalar affine "LayerNorm" and dropout.
        embeddings = inputs_embeds + token_type_embeddings
        embeddings = self.LayerNorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings
class MCTCTSelfAttention(nn.Module):
    """Multi-head self-attention with MCTCT-style relative position embeddings.

    Projections are bias-free and the head dimension comes from `attention_head_dim`
    rather than `hidden_size // num_attention_heads`.
    """
    def __init__(self, config):
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention "
                f"heads ({config.num_attention_heads})"
            )
        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = config.attention_head_dim
        self.all_head_size = self.num_attention_heads * self.attention_head_size
        self.query = nn.Linear(config.hidden_size, self.all_head_size, bias=False)
        self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=False)
        self.value = nn.Linear(config.hidden_size, self.all_head_size, bias=False)
        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
        self.max_position_embeddings = config.max_position_embeddings
        # One embedding per relative distance; 2 * max_pos - 1 covers [-(max_pos-1), max_pos-1].
        self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size)
        self.is_decoder = config.is_decoder
    def transpose_for_scores(self, x):
        # (batch, seq, all_head_size) -> (batch, heads, seq, head_dim)
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(*new_x_shape)
        return x.permute(0, 2, 1, 3)
    def reshape_fortran(self, x, shape):
        # Column-major (Fortran-order) reshape, emulated by reversing the dims around a C-order reshape.
        if len(x.shape) > 0:
            x = x.permute(*reversed(range(len(x.shape))))
        return x.reshape(*reversed(shape)).permute(*reversed(range(len(shape))))
    def relative_position_embedding_rotate(self, scores):
        # Skews the (distance, position) score matrix so that each query position is aligned with
        # the scores of its own relative distances — TODO confirm against the reference model.
        # NOTE: should re-evaluate whether this re-implementation was truly necessary
        # or the reason why my complete re-haul worked was due to some other part
        # of the code. Adding this and the reshape fortrain code seems very undesirable.
        scores = scores.permute(0, 2, 3, 1)  # e.g. [10, 1839, 14, 4]
        batch, hidden_state, seq_len, heads = scores.shape
        # e.g. [10, 1853, 14, 4]
        scores = torch.cat((scores, torch.zeros((batch, seq_len, seq_len, heads), device=scores.device)), dim=1)
        # e.g. [10, 25942, 1, 4]
        scores = self.reshape_fortran(scores, [batch, (hidden_state + seq_len) * seq_len, 1, heads])
        # e.g. [10, 25928, 1, 4]
        scores = scores[:, : (seq_len + hidden_state - 1) * seq_len]
        # e.g. [10, 1852, 14, 4]
        scores = self.reshape_fortran(scores, [batch, hidden_state + seq_len - 1, seq_len, heads])
        halfpoint = hidden_state // 2
        scores = scores[:, halfpoint : halfpoint + seq_len].transpose(1, 2)  # e.g. [10, 14, 14, 4]
        return scores.permute(0, 3, 1, 2)
    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        output_attentions=False,
    ):
        mixed_query_layer = self.query(hidden_states)
        # Pre-scale the query so both content and relative-position scores share the 1/sqrt(d) factor.
        mixed_query_layer = mixed_query_layer / math.sqrt(self.attention_head_size)
        key_layer = self.transpose_for_scores(self.key(hidden_states))
        value_layer = self.transpose_for_scores(self.value(hidden_states))
        query_layer = self.transpose_for_scores(mixed_query_layer)
        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
        # relative key position embeddings
        positional_embedding = self.distance_embedding.weight
        relative_position_scores = torch.einsum("lh, bche -> bcle", positional_embedding, query_layer.transpose(2, 3))
        relative_position_scores = self.relative_position_embedding_rotate(relative_position_scores)
        attention_scores = attention_scores + relative_position_scores
        if attention_mask is not None:
            # Apply the attention mask is (precomputed for all layers in MCTCTModel forward() function)
            attention_scores = attention_scores + attention_mask
        # Normalize the attention scores to probabilities.
        attention_probs = nn.functional.softmax(attention_scores, dim=-1)
        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs)
        # Mask heads if we want to
        if head_mask is not None:
            attention_probs = attention_probs * head_mask
        context_layer = torch.matmul(attention_probs, value_layer)
        # (batch, heads, seq, head_dim) -> (batch, seq, all_head_size)
        context_layer = context_layer.permute(0, 2, 1, 3).flatten(start_dim=-2)
        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)
        return outputs
class MCTCTLayerNorm(nn.Module):
    """Scalar elementwise affine: y = w * x + b with a single learnable weight and bias.

    Despite the name, this performs no normalization.
    """

    def __init__(self):
        super().__init__()
        # One learnable scale and one learnable shift, broadcast over every element.
        self.singleton_weight = nn.Parameter(torch.ones(1))
        self.singleton_bias = nn.Parameter(torch.zeros(1))

    def forward(self, hidden_states):
        scaled = hidden_states * self.singleton_weight
        return scaled + self.singleton_bias
class MCTCTSelfOutput(nn.Module):
    """Attention output sublayer: bias-free projection, dropout, residual add, LayerNorm."""

    def __init__(self, config):
        super().__init__()
        self.config = config
        # No bias term on the projection.
        self.dense = nn.Linear(config.hidden_size, config.hidden_size, bias=False)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        # Post-norm residual: LayerNorm(dropout(W x) + residual).
        projected = self.dense(hidden_states)
        projected = self.dropout(projected)
        return self.LayerNorm(projected + input_tensor)
class MCTCTAttention(nn.Module):
    """Self-attention plus its output sublayer, with support for head pruning."""
    def __init__(self, config):
        super().__init__()
        self.self = MCTCTSelfAttention(config)
        self.output = MCTCTSelfOutput(config)
        # Indices of heads already removed, so repeated pruning stays consistent.
        self.pruned_heads = set()
    def prune_heads(self, heads):
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads
        )
        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        # Output projection is pruned along its input dimension (dim=1).
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)
        # Update hyper params and store pruned heads
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)
    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        output_attentions=False,
    ):
        self_outputs = self.self(
            hidden_states,
            attention_mask,
            head_mask,
            output_attentions,
        )
        # Residual + LayerNorm over the attention context.
        attention_output = self.output(self_outputs[0], hidden_states)
        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs
class MCTCTIntermediate(nn.Module):
    """Feed-forward expansion: hidden_size -> intermediate_size, then the activation."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size, bias=False)
        # A string selects a function from ACT2FN; a callable is used as-is.
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def forward(self, hidden_states):
        return self.intermediate_act_fn(self.dense(hidden_states))
class MCTCTOutput(nn.Module):
    """Feed-forward contraction: intermediate_size -> hidden_size with dropout, residual and LayerNorm."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size, bias=False)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        # Post-norm residual: LayerNorm(dropout(W x) + residual).
        projected = self.dropout(self.dense(hidden_states))
        return self.LayerNorm(projected + input_tensor)
class MCTCTLayer(nn.Module):
    """One encoder block: self-attention followed by a chunked feed-forward sublayer."""

    def __init__(self, config: MCTCTConfig):
        super().__init__()
        # Submodule construction order is preserved deliberately (parameter init RNG order).
        self.seq_len_dim = 1
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        self.intermediate = MCTCTIntermediate(config)
        self.attention = MCTCTAttention(config)
        self.is_decoder = config.is_decoder
        self.output = MCTCTOutput(config)

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        output_attentions=False,
    ):
        attn_outputs = self.attention(
            hidden_states, attention_mask, head_mask, output_attentions=output_attentions
        )
        attn_output = attn_outputs[0]
        extras = attn_outputs[1:]  # attention probs when output_attentions=True
        # Apply the feed-forward sublayer in chunks along the sequence dimension.
        layer_output = apply_chunking_to_forward(
            self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attn_output
        )
        return (layer_output,) + extras

    def feed_forward_chunk(self, attention_output):
        # Expansion + activation, then contraction with residual/LayerNorm.
        return self.output(self.intermediate(attention_output), attention_output)
class MCTCTPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """
    config_class = MCTCTConfig
    base_model_prefix = "mctct"
    main_input_name = "input_features"
    _keys_to_ignore_on_load_missing = ["position_ids"]
    supports_gradient_checkpointing = True
    def _init_weights(self, module):
        """Initialize the weights"""
        std = self.config.initializer_range
        if isinstance(module, nn.Linear):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
        elif isinstance(module, MCTCTLayerNorm):
            module.singleton_weight.data.fill_(1.0)
            module.singleton_bias.data.zero_()
        # NOTE(review): nn.Linear modules are re-initialized here a second time (same
        # distribution, so harmless); this branch mainly exists to also cover nn.Conv1d.
        if isinstance(module, (nn.Linear, nn.Conv1d)):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
    def _get_feat_extract_output_lengths(self, input_lengths: torch.LongTensor):
        """
        Computes the output length of the convolutional layers
        """
        dilation = 1
        for _, kernel_sz, stride in zip(
            range(self.config.num_conv_layers), self.config.conv_kernel, self.config.conv_stride
        ):
            # Standard conv output-length formula with the subsampler's symmetric padding.
            padding = kernel_sz // 2
            input_lengths = input_lengths + 2 * padding - dilation * (kernel_sz - 1) - 1
            input_lengths = torch.div(input_lengths, stride, rounding_mode="trunc") + 1
        return input_lengths
    def _get_feature_vector_attention_mask(self, feature_vector_length, attention_mask):
        # generate creates 3D attention mask, because of the shape of input_features
        # convert it to 2D if thats the case
        if len(attention_mask.shape) > 2:
            attention_mask = attention_mask[:, :, -1]
        # subsampled_lengths = attention_mask.sum(-1)
        subsampled_lengths = self._get_feat_extract_output_lengths(attention_mask.sum(-1))
        bsz = attention_mask.size()[0]
        attention_mask = torch.zeros(
            (bsz, feature_vector_length), dtype=attention_mask.dtype, device=attention_mask.device
        )
        # these two operations makes sure that all values
        # before the output lengths indices are attended to
        attention_mask[(torch.arange(bsz, device=attention_mask.device), subsampled_lengths - 1)] = 1
        # Set a 1 at each sequence's last valid index, then flip-cumsum-flip to turn it
        # into a prefix of ones up to (and including) that index.
        attention_mask = attention_mask.flip([-1]).cumsum(-1).flip([-1]).long()
        return attention_mask
    def _set_gradient_checkpointing(self, module, value=False):
        # Only the encoder supports gradient checkpointing.
        if isinstance(module, (MCTCTEncoder)):
            module.gradient_checkpointing = value
# Class-level docstring shared by all MCTCT model classes (injected via `add_start_docstrings`).
MCTCT_START_DOCSTRING = r"""
This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) sub-class. Use
it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and
behavior.
Parameters:
config ([`MCTCTConfig`]): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""

# Forward-method docstring template; the `{0}` placeholder is filled with the input shape by
# `add_start_docstrings_to_model_forward`.
MCTCT_INPUTS_DOCSTRING = r"""
Args:
input_features (`torch.LongTensor` of shape `({0})`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using [`Wav2Vec2CTCTokenizer`]. See [`PreTrainedTokenizer.encode`] and
[`PreTrainedTokenizer.__call__`] for details.
[What are input IDs?](../glossary#input-ids)
attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*):
Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.
"""
class MCTCTEncoder(MCTCTPreTrainedModel):
    """Encoder stack: feature layer norm + Conv1d subsampling followed by `num_hidden_layers` MCTCT layers."""

    def __init__(self, config: MCTCTConfig):
        super().__init__(config)
        self.hidden_dropout_prob = config.hidden_dropout_prob

        self.layer_norm = MCTCTLayerNorm()
        self.conv = MCTCTConv1dSubsampler(config)
        self.layers = nn.ModuleList([MCTCTLayer(config) for _ in range(config.num_hidden_layers)])

        self.gradient_checkpointing = False

    def forward(
        self,
        input_features: torch.Tensor,
        attention_mask: torch.Tensor,
        head_mask: torch.Tensor,
        output_attentions: bool = False,
        output_hidden_states: bool = False,
        return_dict: bool = True,
    ) -> Union[Tuple, BaseModelOutput]:
        """
        Runs the subsampler and the transformer stack.

        Args:
            input_features: raw audio features; layer-normalized and subsampled before the layers.
            attention_mask: padding mask over `input_features`; re-derived for the subsampled length.
            head_mask: optional per-layer head mask of shape `(num_layers, num_heads)`.

        Returns:
            `BaseModelOutput` (or a tuple when `return_dict=False`) with the last hidden state and,
            optionally, all hidden states and attentions.
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        input_features = self.layer_norm(input_features)

        inputs_embeds = self.conv(input_features)

        # subsample attention mask if necessary
        if attention_mask is not None:
            attention_mask = self._get_feature_vector_attention_mask(inputs_embeds.shape[1], attention_mask)

        hidden_states = nn.functional.dropout(inputs_embeds, p=self.hidden_dropout_prob, training=self.training)

        # expand attention_mask
        if attention_mask is not None:
            # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
            attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype)

        encoder_states = () if output_hidden_states else None
        all_attentions = () if output_attentions else None

        # check if head_mask has a correct number of layers specified if desired
        if head_mask is not None:
            if head_mask.size()[0] != len(self.layers):
                raise ValueError(
                    f"The head_mask should be specified for {len(self.layers)} layers, "
                    f"but it is for {head_mask.size()[0]}."
                )

        deepspeed_zero3_is_enabled = is_deepspeed_zero3_enabled()
        for idx, encoder_layer in enumerate(self.layers):
            if output_hidden_states:
                encoder_states = encoder_states + (hidden_states,)

            # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
            dropout_probability = random.uniform(0, 1)

            skip_the_layer = True if self.training and (dropout_probability < self.config.layerdrop) else False
            if not skip_the_layer or deepspeed_zero3_is_enabled:
                # under deepspeed zero3 all gpus must run in sync
                if self.gradient_checkpointing and self.training:

                    def create_custom_forward(module):
                        def custom_forward(*inputs):
                            return module(*inputs, output_attentions)

                        return custom_forward

                    layer_outputs = torch.utils.checkpoint.checkpoint(
                        create_custom_forward(encoder_layer),
                        hidden_states,
                        attention_mask,
                        (head_mask[idx] if head_mask is not None else None),
                    )
                else:
                    # BUGFIX: the eager path previously dropped `head_mask`, so head masking only
                    # took effect under gradient checkpointing even though the mask is validated
                    # above; pass the per-layer slice here as well for consistency.
                    layer_outputs = encoder_layer(
                        hidden_states=hidden_states,
                        attention_mask=attention_mask,
                        head_mask=(head_mask[idx] if head_mask is not None else None),
                        output_attentions=output_attentions,
                    )

                hidden_states = layer_outputs[0]

            if skip_the_layer:
                # Dropped layers produce no attention weights to collect.
                layer_outputs = (None, None)

            if output_attentions:
                all_attentions = all_attentions + (layer_outputs[1],)

        if output_hidden_states:
            encoder_states = encoder_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
        return BaseModelOutput(
            last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
        )
@add_start_docstrings(
    "The bare M-CTC-T Model transformer outputting raw hidden-states without any specific head on top.",
    MCTCT_START_DOCSTRING,
)
class MCTCTModel(MCTCTPreTrainedModel):
    """Encoder-only M-CTC-T model returning the raw hidden states of its encoder."""

    def __init__(self, config):
        super().__init__(config)
        self.config = config

        self.encoder = MCTCTEncoder(config)

        # Initialize weights and apply final processing
        self.post_init()

    @add_start_docstrings_to_model_forward(MCTCT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=BaseModelOutput,
        config_class=_CONFIG_FOR_DOC,
        modality="audio",
        expected_output=_EXPECTED_OUTPUT_SHAPE,
    )
    def forward(
        self,
        input_features: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, BaseModelOutput]:
        # Fall back to the config for every flag the caller left unset.
        if output_attentions is None:
            output_attentions = self.config.output_attentions
        if output_hidden_states is None:
            output_hidden_states = self.config.output_hidden_states
        if return_dict is None:
            return_dict = self.config.use_return_dict

        if input_features is None:
            raise ValueError("You have to specify input_features.")

        encoder_outputs = self.encoder(
            input_features,
            attention_mask=attention_mask,
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        last_hidden = encoder_outputs[0]

        if not return_dict:
            return (last_hidden,) + encoder_outputs[1:]

        return BaseModelOutput(
            last_hidden_state=last_hidden,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )
@add_start_docstrings(
    """MCTCT Model with a `language modeling` head on top for Connectionist Temporal Classification (CTC).""",
    MCTCT_START_DOCSTRING,
)
class MCTCTForCTC(MCTCTPreTrainedModel):
    """M-CTC-T encoder with a linear CTC head projecting hidden states to vocabulary logits."""

    def __init__(self, config):
        super().__init__(config)

        self.mctct = MCTCTModel(config)

        if config.vocab_size is None:
            raise ValueError(
                f"You are trying to instantiate {self.__class__} with a configuration that "
                "does not define the vocabulary size of the language model head. Please "
                "instantiate the model as follows: `MCTCTForCTC.from_pretrained(..., vocab_size=vocab_size)`. "
                "or define `vocab_size` of your model's configuration."
            )
        output_hidden_size = config.hidden_size

        self.ctc_head = nn.Linear(output_hidden_size, config.vocab_size)

        # Initialize weights and apply final processing
        self.post_init()

    @add_start_docstrings_to_model_forward(MCTCT_INPUTS_DOCSTRING)
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=CausalLMOutput,
        config_class=_CONFIG_FOR_DOC,
        expected_output=_CTC_EXPECTED_OUTPUT,
        expected_loss=_CTC_EXPECTED_LOSS,
    )
    def forward(
        self,
        input_features: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        labels: Optional[torch.LongTensor] = None,
    ) -> Union[Tuple, CausalLMOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, target_length)`, *optional*):
            Labels for connectionist temporal classification. Note that `target_length` has to be smaller or equal to
            the sequence length of the output logits. Indices are selected in `[-100, 0, ..., config.vocab_size - 1]`.
            All labels set to `-100` are ignored (masked), the loss is only computed for labels in `[0, ...,
            config.vocab_size - 1]`.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        outputs = self.mctct(
            input_features,
            attention_mask=attention_mask,
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        hidden_states = outputs[0]

        logits = self.ctc_head(hidden_states)

        loss = None
        if labels is not None:
            # BUGFIX: the message previously claimed "<= vocab_size" while the guard rejects
            # labels == vocab_size; valid label ids are 0 .. vocab_size - 1.
            if labels.max() >= self.config.vocab_size:
                raise ValueError(f"Label values must be < vocab_size: {self.config.vocab_size}")

            # retrieve loss input_lengths from attention_mask
            # BUGFIX: create the fallback all-ones mask on the same device as the inputs so the
            # length computation does not mix CPU and accelerator tensors.
            attention_mask = (
                attention_mask
                if attention_mask is not None
                else torch.ones(input_features.shape[:-1], dtype=torch.long, device=input_features.device)
            )
            input_lengths = self._get_feat_extract_output_lengths(attention_mask.sum(-1)).to(torch.long)
            # assuming that padded tokens are filled with -100
            # when not being attended to
            labels_mask = labels >= 0
            target_lengths = labels_mask.sum(-1)
            flattened_targets = labels.masked_select(labels_mask)

            # ctc_loss doesn't support fp16
            log_probs = nn.functional.log_softmax(logits, dim=-1, dtype=torch.float32).transpose(0, 1)

            with torch.backends.cudnn.flags(enabled=False):
                loss = nn.functional.ctc_loss(
                    log_probs,
                    flattened_targets,
                    input_lengths,
                    target_lengths,
                    blank=self.config.pad_token_id,
                    reduction=self.config.ctc_loss_reduction,
                    zero_infinity=self.config.ctc_zero_infinity,
                )

        if not return_dict:
            output = (logits,) + outputs[_HIDDEN_STATES_START_POSITION:]
            return ((loss,) + output) if loss is not None else output

        return CausalLMOutput(
            loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions
        )
|
2881099/dotnetGen_postgresql | 2,201 | Server/Resources/WebHost/gulpfile.js | "use strict";
// Build-pipeline dependencies (gulp 3.x task style).
var gulp = require('gulp'),
    clean = require('gulp-clean'),
    glob = require("glob");

// Source and destination folders used by every task below.
var paths = {
    devModule: "../Module/",                 // module source projects
    hostModule: "./Module/",                 // host-side module output
    hostWwwrootModules: "./wwwroot/module/"  // static assets served by the host
};

// Module manifests discovered once at load time; tasks iterate over this list.
var modules = loadModules();
// Remove previously copied module output and static assets from the host.
gulp.task('clean-module', function () {
    // read:false skips loading file contents; clean() only needs the paths.
    return gulp.src([paths.hostModule + '*', paths.hostWwwrootModules + '*'], { read: false })
        .pipe(clean());
});
// Copy each module's views, compiled binaries, config and static files into the host.
// Depends on 'clean-module' (gulp 3 dependency syntax).
// NOTE(review): the inner gulp.src streams are neither returned nor merged, so gulp cannot
// detect when this task actually finishes — confirm whether completion ordering matters here.
gulp.task('copy-module', ['clean-module'], function () {
    modules.forEach(function (module) {
        console.log(paths.devModule + module.fullName + '/Views/**/*.*');
        // NOTE(review): `base` is the bare module name rather than the full source path —
        // verify this yields the intended directory layout under the destination.
        gulp.src([paths.devModule + module.fullName + '/Views/**/*.*'], { base: module.fullName })
            .pipe(gulp.dest(paths.hostModule + module.fullName));
        // Compiled assembly (and pdb/deps) from the Debug build output.
        gulp.src(paths.devModule + module.fullName + '/bin/Debug/netstandard2.0/**/' + module.fullName + '.*')
            .pipe(gulp.dest(paths.hostModule + module.fullName));
        gulp.src(paths.devModule + module.fullName + '/appsettings.json')
            .pipe(gulp.dest(paths.hostModule + module.fullName));
        // Static assets go to the host wwwroot, keyed by short module name.
        gulp.src(paths.devModule + module.fullName + '/wwwroot/**/*.*')
            .pipe(gulp.dest(paths.hostWwwrootModules + module.name));
    });
});
// Copy only views and static assets (no binaries) — a lighter variant of 'copy-module'.
// NOTE(review): as in 'copy-module', the streams are not returned, so gulp treats this
// task as synchronous — confirm whether that is acceptable here.
gulp.task('copy-static', function () {
    modules.forEach(function (module) {
        gulp.src([paths.devModule + module.fullName + '/Views/**/*.*'], { base: module.fullName })
            .pipe(gulp.dest(paths.hostModule + module.fullName));
        gulp.src(paths.devModule + module.fullName + '/wwwroot/**/*.*')
            .pipe(gulp.dest(paths.hostWwwrootModules + module.name));
    });
});
// Scan ../Module/*/<Name>.csproj and build one manifest entry per module project.
function loadModules() {
    var projectFiles = glob.sync(paths.devModule + '*/*.csproj', {});
    return projectFiles.map(function (projectFile) {
        // Matches ".../<Name>/<Name>.csproj" where folder and file share the same name.
        var match = /\/([^\/]+)\/\1\.csproj/.exec(projectFile);
        return {
            name: match[1],
            fullName: match[1],
            version: "1.0.0"
        };
    });
}
2881099/dotnetGen_mysql | 857 | ServerWinForm/Properties/AssemblyInfo.cs | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("ServerWinForm")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("ServerWinForm")]
[assembly: AssemblyCopyright("版权所有 (C) 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly invisible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM.
[assembly: Guid("919d7765-864f-4c8c-9d5c-c18c7bd11e38")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
2881099/dotnetGen_sqlserver | 16,551 | MakeCode/FrmMain.designer.cs | using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using Model;
namespace MakeCode {
    // Designer-generated half of the main form; layout is serialized by the WinForms
    // designer — do not hand-edit InitializeComponent.
    partial class FrmMain {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing) {
            if (disposing && (components != null)) {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows 窗体设计器生成的代码

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent() {
            this.components = new System.ComponentModel.Container();
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(FrmMain));
            this.imageList1 = new System.Windows.Forms.ImageList(this.components);
            this.labServer = new System.Windows.Forms.Label();
            this.labProject = new System.Windows.Forms.Label();
            this.labUsername = new System.Windows.Forms.Label();
            this.labPassword = new System.Windows.Forms.Label();
            this.cmbDatabase = new System.Windows.Forms.ComboBox();
            this.btnBuild = new System.Windows.Forms.Button();
            this.btnConnect = new System.Windows.Forms.Button();
            this.dgvGridview = new System.Windows.Forms.DataGridView();
            this.toolTip1 = new System.Windows.Forms.ToolTip(this.components);
            this.chkDownloadRes = new System.Windows.Forms.CheckBox();
            this.chkWebAdmin = new System.Windows.Forms.CheckBox();
            this.chkSolution = new System.Windows.Forms.CheckBox();
            this.txtServer = new System.Windows.Forms.TextBox();
            this.txtSolution = new System.Windows.Forms.TextBox();
            this.txtUsername = new System.Windows.Forms.TextBox();
            this.txtPassword = new System.Windows.Forms.TextBox();
            this.chkIntegrated = new System.Windows.Forms.CheckBox();
            this.labDatabase = new System.Windows.Forms.Label();
            this.panel1 = new System.Windows.Forms.Panel();
            this.webBrowser1 = new System.Windows.Forms.WebBrowser();
            ((System.ComponentModel.ISupportInitialize)(this.dgvGridview)).BeginInit();
            this.panel1.SuspendLayout();
            this.SuspendLayout();
            //
            // imageList1
            //
            this.imageList1.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("imageList1.ImageStream")));
            this.imageList1.TransparentColor = System.Drawing.Color.Transparent;
            this.imageList1.Images.SetKeyName(0, "PrimaryKey.ico");
            this.imageList1.Images.SetKeyName(1, "Key.ico");
            //
            // labServer
            //
            this.labServer.AutoSize = true;
            this.labServer.Location = new System.Drawing.Point(10, 323);
            this.labServer.Name = "labServer";
            this.labServer.Size = new System.Drawing.Size(65, 12);
            this.labServer.TabIndex = 16;
            this.labServer.Text = "SQL Server";
            //
            // labProject
            //
            this.labProject.AutoSize = true;
            this.labProject.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft;
            this.labProject.Location = new System.Drawing.Point(302, 323);
            this.labProject.Name = "labProject";
            this.labProject.Size = new System.Drawing.Size(53, 12);
            this.labProject.TabIndex = 27;
            this.labProject.Text = "项目名称";
            //
            // labUsername
            //
            this.labUsername.AutoSize = true;
            this.labUsername.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft;
            this.labUsername.Location = new System.Drawing.Point(10, 350);
            this.labUsername.Name = "labUsername";
            this.labUsername.Size = new System.Drawing.Size(47, 12);
            this.labUsername.TabIndex = 18;
            this.labUsername.Text = "User Id";
            //
            // labPassword
            //
            this.labPassword.AutoSize = true;
            this.labPassword.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft;
            this.labPassword.Location = new System.Drawing.Point(10, 377);
            this.labPassword.Name = "labPassword";
            this.labPassword.Size = new System.Drawing.Size(53, 12);
            this.labPassword.TabIndex = 20;
            this.labPassword.Text = "Password";
            //
            // cmbDatabase
            //
            this.cmbDatabase.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.cmbDatabase.Enabled = false;
            this.cmbDatabase.FormattingEnabled = true;
            this.cmbDatabase.Location = new System.Drawing.Point(364, 374);
            this.cmbDatabase.Margin = new System.Windows.Forms.Padding(4);
            this.cmbDatabase.Name = "cmbDatabase";
            this.cmbDatabase.Size = new System.Drawing.Size(117, 20);
            this.cmbDatabase.TabIndex = 24;
            this.toolTip1.SetToolTip(this.cmbDatabase, "请选择一个数据库");
            this.cmbDatabase.SelectedIndexChanged += new System.EventHandler(this.cmbDatabase_SelectedIndexChanged);
            //
            // btnBuild
            //
            this.btnBuild.Enabled = false;
            this.btnBuild.Location = new System.Drawing.Point(487, 373);
            this.btnBuild.Name = "btnBuild";
            this.btnBuild.Size = new System.Drawing.Size(89, 21);
            this.btnBuild.TabIndex = 25;
            this.btnBuild.Text = "生成";
            this.toolTip1.SetToolTip(this.btnBuild, "生成");
            this.btnBuild.UseVisualStyleBackColor = true;
            this.btnBuild.Click += new System.EventHandler(this.btnBuild_Click);
            //
            // btnConnect
            //
            this.btnConnect.Location = new System.Drawing.Point(208, 374);
            this.btnConnect.Name = "btnConnect";
            this.btnConnect.Size = new System.Drawing.Size(90, 21);
            this.btnConnect.TabIndex = 22;
            this.btnConnect.Text = "Connect";
            this.btnConnect.UseVisualStyleBackColor = true;
            this.btnConnect.Click += new System.EventHandler(this.btnConnect_Click);
            //
            // dgvGridview
            //
            this.dgvGridview.AllowUserToAddRows = false;
            this.dgvGridview.AllowUserToResizeRows = false;
            this.dgvGridview.BackgroundColor = System.Drawing.SystemColors.ActiveCaptionText;
            this.dgvGridview.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize;
            this.dgvGridview.Location = new System.Drawing.Point(12, 12);
            this.dgvGridview.Name = "dgvGridview";
            this.dgvGridview.RowHeadersVisible = false;
            this.dgvGridview.RowHeadersWidthSizeMode = System.Windows.Forms.DataGridViewRowHeadersWidthSizeMode.DisableResizing;
            this.dgvGridview.RowTemplate.Height = 23;
            this.dgvGridview.Size = new System.Drawing.Size(286, 302);
            this.dgvGridview.TabIndex = 26;
            this.dgvGridview.CellContentClick += new System.Windows.Forms.DataGridViewCellEventHandler(this.dgvGridview_CellContentClick);
            this.dgvGridview.CellFormatting += new System.Windows.Forms.DataGridViewCellFormattingEventHandler(this.dgvGridview_CellFormatting);
            this.dgvGridview.CellValueChanged += new System.Windows.Forms.DataGridViewCellEventHandler(this.dgvGridview_CellValueChanged);
            this.dgvGridview.ColumnHeaderMouseClick += new System.Windows.Forms.DataGridViewCellMouseEventHandler(this.dgvGridview_ColumnHeaderMouseClick);
            this.dgvGridview.DataError += new System.Windows.Forms.DataGridViewDataErrorEventHandler(this.dgvGridview_DataError);
            //
            // chkDownloadRes
            //
            this.chkDownloadRes.AutoSize = true;
            this.chkDownloadRes.Checked = global::MakeCode.Properties.Settings.Default.chkDownloadRes_checked;
            this.chkDownloadRes.DataBindings.Add(new System.Windows.Forms.Binding("Checked", global::MakeCode.Properties.Settings.Default, "chkDownloadRes_checked", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
            this.chkDownloadRes.Location = new System.Drawing.Point(397, 347);
            this.chkDownloadRes.Name = "chkDownloadRes";
            this.chkDownloadRes.RightToLeft = System.Windows.Forms.RightToLeft.Yes;
            this.chkDownloadRes.Size = new System.Drawing.Size(84, 16);
            this.chkDownloadRes.TabIndex = 37;
            this.chkDownloadRes.Text = "下载资源包";
            this.toolTip1.SetToolTip(this.chkDownloadRes, "是否下载资源包,因网速原因,可能会影响生成速度");
            this.chkDownloadRes.UseVisualStyleBackColor = true;
            //
            // chkWebAdmin
            //
            this.chkWebAdmin.AutoSize = true;
            this.chkWebAdmin.Checked = global::MakeCode.Properties.Settings.Default.chkWebAdmin_checked;
            this.chkWebAdmin.DataBindings.Add(new System.Windows.Forms.Binding("Checked", global::MakeCode.Properties.Settings.Default, "chkWebAdmin_checked", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
            this.chkWebAdmin.Location = new System.Drawing.Point(486, 322);
            this.chkWebAdmin.Name = "chkWebAdmin";
            this.chkWebAdmin.RightToLeft = System.Windows.Forms.RightToLeft.Yes;
            this.chkWebAdmin.Size = new System.Drawing.Size(96, 16);
            this.chkWebAdmin.TabIndex = 35;
            this.chkWebAdmin.Text = "生成后台管理";
            this.toolTip1.SetToolTip(this.chkWebAdmin, "是否生成 WEB 管理");
            this.chkWebAdmin.UseVisualStyleBackColor = true;
            //
            // chkSolution
            //
            this.chkSolution.AutoSize = true;
            this.chkSolution.Checked = global::MakeCode.Properties.Settings.Default.chkSolution_checked;
            this.chkSolution.DataBindings.Add(new System.Windows.Forms.Binding("Checked", global::MakeCode.Properties.Settings.Default, "chkSolution_checked", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
            this.chkSolution.Location = new System.Drawing.Point(486, 347);
            this.chkSolution.Name = "chkSolution";
            this.chkSolution.RightToLeft = System.Windows.Forms.RightToLeft.Yes;
            this.chkSolution.Size = new System.Drawing.Size(96, 16);
            this.chkSolution.TabIndex = 30;
            this.chkSolution.Text = "生成解决方案";
            this.toolTip1.SetToolTip(this.chkSolution, "是否生成解决方案(.sln)和项目文件(.csproj)");
            this.chkSolution.UseVisualStyleBackColor = true;
            //
            // txtServer
            //
            this.txtServer.DataBindings.Add(new System.Windows.Forms.Binding("Text", global::MakeCode.Properties.Settings.Default, "txtServer_text", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
            this.txtServer.Location = new System.Drawing.Point(81, 320);
            this.txtServer.Name = "txtServer";
            this.txtServer.Size = new System.Drawing.Size(217, 21);
            this.txtServer.TabIndex = 17;
            this.txtServer.Text = global::MakeCode.Properties.Settings.Default.txtServer_text;
            this.toolTip1.SetToolTip(this.txtServer, "数据库地址\r\n如:101.10.131.100");
            //
            // txtSolution
            //
            this.txtSolution.DataBindings.Add(new System.Windows.Forms.Binding("Text", global::MakeCode.Properties.Settings.Default, "txtSolution_text", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
            this.txtSolution.Location = new System.Drawing.Point(364, 320);
            this.txtSolution.Name = "txtSolution";
            this.txtSolution.Size = new System.Drawing.Size(117, 21);
            this.txtSolution.TabIndex = 28;
            this.txtSolution.Text = global::MakeCode.Properties.Settings.Default.txtSolution_text;
            this.toolTip1.SetToolTip(this.txtSolution, "要生成的解决方案名(不能为空)\r\n如:Nic");
            this.txtSolution.TextChanged += new System.EventHandler(this.txtProject_TextChanged);
            //
            // txtUsername
            //
            this.txtUsername.DataBindings.Add(new System.Windows.Forms.Binding("Text", global::MakeCode.Properties.Settings.Default, "txtUsername_text", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
            this.txtUsername.Enabled = false;
            this.txtUsername.Location = new System.Drawing.Point(81, 347);
            this.txtUsername.Name = "txtUsername";
            this.txtUsername.Size = new System.Drawing.Size(116, 21);
            this.txtUsername.TabIndex = 19;
            this.txtUsername.Text = global::MakeCode.Properties.Settings.Default.txtUsername_text;
            this.toolTip1.SetToolTip(this.txtUsername, "数据库用户\r\n如:sa");
            //
            // txtPassword
            //
            this.txtPassword.DataBindings.Add(new System.Windows.Forms.Binding("Text", global::MakeCode.Properties.Settings.Default, "txtPassword_text", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
            this.txtPassword.Enabled = false;
            this.txtPassword.Location = new System.Drawing.Point(81, 374);
            this.txtPassword.Name = "txtPassword";
            this.txtPassword.PasswordChar = '*';
            this.txtPassword.Size = new System.Drawing.Size(116, 21);
            this.txtPassword.TabIndex = 21;
            this.txtPassword.Text = global::MakeCode.Properties.Settings.Default.txtPassword_text;
            this.toolTip1.SetToolTip(this.txtPassword, "密码\r\n如:123456");
            //
            // chkIntegrated
            //
            this.chkIntegrated.AutoSize = true;
            this.chkIntegrated.Checked = global::MakeCode.Properties.Settings.Default.chkIntegrated_Checked;
            this.chkIntegrated.CheckState = System.Windows.Forms.CheckState.Checked;
            this.chkIntegrated.DataBindings.Add(new System.Windows.Forms.Binding("Checked", global::MakeCode.Properties.Settings.Default, "chkIntegrated_Checked", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
            this.chkIntegrated.Location = new System.Drawing.Point(208, 349);
            this.chkIntegrated.Name = "chkIntegrated";
            this.chkIntegrated.Size = new System.Drawing.Size(84, 16);
            this.chkIntegrated.TabIndex = 32;
            this.chkIntegrated.Text = "Integrated";
            this.toolTip1.SetToolTip(this.chkIntegrated, "Windows 用户验证");
            this.chkIntegrated.UseVisualStyleBackColor = true;
            this.chkIntegrated.CheckedChanged += new System.EventHandler(this.chkIntegrated_CheckedChanged);
            //
            // labDatabase
            //
            this.labDatabase.AutoSize = true;
            this.labDatabase.Location = new System.Drawing.Point(302, 378);
            this.labDatabase.Name = "labDatabase";
            this.labDatabase.Size = new System.Drawing.Size(53, 12);
            this.labDatabase.TabIndex = 23;
            this.labDatabase.Text = "Database";
            //
            // panel1
            //
            this.panel1.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
            this.panel1.Controls.Add(this.webBrowser1);
            this.panel1.Location = new System.Drawing.Point(304, 12);
            this.panel1.Name = "panel1";
            this.panel1.Size = new System.Drawing.Size(270, 302);
            this.panel1.TabIndex = 36;
            //
            // webBrowser1
            //
            this.webBrowser1.AllowWebBrowserDrop = false;
            this.webBrowser1.IsWebBrowserContextMenuEnabled = false;
            this.webBrowser1.Location = new System.Drawing.Point(0, 0);
            this.webBrowser1.MinimumSize = new System.Drawing.Size(20, 20);
            this.webBrowser1.Name = "webBrowser1";
            this.webBrowser1.Size = new System.Drawing.Size(270, 302);
            this.webBrowser1.TabIndex = 33;
            this.webBrowser1.Url = new System.Uri("http://www.penzz.com/nicpetshop.html", System.UriKind.Absolute);
            this.webBrowser1.WebBrowserShortcutsEnabled = false;
            //
            // FrmMain
            //
            this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.ClientSize = new System.Drawing.Size(586, 405);
            this.Controls.Add(this.chkDownloadRes);
            this.Controls.Add(this.panel1);
            this.Controls.Add(this.chkWebAdmin);
            this.Controls.Add(this.chkSolution);
            this.Controls.Add(this.txtServer);
            this.Controls.Add(this.txtSolution);
            this.Controls.Add(this.txtUsername);
            this.Controls.Add(this.txtPassword);
            this.Controls.Add(this.labServer);
            this.Controls.Add(this.labProject);
            this.Controls.Add(this.labDatabase);
            this.Controls.Add(this.labUsername);
            this.Controls.Add(this.labPassword);
            this.Controls.Add(this.cmbDatabase);
            this.Controls.Add(this.btnBuild);
            this.Controls.Add(this.btnConnect);
            this.Controls.Add(this.dgvGridview);
            this.Controls.Add(this.chkIntegrated);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
            this.MaximizeBox = false;
            this.Name = "FrmMain";
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
            this.Text = "代码生成器(.NET Core + SQLServer)";
            this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.FrmMain_FormClosing);
            this.Load += new System.EventHandler(this.FrmMain_Load);
            ((System.ComponentModel.ISupportInitialize)(this.dgvGridview)).EndInit();
            this.panel1.ResumeLayout(false);
            this.ResumeLayout(false);
            this.PerformLayout();
        }
        #endregion

        private System.Windows.Forms.ImageList imageList1;
        private CheckBox chkSolution;
        private TextBox txtServer;
        private TextBox txtSolution;
        private TextBox txtUsername;
        private TextBox txtPassword;
        private Label labServer;
        private Label labProject;
        private Label labUsername;
        private Label labPassword;
        private ComboBox cmbDatabase;
        private Button btnBuild;
        private Button btnConnect;
        private DataGridView dgvGridview;
        private ToolTip toolTip1;
        private CheckBox chkIntegrated;
        private Label labDatabase;
        private Panel panel1;
        private WebBrowser webBrowser1;
        private CheckBox chkDownloadRes;
        private CheckBox chkWebAdmin;
    }
}
2881099/dotnetGen_sqlserver | 1,391 | MakeCode/Settings.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace MakeCode {
    // Auto-generated settings wrapper; regenerated by the Settings designer —
    // hand edits will be lost.
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "15.7.0.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {

        // Singleton instance; ApplicationSettingsBase.Synchronized makes access thread-safe.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));

        /// <summary>
        /// Gets the default (singleton) settings instance.
        /// </summary>
        public static Settings Default {
            get {
                return defaultInstance;
            }
        }

        /// <summary>
        /// User-scoped server address ("host:port"); defaults to "127.0.0.1:29918".
        /// </summary>
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("127.0.0.1:29918")]
        public string server {
            get {
                return ((string)(this["server"]));
            }
            set {
                this["server"] = value;
            }
        }
    }
}
|
27182812/ChatGLM-LLaMA-chinese-insturct | 2,242 | src/transformers/models/mctct/__init__.py | # Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_speech_available, is_torch_available


# Names that are always importable, mapped to the submodules that define them.
_import_structure = {
    "configuration_mctct": ["MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MCTCTConfig"],
    "processing_mctct": ["MCTCTProcessor"],
}

# The feature extractor is only registered when the optional speech dependencies are installed.
try:
    if not is_speech_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["feature_extraction_mctct"] = ["MCTCTFeatureExtractor"]

# The modeling classes are only registered when PyTorch is installed.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_mctct"] = [
        "MCTCT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "MCTCTForCTC",
        "MCTCTModel",
        "MCTCTPreTrainedModel",
    ]


# Static imports for type checkers; this branch mirrors `_import_structure` exactly.
if TYPE_CHECKING:
    from .configuration_mctct import MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP, MCTCTConfig
    from .processing_mctct import MCTCTProcessor

    try:
        if not is_speech_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .feature_extraction_mctct import MCTCTFeatureExtractor

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_mctct import MCTCT_PRETRAINED_MODEL_ARCHIVE_LIST, MCTCTForCTC, MCTCTModel, MCTCTPreTrainedModel

else:
    import sys

    # At runtime, replace this module with a lazy proxy that imports submodules on first attribute access.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
|
2881099/dotnetGen_mysql | 979 | ServerWinForm/Properties/Settings.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace ServerWinForm.Properties {
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "14.0.0.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
        // Thread-safe singleton created via ApplicationSettingsBase.Synchronized.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
        /// <summary>
        /// Shared, synchronized settings instance for the application.
        /// </summary>
        public static Settings Default {
            get {
                return defaultInstance;
            }
        }
    }
}
|
2881099/dotnetGen_mysql | 2,453 | ServerWinForm/Properties/Resources.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace ServerWinForm.Properties {
    using System;
    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {
        private static global::System.Resources.ResourceManager resourceMan;
        private static global::System.Globalization.CultureInfo resourceCulture;
        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }
        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                if (object.ReferenceEquals(resourceMan, null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("ServerWinForm.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }
        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }
    }
}
|
2881099/dotnetGen_postgresql | 1,264 | Server/Resources/Infrastructure/Extensions/GlobalExtensions.cs | using Newtonsoft.Json;
using System;
using System.Text.RegularExpressions;
public static class GlobalExtensions {
	/// <summary>
	/// Serializes <paramref name="obj"/> to JSON and escapes embedded "&lt;script"/"&lt;/script"
	/// sequences so the result can be inlined inside an HTML &lt;script&gt; element without
	/// prematurely terminating it.
	/// </summary>
	/// <param name="html">Razor HTML helper; when null the escaped JSON string itself is returned.</param>
	/// <param name="obj">The object to serialize.</param>
	/// <returns>The escaped JSON, raw string when <paramref name="html"/> is null, otherwise wrapped by html.Raw.</returns>
	public static object Json(this Microsoft.AspNetCore.Mvc.Rendering.IHtmlHelper html, object obj) {
		string str = JsonConvert.SerializeObject(obj);
		// Rewrite "<script..." to the JS-safe concatenation "<"+"script..." so the payload
		// cannot close the surrounding <script> tag when embedded in a page.
		if (!string.IsNullOrEmpty(str)) str = Regex.Replace(str, @"<(/?script[\s>])", "<\"+\"$1", RegexOptions.IgnoreCase);
		if (html == null) return str;
		return html.Raw(str);
	}
	/// <summary>
	/// Converts the time to UTC and formats it as an ISO 8601 string.
	/// </summary>
	/// <param name="time">The local (or unspecified-kind) time to format.</param>
	/// <returns>UTC time formatted as "yyyy-MM-ddTHH:mm:ssZ".</returns>
	public static string ToGmtISO8601(this DateTime time) {
		return time.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ");
	}
	// Unix epoch (1970-01-01), cached and readonly so the timestamp helpers below
	// do not re-allocate it on every call. Previously this field existed but was unused.
	static readonly DateTime dt19700101 = new DateTime(1970, 1, 1);
	/// <summary>
	/// Gets the Unix timestamp in whole seconds since 1970-01-01, based on UTC.
	/// </summary>
	/// <param name="time">The time to convert.</param>
	/// <returns>Seconds elapsed since the Unix epoch.</returns>
	public static long GetTime(this DateTime time) {
		return (long)time.ToUniversalTime().Subtract(dt19700101).TotalSeconds;
	}
	/// <summary>
	/// Gets the Unix timestamp in whole milliseconds since 1970-01-01, based on UTC.
	/// </summary>
	/// <param name="time">The time to convert.</param>
	/// <returns>Milliseconds elapsed since the Unix epoch.</returns>
	public static long GetTimeMilliseconds(this DateTime time) {
		return (long)time.ToUniversalTime().Subtract(dt19700101).TotalMilliseconds;
	}
}
|
2881099/dotnetGen_postgresql | 1,194 | Server/Resources/Infrastructure/Controllers/CustomExceptionFilter.cs | using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
public class CustomExceptionFilter : Attribute, IExceptionFilter {
	// Injected services. NOTE(review): only _logger is used in the code visible here;
	// _cfg and _env appear unused — confirm before removing.
	private ILogger _logger = null;
	private IConfiguration _cfg = null;
	private IHostingEnvironment _env = null;
	/// <summary>
	/// Creates the filter with DI-provided logger, configuration and hosting environment.
	/// </summary>
	public CustomExceptionFilter (ILogger<CustomExceptionFilter> logger, IConfiguration cfg, IHostingEnvironment env) {
		_logger = logger;
		_cfg = cfg;
		_env = env;
	}
	/// <summary>
	/// Converts any unhandled exception into an APIReturn failure payload for the caller
	/// and logs the exception (including inner exception, if any), then marks it handled.
	/// </summary>
	public void OnException(ExceptionContext context) {
		// Log the error here; context.Exception is the thrown exception.
		context.Result = APIReturn.失败.SetMessage(context.Exception.Message); // Result returned to the caller.
		var innerLog = context.Exception.InnerException != null ? $" \r\n{context.Exception.InnerException.Message} \r\n{ context.Exception.InnerException.StackTrace}" : "";
		_logger.LogError($"=============错误:{context.Exception.Message} \r\n{context.Exception.StackTrace}{innerLog}");
		// Prevent the exception from propagating further up the pipeline.
		context.ExceptionHandled = true;
	}
} |
2881099/dotnetGen_sqlserver | 1,846 | MakeCode/app.config | <?xml version="1.0" encoding="utf-8" ?>
<configuration>
<configSections>
<sectionGroup name="userSettings" type="System.Configuration.UserSettingsGroup, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" >
<section name="MakeCode.Settings" type="System.Configuration.ClientSettingsSection, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" allowExeDefinition="MachineToLocalUser" requirePermission="false" />
<section name="MakeCode.Properties.Settings" type="System.Configuration.ClientSettingsSection, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" allowExeDefinition="MachineToLocalUser" requirePermission="false" />
</sectionGroup>
</configSections>
<userSettings>
<MakeCode.Settings>
<setting name="server" serializeAs="String">
<value>127.0.0.1:29918</value>
</setting>
</MakeCode.Settings>
<MakeCode.Properties.Settings>
<setting name="txtServer_text" serializeAs="String">
<value />
</setting>
<setting name="txtUsername_text" serializeAs="String">
<value />
</setting>
<setting name="txtPassword_text" serializeAs="String">
<value />
</setting>
<setting name="txtSolution_text" serializeAs="String">
<value />
</setting>
<setting name="chkSolution_checked" serializeAs="String">
<value>False</value>
</setting>
<setting name="chkIntegrated_Checked" serializeAs="String">
<value>True</value>
</setting>
<setting name="chkMultiDB_checked" serializeAs="String">
<value>False</value>
</setting>
<setting name="chkWebAdmin_checked" serializeAs="String">
<value>False</value>
</setting>
<setting name="chkDownloadRes_checked" serializeAs="String">
<value>False</value>
</setting>
</MakeCode.Properties.Settings>
</userSettings>
</configuration> |
27182812/ChatGLM-LLaMA-chinese-insturct | 16,224 | src/transformers/models/mctct/feature_extraction_mctct.py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Feature extractor class for M-CTC-T
"""
from typing import List, Optional, Union
import numpy as np
import torch
import torchaudio
from packaging import version
from ...feature_extraction_sequence_utils import SequenceFeatureExtractor
from ...feature_extraction_utils import BatchFeature
from ...file_utils import PaddingStrategy, TensorType
from ...utils import logging
logger = logging.get_logger(__name__)
parsed_torchaudio_version_base = version.parse(version.parse(torchaudio.__version__).base_version)
if not parsed_torchaudio_version_base >= version.parse("0.10"):
logger.warning(
f"You are using torchaudio=={torchaudio.__version__}, but torchaudio>=0.10.0 is required to use "
"MCTCTFeatureExtractor. This requires torch>=1.10.0. Please upgrade torch and torchaudio."
)
class MCTCTFeatureExtractor(SequenceFeatureExtractor):
r"""
Constructs a M-CTC-T feature extractor.
This feature extractor inherits from [`~feature_extraction_sequence_utils.SequenceFeatureExtractor`] which contains
most of the main methods. Users should refer to this superclass for more information regarding those methods. This
code has been adapted from Flashlight's C++ code. For more information about the implementation, one can refer to
this [notebook](https://colab.research.google.com/drive/1GLtINkkhzms-IsdcGy_-tVCkv0qNF-Gt#scrollTo=pMCRGMmUC_an)
that takes the user step-by-step in the implementation.
Args:
feature_size (`int`, defaults to 80):
The feature dimension of the extracted features. This is the number of mel_frequency
sampling_rate (`int`, defaults to 16000):
The sampling rate at which the audio files should be digitalized expressed in hertz (Hz).
padding_value (`float`, defaults to 0.0):
The value that is used to fill the padding values.
hop_length (`int`, defaults to 10):
Number of audio samples between windows. Otherwise referred to as "shift" in many papers.
win_length (`int`, defaults to 25):
Number of ms per window
win_function (`str`, defaults to `"hamming_window"`):
Name for the window function used for windowing, must be accessible via `torch.{win_function}`
frame_signal_scale (`float`, defaults to 32768.0):
Constant multiplied in creating the frames before applying DFT.
preemphasis_coeff (`float`, defaults to 0.97):
Constant multiplied in applying Pre-emphasis before DFT.
mel_floor (`float` defaults to 1.0):
Minimum value of mel frequency banks.
normalize_means (`bool`, *optional*, defaults to `True`):
Whether or not to zero-mean normalize the extracted features.
normalize_vars (`bool`, *optional*, defaults to `True`):
Whether or not to unit-variance normalize the extracted features.
"""
model_input_names = ["input_features", "attention_mask"]
    def __init__(
        self,
        feature_size=80,
        sampling_rate=16000,
        padding_value=0.0,
        hop_length=10,
        win_length=25,
        win_function="hamming_window",
        frame_signal_scale=32768.0,
        preemphasis_coeff=0.97,
        mel_floor=1.0,
        normalize_means=True,
        normalize_vars=True,
        return_attention_mask=False,
        **kwargs,
    ):
        # See the class docstring for the meaning of each parameter; values are
        # stored verbatim and a few derived quantities are precomputed below.
        super().__init__(feature_size=feature_size, sampling_rate=sampling_rate, padding_value=padding_value, **kwargs)
        self.feature_size = feature_size
        self.sampling_rate = sampling_rate
        self.padding_value = padding_value
        self.hop_length = hop_length
        self.win_length = win_length
        self.frame_signal_scale = frame_signal_scale
        self.preemphasis_coeff = preemphasis_coeff
        self.mel_floor = mel_floor
        self.normalize_means = normalize_means
        self.normalize_vars = normalize_vars
        self.win_function = win_function
        self.return_attention_mask = return_attention_mask
        # win_length / hop_length are given in milliseconds; convert to samples.
        self.sample_size = win_length * sampling_rate // 1000
        self.sample_stride = hop_length * sampling_rate // 1000
        # FFT size: smallest power of two that fits one window of samples.
        self.n_fft = 2 ** int(np.ceil(np.log2(self.sample_size)))
        # Number of non-redundant rfft bins for a real signal of length n_fft.
        self.n_freqs = (self.n_fft // 2) + 1
@staticmethod
def _num_frames_calc(in_size, frame_size, frame_stride):
return int(1 + np.floor((in_size - frame_size) * 1 / frame_stride))
@staticmethod
def _frame_signal(one_waveform, n_frames, frame_signal_scale, window_length, sample_stride):
scale = frame_signal_scale
frames = np.zeros(n_frames * window_length)
for frame_idx in range(n_frames):
start = frame_idx * window_length
end = (frame_idx + 1) * window_length
wave_start = frame_idx * sample_stride
wave_end = frame_idx * sample_stride + window_length
frames[start:end] = scale * one_waveform[wave_start:wave_end]
return frames
    @staticmethod
    def _apply_preemphasis_inplace(frames, window_length, preemphasis_coeff):
        """Apply pre-emphasis filtering to each frame, mutating `frames` in place.

        Each sample (except the first of a frame) becomes x[i] - coeff * x[i-1];
        the first sample of each frame is scaled by (1 - coeff). Frames are
        independent of each other, so the reverse iteration order over frames
        does not affect the result; within a frame, the numpy subtraction is
        safe because the RHS `preemphasis_coeff * frames[start:end]` is
        materialized as a temporary before the in-place update.
        """
        if frames.size % window_length != 0:
            raise ValueError(
                f"`frames` is supposed to have length divisble by `window_length`, but is {frames.size} with"
                f" window_length={window_length}."
            )
        n_frames = frames.size // window_length
        for frame_idx in range(n_frames, 0, -1):
            start = (frame_idx - 1) * window_length
            end = frame_idx * window_length - 1
            frames[start + 1 : end + 1] -= preemphasis_coeff * frames[start:end]
            frames[start] *= 1 - preemphasis_coeff
@staticmethod
def _windowing(frames, window_length, window):
if frames.size % window_length != 0:
raise ValueError(
f"`frames` is supposed to have length divisble by `window_length`, but is {frames.size} with"
f" window_length={window_length}."
)
shaped = frames.reshape(-1, window_length)
shaped = window * shaped
return shaped
@staticmethod
def _dft(frames, K, n_frames, n_samples, n_fft):
dft = np.zeros([n_frames, K])
for frame in range(n_frames):
begin = frame * n_samples
inwards_buffer = frames[begin : begin + n_samples]
inwards_buffer = np.pad(inwards_buffer, (0, n_fft - n_samples), "constant")
out = np.fft.rfft(inwards_buffer)
dft[frame] = np.abs(out[:K])
return dft
    def _extract_mfsc_features(self, one_waveform: np.ndarray) -> np.ndarray:
        """
        Extracts MFSC Features for one waveform vector (unbatched). Adapted from Flashlight's C++ MFSC code.
        """
        # Build the analysis window. The explicit alpha/beta match torch's
        # hamming_window defaults; other window names are resolved on torch
        # and called with no arguments.
        if self.win_function == "hamming_window":
            window = torch.hamming_window(window_length=self.sample_size, periodic=False, alpha=0.54, beta=0.46)
        else:
            window = getattr(torch, self.win_function)()
        window = window.numpy()
        # Mel filter bank matrix of shape (n_freqs, n_mels), from torchaudio.
        fbanks = torchaudio.functional.melscale_fbanks(
            n_freqs=self.n_freqs,
            f_min=0.0,
            f_max=self.sampling_rate / 2.0,  # Nyquist frequency
            n_mels=self.feature_size,
            sample_rate=self.sampling_rate,
        )
        fbanks = fbanks.numpy()
        # Pipeline: frame -> pre-emphasis (in place) -> window -> |rfft| -> mel.
        n_frames = self._num_frames_calc(one_waveform.size, self.sample_size, self.sample_stride)
        frames = self._frame_signal(
            one_waveform, n_frames, self.frame_signal_scale, self.sample_size, self.sample_stride
        )
        self._apply_preemphasis_inplace(frames, self.sample_size, self.preemphasis_coeff)
        frames = self._windowing(frames, self.sample_size, window)
        dft_out = self._dft(frames.flatten(), self.n_freqs, n_frames, self.sample_size, self.n_fft)
        # msfc_features = STFT * mel frequency banks.
        msfc_features = np.einsum("...tf,fm->...tm", dft_out, fbanks)
        # clamp feature values then log scale, as implemented in flashlight
        msfc_features = np.maximum(msfc_features, self.mel_floor)
        msfc_features = np.log(msfc_features)
        return msfc_features
def _normalize_one(self, x, input_length, padding_value):
# make sure we normalize float32 arrays
if self.normalize_means:
mean = x[:input_length].mean(axis=0)
x = np.subtract(x, mean)
if self.normalize_vars:
std = x[:input_length].std(axis=0)
x = np.divide(x, std)
if input_length < x.shape[0]:
x[input_length:] = padding_value
# make sure array is in float32
x = x.astype(np.float32)
return x
def normalize(
self, input_features: List[np.ndarray], attention_mask: Optional[np.ndarray] = None
) -> List[np.ndarray]:
lengths = attention_mask.sum(-1) if attention_mask is not None else [x.shape[0] for x in input_features]
return [self._normalize_one(x, n, self.padding_value) for x, n in zip(input_features, lengths)]
    def __call__(
        self,
        raw_speech: Union[np.ndarray, List[float], List[np.ndarray], List[List[float]]],
        padding: Union[bool, str, PaddingStrategy] = False,
        max_length: Optional[int] = None,
        truncation: bool = False,
        pad_to_multiple_of: Optional[int] = None,
        return_attention_mask: Optional[bool] = None,
        return_tensors: Optional[Union[str, TensorType]] = None,
        sampling_rate: Optional[int] = None,
        **kwargs,
    ) -> BatchFeature:
        """
        Main method to featurize and prepare for the model one or several sequence(s). It returns the
        log-mel spectrogram of the input audio, as implemented in the original Flashlight MFSC feature extraction code.
        Args:
            raw_speech (`torch.Tensor`, `np.ndarray`, `List[float]`, `List[torch.Tensor]`, `List[np.ndarray]`, `List[List[float]]`):
                The sequence or batch of sequences to be padded. Each sequence can be a tensor, a numpy array, a list
                of float values, a list of tensors, a list of numpy arrays or a list of list of float values.
            padding (`bool`, `str` or [`~file_utils.PaddingStrategy`], *optional*, defaults to `False`):
                Select a strategy to pad the returned sequences (according to the model's padding side and padding
                index) among:
                - `True` or `'longest'`: Pad to the longest sequence in the batch (or no padding if only a single
                  sequence if provided).
                - `'max_length'`: Pad to a maximum length specified with the argument `max_length` or to the maximum
                  acceptable input length for the model if that argument is not provided.
                - `False` or `'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of different
                  lengths).
            max_length (`int`, *optional*):
                Maximum length of the returned list and optionally padding length (see above).
            truncation (`bool`):
                Activates truncation to cut input sequences longer than *max_length* to *max_length*.
            pad_to_multiple_of (`int`, *optional*):
                If set will pad the sequence to a multiple of the provided value.
                This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability
                `>= 7.5` (Volta), or on TPUs which benefit from having sequence lengths be a multiple of 128.
            return_attention_mask (`bool`, *optional*):
                Whether to return the attention mask. If left to the default, will return the attention mask according
                to the specific feature_extractor's default.
                [What are attention masks?](../glossary#attention-mask)
            return_tensors (`str` or [`~file_utils.TensorType`], *optional*):
                If set, will return tensors instead of list of python integers. Acceptable values are:
                - `'tf'`: Return TensorFlow `tf.constant` objects.
                - `'pt'`: Return PyTorch `torch.Tensor` objects.
                - `'np'`: Return Numpy `np.ndarray` objects.
            sampling_rate (`int`, *optional*):
                The sampling rate at which the `raw_speech` input was sampled. It is strongly recommended to pass
                `sampling_rate` at the forward call to prevent silent errors.
        """
        # Guard against a sampling-rate mismatch with the extractor's configuration.
        if sampling_rate is not None:
            if sampling_rate != self.sampling_rate:
                raise ValueError(
                    f"The model corresponding to this feature extractor: {self} was trained using a sampling rate of"
                    f" {self.sampling_rate}. Please make sure that the provided `raw_speech` input was sampled with"
                    f" {self.sampling_rate} and not {sampling_rate}."
                )
        else:
            logger.warning(
                "It is strongly recommended to pass the ``sampling_rate`` argument to this function. "
                "Failing to do so can result in silent errors that might be hard to debug."
            )
        # Batched input is a list/tuple whose first element is itself a sequence.
        is_batched = bool(
            isinstance(raw_speech, (list, tuple))
            and (isinstance(raw_speech[0], np.ndarray) or isinstance(raw_speech[0], (tuple, list)))
        )
        # Coerce everything to float32 numpy arrays.
        if is_batched:
            raw_speech = [np.asarray(speech, dtype=np.float32) for speech in raw_speech]
        elif not is_batched and not isinstance(raw_speech, np.ndarray):
            raw_speech = np.asarray(raw_speech, dtype=np.float32)
        elif isinstance(raw_speech, np.ndarray) and raw_speech.dtype is np.dtype(np.float64):
            raw_speech = raw_speech.astype(np.float32)
        # always return batch
        if not is_batched:
            raw_speech = [raw_speech]
        # extract fbank features
        features = [self._extract_mfsc_features(one_waveform) for one_waveform in raw_speech]
        # convert into correct format for padding
        encoded_inputs = BatchFeature({"input_features": features})
        # Attention mask is always requested here because normalization below needs
        # the valid lengths; the caller's return_attention_mask preference is
        # handled by self.pad via the extractor's configuration.
        padded_inputs = self.pad(
            encoded_inputs,
            padding=padding,
            max_length=max_length,
            truncation=truncation,
            pad_to_multiple_of=pad_to_multiple_of,
            return_attention_mask=True,
            **kwargs,
        )
        # make sure list is in array format
        input_features = padded_inputs.get("input_features")
        if isinstance(input_features[0], list):
            padded_inputs["input_features"] = [np.asarray(feature, dtype=np.float32) for feature in input_features]
        attention_mask = padded_inputs.get("attention_mask")
        if attention_mask is not None:
            padded_inputs["attention_mask"] = [np.asarray(array, dtype=np.int32) for array in attention_mask]
        # Normalize per example; the mask is only passed through when padding was
        # actually applied, so statistics ignore padded rows.
        if self.normalize_means or self.normalize_vars:
            attention_mask = (
                np.array(attention_mask, dtype=np.int32)
                if self._get_padding_strategies(padding, max_length=max_length) is not PaddingStrategy.DO_NOT_PAD
                and padding
                else None
            )
            padded_inputs["input_features"] = self.normalize(
                padded_inputs["input_features"], attention_mask=attention_mask
            )
        if return_tensors is not None:
            padded_inputs = padded_inputs.convert_to_tensors(return_tensors)
        return padded_inputs
|
2881099/dotnetGen_mysql | 851 | MakeCode/Properties/AssemblyInfo.cs | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("NicPetShop")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("NicPetShop")]
[assembly: AssemblyCopyright("版权所有 (C) 2008")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM.
[assembly: Guid("d26d6e70-6297-4f9c-992d-02c5478ca63b")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
2881099/dotnetGen_mysql | 5,451 | MakeCode/Properties/Settings.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace MakeCode.Properties {
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "15.0.1.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
        // Thread-safe singleton created via ApplicationSettingsBase.Synchronized.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
        /// <summary>
        /// Shared, synchronized settings instance for the application.
        /// All settings below are user-scoped persisted UI state of the main form
        /// (text-box contents and check-box states, keyed by control name).
        /// </summary>
        public static Settings Default {
            get {
                return defaultInstance;
            }
        }
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("")]
        public string txtServer_text {
            get {
                return ((string)(this["txtServer_text"]));
            }
            set {
                this["txtServer_text"] = value;
            }
        }
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("")]
        public string txtUsername_text {
            get {
                return ((string)(this["txtUsername_text"]));
            }
            set {
                this["txtUsername_text"] = value;
            }
        }
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("")]
        public string txtPassword_text {
            get {
                return ((string)(this["txtPassword_text"]));
            }
            set {
                this["txtPassword_text"] = value;
            }
        }
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("")]
        public string txtSolution_text {
            get {
                return ((string)(this["txtSolution_text"]));
            }
            set {
                this["txtSolution_text"] = value;
            }
        }
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("False")]
        public bool chkSolution_checked {
            get {
                return ((bool)(this["chkSolution_checked"]));
            }
            set {
                this["chkSolution_checked"] = value;
            }
        }
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("True")]
        public bool chkIntegrated_Checked {
            get {
                return ((bool)(this["chkIntegrated_Checked"]));
            }
            set {
                this["chkIntegrated_Checked"] = value;
            }
        }
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("False")]
        public bool chkMultiDB_checked {
            get {
                return ((bool)(this["chkMultiDB_checked"]));
            }
            set {
                this["chkMultiDB_checked"] = value;
            }
        }
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("False")]
        public bool chkWebAdmin_checked {
            get {
                return ((bool)(this["chkWebAdmin_checked"]));
            }
            set {
                this["chkWebAdmin_checked"] = value;
            }
        }
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("False")]
        public bool chkDownloadRes_checked {
            get {
                return ((bool)(this["chkDownloadRes_checked"]));
            }
            set {
                this["chkDownloadRes_checked"] = value;
            }
        }
        /// <summary>
        /// Database port as text; defaults to MySQL's standard port 3306.
        /// </summary>
        [global::System.Configuration.UserScopedSettingAttribute()]
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [global::System.Configuration.DefaultSettingValueAttribute("3306")]
        public string txtPort_text {
            get {
                return ((string)(this["txtPort_text"]));
            }
            set {
                this["txtPort_text"] = value;
            }
        }
    }
}
|
2881099/dotnetGen_mysql | 2,443 | MakeCode/Properties/Resources.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace MakeCode.Properties {
    using System;
    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {
        private static global::System.Resources.ResourceManager resourceMan;
        private static global::System.Globalization.CultureInfo resourceCulture;
        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }
        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                if (object.ReferenceEquals(resourceMan, null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("MakeCode.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }
        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }
    }
}
|
27182812/ChatGLM-LLaMA-chinese-insturct | 2,443 | src/transformers/models/codegen/__init__.py | # Copyright 2022 Salesforce authors, The EleutherAI, and HuggingFace Teams. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available
# Submodule name -> public names exported from it; consumed by _LazyModule so
# heavy dependencies are only imported on first attribute access.
_import_structure = {
    "configuration_codegen": ["CODEGEN_PRETRAINED_CONFIG_ARCHIVE_MAP", "CodeGenConfig", "CodeGenOnnxConfig"],
    "tokenization_codegen": ["CodeGenTokenizer"],
}
# The fast tokenizer needs the optional `tokenizers` package; register it only
# when that package is importable, otherwise silently omit it.
try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_codegen_fast"] = ["CodeGenTokenizerFast"]
# Modeling classes require torch; same optional-registration pattern as above.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_codegen"] = [
        "CODEGEN_PRETRAINED_MODEL_ARCHIVE_LIST",
        "CodeGenForCausalLM",
        "CodeGenModel",
        "CodeGenPreTrainedModel",
    ]
# Under static type checking, perform the real (eager) imports so tools see the symbols.
if TYPE_CHECKING:
    from .configuration_codegen import CODEGEN_PRETRAINED_CONFIG_ARCHIVE_MAP, CodeGenConfig, CodeGenOnnxConfig
    from .tokenization_codegen import CodeGenTokenizer
    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_codegen_fast import CodeGenTokenizerFast
    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_codegen import (
            CODEGEN_PRETRAINED_MODEL_ARCHIVE_LIST,
            CodeGenForCausalLM,
            CodeGenModel,
            CodeGenPreTrainedModel,
        )
# At runtime, replace this module with a lazy proxy that imports submodules on demand.
else:
    import sys
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
|
2881099/dotnetGen_sqlserver | 7,753 | MakeCode/ClientSocket.cs | using System;
using System.IO;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
/// <summary>
/// TCP client that exchanges <c>SocketMessager</c> frames with a server.
/// A background thread reads incoming messages (framing is implemented by
/// <c>BaseSocket.Read</c>/<c>BaseSocket.Write</c> — see that class), answers the
/// server handshake, keeps the link alive with periodic test messages, and
/// dispatches replies either to a per-request handler registered by
/// <see cref="Write(SocketMessager, ClientSocketReceiveEventHandler, TimeSpan)"/>
/// or to the general <see cref="Receive"/> event.
/// </summary>
public class ClientSocket : BaseSocket, IDisposable {
	private bool _isDisposed;
	private IPEndPoint _remotePoint;                 // resolved server endpoint
	private TcpClient _tcpClient;
	private Thread _thread;                          // background read loop
	private bool _running;                           // loop flag; cleared on error/Close
	private int _receives;                           // count of messages received so far
	private int _errors;                             // running error count (reported in error events)
	private object _errors_lock = new object();
	private object _write_lock = new object();       // serializes writes to the NetworkStream
	private Dictionary<int, SyncReceive> _receiveHandlers = new Dictionary<int, SyncReceive>();
	private object _receiveHandlers_lock = new object();
	private DateTime _lastActive;                    // last time data was read or written (drives keepalive)
	/// <summary>Raised (on a worker thread) after the read loop stops and the connection is closed.</summary>
	public event ClientSocketClosedEventHandler Closed;
	/// <summary>Raised for incoming messages that no pending request handler claims.</summary>
	public event ClientSocketReceiveEventHandler Receive;
	/// <summary>Raised whenever an exception occurs in the read loop, a write, or a handler.</summary>
	public event ClientSocketErrorEventHandler Error;
	/// <summary>
	/// Resolves <paramref name="hostname"/>, connects, and starts the background
	/// read loop. No-op if already running or disposed. Connection failures are
	/// reported via <see cref="Error"/> and <see cref="Closed"/> rather than thrown.
	/// </summary>
	public void Connect(string hostname, int port) {
		if (this._isDisposed == false && this._running == false) {
			this._running = true;
			try {
				IPAddress[] ips = Dns.GetHostAddresses(hostname);
				if (ips.Length == 0) throw new Exception("无法解析“" + hostname + "”");
				this._remotePoint = new IPEndPoint(ips[0], port);
				this._tcpClient = new TcpClient();
				this._tcpClient.Connect(this._remotePoint);
			} catch (Exception ex) {
				this._running = false;
				this.OnError(ex);
				this.OnClosed();
				return;
			}
			this._receives = 0;
			this._errors = 0;
			this._lastActive = DateTime.Now;
			this._thread = new Thread(delegate() {
				while (this._running) {
					try {
						NetworkStream ns = this._tcpClient.GetStream();
						ns.ReadTimeout = 1000 * 20; // 20s: a blocked frame read gives up and surfaces as an error
						if (ns.DataAvailable) {
							SocketMessager messager = base.Read(ns);
							if (string.Compare(messager.Action, SocketMessager.SYS_TEST_LINK.Action) == 0) {
								// Keepalive probe from the server: nothing to do.
							} else if (this._receives == 0 &&
								string.Compare(messager.Action, SocketMessager.SYS_HELLO_WELCOME.Action) == 0) {
								// Handshake: echo the first welcome message back to the server.
								this._receives++;
								this.Write(messager);
							} else if (string.Compare(messager.Action, SocketMessager.SYS_ACCESS_DENIED.Action) == 0) {
								// Server refused us; throwing here stops the loop and closes the socket.
								throw new Exception(SocketMessager.SYS_ACCESS_DENIED.Action);
							} else {
								ClientSocketReceiveEventArgs e = new ClientSocketReceiveEventArgs(this._receives++, messager);
								SyncReceive receive = null;
								// Messager.Id presumably correlates a reply with the pending
								// Write(...) that registered a handler — confirm in BaseSocket/SocketMessager.
								if (this._receiveHandlers.TryGetValue(messager.Id, out receive)) {
									// Run the reply handler on its own thread so the read loop is never blocked.
									new Thread(delegate() {
										try {
											receive.ReceiveHandler(this, e);
										} catch (Exception ex) {
											this.OnError(ex);
										} finally {
											receive.Wait.Set(); // wake the Write(...) call waiting on this reply
										}
									}).Start();
								} else if (this.Receive != null) {
									// Unsolicited message: raise the public Receive event off-thread.
									new Thread(delegate() {
										this.OnReceive(e);
									}).Start();
								}
							}
							this._lastActive = DateTime.Now;
						} else {
							// Idle: ping the server if nothing has moved for more than 3 seconds.
							TimeSpan ts = DateTime.Now - _lastActive;
							if (ts.TotalSeconds > 3) {
								this.Write(SocketMessager.SYS_TEST_LINK);
							}
						}
						if (!ns.DataAvailable) Thread.CurrentThread.Join(1); // yield briefly instead of spinning
					} catch (Exception ex) {
						// Any failure ends the loop; the error is reported and the socket closed below.
						this._running = false;
						this.OnError(ex);
					}
				}
				this.Close();
				this.OnClosed();
			});
			this._thread.Start();
		}
	}
	/// <summary>
	/// Stops the read loop, closes the TCP connection, and wakes every caller
	/// still blocked in <see cref="Write(SocketMessager, ClientSocketReceiveEventHandler, TimeSpan)"/>.
	/// </summary>
	public void Close() {
		this._running = false;
		if (this._tcpClient != null) {
			this._tcpClient.Close();
		}
		// Snapshot the pending-handler keys. The first copy is attempted without the
		// lock; if the dictionary is mutated concurrently and the copy throws, it is
		// retried under the lock.
		int[] keys = new int[this._receiveHandlers.Count];
		try {
			this._receiveHandlers.Keys.CopyTo(keys, 0);
		} catch {
			lock (this._receiveHandlers_lock) {
				keys = new int[this._receiveHandlers.Count];
				this._receiveHandlers.Keys.CopyTo(keys, 0);
			}
		}
		foreach (int key in keys) {
			SyncReceive receiveHandler = null;
			if (this._receiveHandlers.TryGetValue(key, out receiveHandler)) {
				receiveHandler.Wait.Set(); // release the waiting Write(...) caller
			}
		}
		lock (this._receiveHandlers_lock) {
			this._receiveHandlers.Clear();
		}
	}
	/// <summary>Sends a message without waiting for a reply.</summary>
	public void Write(SocketMessager messager) {
		this.Write(messager, null, TimeSpan.Zero);
	}
	/// <summary>Sends a message and waits up to 20 seconds for the matching reply.</summary>
	public void Write(SocketMessager messager, ClientSocketReceiveEventHandler receiveHandler) {
		this.Write(messager, receiveHandler, TimeSpan.FromSeconds(20));
	}
	/// <summary>
	/// Sends a message. When <paramref name="receiveHandler"/> is non-null it is
	/// registered under <c>messager.Id</c> and this call blocks until the reply
	/// arrives (the handler runs on a worker thread) or <paramref name="timeout"/>
	/// elapses. Errors are reported via <see cref="Error"/>, not thrown.
	/// </summary>
	public void Write(SocketMessager messager, ClientSocketReceiveEventHandler receiveHandler, TimeSpan timeout) {
		SyncReceive syncReceive = null;
		try {
			if (receiveHandler != null) {
				syncReceive = new SyncReceive(receiveHandler);
				lock (this._receiveHandlers_lock) {
					if (!this._receiveHandlers.ContainsKey(messager.Id)) {
						this._receiveHandlers.Add(messager.Id, syncReceive);
					} else {
						this._receiveHandlers[messager.Id] = syncReceive;
					}
				}
			}
			lock (_write_lock) {
				NetworkStream ns = this._tcpClient.GetStream();
				base.Write(ns, messager);
			}
			this._lastActive = DateTime.Now;
			if (syncReceive != null) {
				// Block until the read loop (or Close) signals the wait handle, or the timeout expires.
				syncReceive.Wait.Reset();
				syncReceive.Wait.WaitOne(timeout, false);
				syncReceive.Wait.Set();
				lock (this._receiveHandlers_lock) {
					this._receiveHandlers.Remove(messager.Id);
				}
			}
		} catch (Exception ex) {
			this._running = false;
			this.OnError(ex);
			if (syncReceive != null) {
				syncReceive.Wait.Set();
				lock (this._receiveHandlers_lock) {
					this._receiveHandlers.Remove(messager.Id);
				}
			}
		}
	}
	/// <summary>Raises <see cref="Closed"/> on a worker thread; handler exceptions are routed to <see cref="Error"/>.</summary>
	protected virtual void OnClosed(EventArgs e) {
		if (this.Closed != null) {
			new Thread(delegate() {
				try {
					this.Closed(this, e);
				} catch (Exception ex) {
					this.OnError(ex);
				}
			}).Start();
		}
	}
	protected void OnClosed() {
		this.OnClosed(new EventArgs());
	}
	/// <summary>Raises <see cref="Receive"/> synchronously; handler exceptions are routed to <see cref="Error"/>.</summary>
	protected virtual void OnReceive(ClientSocketReceiveEventArgs e) {
		if (this.Receive != null) {
			try {
				this.Receive(this, e);
			} catch (Exception ex) {
				this.OnError(ex);
			}
		}
	}
	protected virtual void OnError(ClientSocketErrorEventArgs e) {
		if (this.Error != null) {
			this.Error(this, e);
		}
	}
	/// <summary>Increments the error counter (under its lock) and raises <see cref="Error"/>.</summary>
	protected void OnError(Exception ex) {
		int errors = 0;
		lock (this._errors_lock) {
			errors = ++this._errors;
		}
		ClientSocketErrorEventArgs e = new ClientSocketErrorEventArgs(ex, errors);
		this.OnError(e);
	}
	/// <summary>True while the read loop is (or is about to start) running.</summary>
	public bool Running {
		get { return this._running; }
	}
	/// <summary>
	/// Pairs a reply handler with the wait handle its originating Write(...) call
	/// blocks on until the reply is processed or the wait times out.
	/// </summary>
	class SyncReceive : IDisposable {
		private ClientSocketReceiveEventHandler _receiveHandler;
		private ManualResetEvent _wait;
		public SyncReceive(ClientSocketReceiveEventHandler receiveHandler) {
			this._receiveHandler = receiveHandler;
			this._wait = new ManualResetEvent(false);
		}
		public ClientSocketReceiveEventHandler ReceiveHandler {
			get { return _receiveHandler; }
		}
		public ManualResetEvent Wait {
			get { return _wait; }
		}
		#region IDisposable 成员
		public void Dispose() {
			this._wait.Set(); // release any waiter before closing the handle
			this._wait.Close();
		}
		#endregion
	}
	#region IDisposable 成员
	public void Dispose() {
		this._isDisposed = true;
		this.Close();
	}
	#endregion
}
/// <summary>Raised after the client's read loop stops and the connection is closed.</summary>
public delegate void ClientSocketClosedEventHandler(object sender, EventArgs e);
/// <summary>Raised when an exception occurs in the read loop, a write, or an event handler.</summary>
public delegate void ClientSocketErrorEventHandler(object sender, ClientSocketErrorEventArgs e);
/// <summary>Raised when a message arrives; also the shape of per-request reply handlers.</summary>
public delegate void ClientSocketReceiveEventHandler(object sender, ClientSocketReceiveEventArgs e);
/// <summary>
/// Event data for <see cref="ClientSocketErrorEventHandler"/>: the exception that
/// occurred and the socket's running error count at the time it was raised.
/// </summary>
public class ClientSocketErrorEventArgs : EventArgs {
	private readonly Exception _error;
	private readonly int _errorCount;
	/// <summary>Creates the event data.</summary>
	/// <param name="exception">The exception that was raised.</param>
	/// <param name="errors">Total number of errors seen so far on this socket.</param>
	public ClientSocketErrorEventArgs(Exception exception, int errors) {
		_error = exception;
		_errorCount = errors;
	}
	/// <summary>Total number of errors seen so far on this socket.</summary>
	public int Errors {
		get { return _errorCount; }
	}
	/// <summary>The exception that triggered this event.</summary>
	public Exception Exception {
		get { return _error; }
	}
}
/// <summary>
/// Event data for <see cref="ClientSocketReceiveEventHandler"/>: the received
/// message and its receive sequence number on this connection.
/// </summary>
public class ClientSocketReceiveEventArgs : EventArgs {
	private readonly int _receiveIndex;
	private readonly SocketMessager _message;
	/// <summary>Creates the event data.</summary>
	/// <param name="receives">Receive sequence number of this message.</param>
	/// <param name="messager">The message that was received.</param>
	public ClientSocketReceiveEventArgs(int receives, SocketMessager messager) {
		_receiveIndex = receives;
		_message = messager;
	}
	/// <summary>Receive sequence number of this message on the connection.</summary>
	public int Receives {
		get { return _receiveIndex; }
	}
	/// <summary>The message that was received.</summary>
	public SocketMessager Messager {
		get { return _message; }
	}
}
2881099/dotnetGen_sqlserver | 4,497 | ServerWinService/ServerWinService.csproj | <?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="14.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.50727</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{551011E1-3310-44F2-A7F1-61F559441F66}</ProjectGuid>
<OutputType>WinExe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>ServerWinService</RootNamespace>
<AssemblyName>ServerWinService</AssemblyName>
<SccProjectName>
</SccProjectName>
<SccLocalPath>
</SccLocalPath>
<SccAuxPath>
</SccAuxPath>
<SccProvider>
</SccProvider>
<FileUpgradeFlags>
</FileUpgradeFlags>
<UpgradeBackupLocation>
</UpgradeBackupLocation>
<OldToolsVersion>3.5</OldToolsVersion>
<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
<PublishUrl>publish\</PublishUrl>
<Install>true</Install>
<InstallFrom>Disk</InstallFrom>
<UpdateEnabled>false</UpdateEnabled>
<UpdateMode>Foreground</UpdateMode>
<UpdateInterval>7</UpdateInterval>
<UpdateIntervalUnits>Days</UpdateIntervalUnits>
<UpdatePeriodically>false</UpdatePeriodically>
<UpdateRequired>false</UpdateRequired>
<MapFileExtensions>true</MapFileExtensions>
<ApplicationRevision>0</ApplicationRevision>
<ApplicationVersion>1.0.0.%2a</ApplicationVersion>
<IsWebBootstrapper>false</IsWebBootstrapper>
<UseApplicationTrust>false</UseApplicationTrust>
<BootstrapperEnabled>true</BootstrapperEnabled>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Configuration.Install" />
<Reference Include="System.Data" />
<Reference Include="System.Runtime.Remoting" />
<Reference Include="System.ServiceProcess" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="Install1.cs">
<SubType>Component</SubType>
</Compile>
<Compile Include="Service1.cs">
<SubType>Component</SubType>
</Compile>
<Compile Include="Service1.Designer.cs">
<DependentUpon>Service1.cs</DependentUpon>
</Compile>
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Settings.Designer.cs">
<DependentUpon>Settings.settings</DependentUpon>
<AutoGen>True</AutoGen>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
</ItemGroup>
<ItemGroup>
<None Include="app.config" />
<None Include="Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
</ItemGroup>
<ItemGroup>
<BootstrapperPackage Include="Microsoft.Net.Framework.3.5.SP1">
<Visible>False</Visible>
<ProductName>.NET Framework 3.5 SP1</ProductName>
<Install>true</Install>
</BootstrapperPackage>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj">
<Project>{f0054101-9ac9-4e0e-9e78-44ea89fc5c19}</Project>
<Name>Common</Name>
</ProjectReference>
<ProjectReference Include="..\Server\Server.csproj">
<Project>{efe1f5d6-ab1f-4fa6-8e10-9b8a197b31c7}</Project>
<Name>Server</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> |
2881099/dotnetGen_postgresql | 5,159 | Server/Resources/Infrastructure/Controllers/BaseController.cs | using Microsoft.AspNetCore.Cors;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using System;
using System.Collections;
using System.Linq;
using System.Threading.Tasks;
/// <summary>
/// Base class for the project's MVC controllers: applies the shared exception
/// filter and the "cors_all" CORS policy, exposes request/session shortcuts,
/// and rejects requests with invalid model state before any action runs.
/// </summary>
[ServiceFilter(typeof(CustomExceptionFilter)), EnableCors("cors_all")]
public partial class BaseController : Controller {
	// NOTE(review): public mutable field with a private-style name; kept as-is
	// because derived controllers may assign it directly.
	public ILogger _logger;
	/// <summary>Shortcut for the current request's session.</summary>
	public ISession Session { get { return HttpContext.Session; } }
	/// <summary>Shortcut for the current request.</summary>
	public HttpRequest Req { get { return Request; } }
	/// <summary>Shortcut for the current response.</summary>
	public HttpResponse Res { get { return Response; } }
	// Prefers the X-Real-IP header (presumably set by a reverse proxy — confirm
	// deployment) and falls back to the raw connection address.
	public string Ip => this.Request.Headers["X-Real-IP"].FirstOrDefault() ?? this.Request.HttpContext.Connection.RemoteIpAddress.ToString();
	/// <summary>Resolves the application configuration from the request's service provider.</summary>
	public IConfiguration Configuration => (IConfiguration) HttpContext.RequestServices.GetService(typeof(IConfiguration));
	//public SysuserInfo LoginUser { get; private set; }
	public BaseController(ILogger logger) { _logger = logger; }
	/// <summary>
	/// Runs before every action: short-circuits with an APIReturn error result
	/// when model binding/validation failed. (The login-user initialization below
	/// is currently commented out.)
	/// </summary>
	public override void OnActionExecuting(ActionExecutingContext context) {
		#region 参数验证
		// Return the first validation error message rather than running the action.
		if (context.ModelState.IsValid == false)
			foreach (var value in context.ModelState.Values)
				if (value.Errors.Any()) {
					context.Result = APIReturn.参数格式不正确.SetMessage($"参数格式不正确:{value.Errors.First().ErrorMessage}");
					return;
				}
		#endregion
		#region 初始化当前登陆账号
		//string username = Session.GetString("login.username");
		//if (!string.IsNullOrEmpty(username)) LoginUser = Sysuser.GetItemByUsername(username);
		//var method = (context.ActionDescriptor as ControllerActionDescriptor).MethodInfo;
		//if (method.GetCustomAttribute<需要登陆Attribute>() != null && LoginUser == null)
		//	context.Result = new RedirectResult("/signin");
		//else if (method.GetCustomAttribute<匿名访问Attribute>() == null && LoginUser == null)
		//	context.Result = new RedirectResult("/signin");
		//ViewBag.user = LoginUser;
		#endregion
		base.OnActionExecuting(context);
	}
	public override void OnActionExecuted(ActionExecutedContext context) {
		base.OnActionExecuted(context);
	}
	#region 角色权限验证
	//public bool sysrole_check(string url) {
	//	url = url.ToLower();
	//	//Response.Write(url + "<br>");
	//	if (url == "/" || url.IndexOf("/default.aspx") == 0) return true;
	//	foreach(var role in this.LoginUser.Obj_sysroles) {
	//		//Response.Write(role.ToString());
	//		foreach(var dir in role.Obj_sysdirs) {
	//			//Response.Write("-----------------" + dir.ToString() + "<br>");
	//			string tmp = dir.Url;
	//			if (tmp.EndsWith("/")) tmp += "default.aspx";
	//			if (url.IndexOf(tmp) == 0) return true;
	//		}
	//	}
	//	return false;
	//}
	#endregion
}
#region 需要登陆、匿名访问
/// <summary>Marker attribute: the action requires a signed-in user. (Enforcement is currently commented out in BaseController.OnActionExecuting.)</summary>
public partial class 需要登陆Attribute : Attribute { }
/// <summary>Marker attribute: the action may be accessed anonymously.</summary>
public partial class 匿名访问Attribute : Attribute { }
#endregion
#region APIReturn
/// <summary>
/// Standard API result: serialized as {"code","message","data","success"} and
/// usable directly as an MVC action result. Code 0 means success. When the
/// request form carries a "__callback" field (iframe form-post pattern), the
/// result is rendered as a script that invokes that callback instead of JSON.
/// </summary>
[JsonObject(MemberSerialization.OptIn)]
public partial class APIReturn : ContentResult {
	[JsonProperty("code")] public int Code { get; protected set; }
	[JsonProperty("message")] public string Message { get; protected set; }
	// Data is a key/value bag filled by SetData/AppendData.
	[JsonProperty("data")] public Hashtable Data { get; protected set; } = new Hashtable();
	[JsonProperty("success")] public bool Success { get { return this.Code == 0; } }
	public APIReturn() { }
	public APIReturn(int code) { this.SetCode(code); }
	public APIReturn(string message) { this.SetMessage(message); }
	public APIReturn(int code, string message, params object[] data) { this.SetCode(code).SetMessage(message).AppendData(data); }
	/// <summary>Sets the result code; returns this for chaining.</summary>
	public APIReturn SetCode(int value) { this.Code = value; return this; }
	/// <summary>Sets the result message; returns this for chaining.</summary>
	public APIReturn SetMessage(string value) { this.Message = value; return this; }
	/// <summary>Replaces the data bag with the given key/value pairs.</summary>
	public APIReturn SetData(params object[] value) {
		this.Data.Clear();
		return this.AppendData(value);
	}
	/// <summary>
	/// Appends alternating key/value pairs (value[0] is a key, value[1] its value,
	/// and so on). Null keys are skipped; a trailing key without a value maps to null.
	/// NOTE(review): calls with fewer than two elements are ignored entirely.
	/// </summary>
	public APIReturn AppendData(params object[] value) {
		if (value == null || value.Length < 2 || value[0] == null) return this;
		for (int a = 0; a < value.Length; a += 2) {
			if (value[a] == null) continue;
			this.Data[value[a]] = a + 1 < value.Length ? value[a + 1] : null;
		}
		return this;
	}
	#region form 表单 target=iframe 提交回调处理
	// Decides the response shape: plain JSON normally, or an HTML <script> that
	// calls top.<__callback>(...) when the request came from a form posting into
	// a hidden iframe.
	private void Jsonp(ActionContext context) {
		string __callback = context.HttpContext.Request.HasFormContentType ? context.HttpContext.Request.Form["__callback"].ToString() : null;
		if (string.IsNullOrEmpty(__callback)) {
			this.ContentType = "text/json;charset=utf-8;";
			this.Content = JsonConvert.SerializeObject(this);
		}else {
			this.ContentType = "text/html;charset=utf-8";
			this.Content = $"<script>top.{__callback}({GlobalExtensions.Json(null, this)});</script>";
		}
	}
	public override void ExecuteResult(ActionContext context) {
		Jsonp(context);
		base.ExecuteResult(context);
	}
	public override Task ExecuteResultAsync(ActionContext context) {
		Jsonp(context);
		return base.ExecuteResultAsync(context);
	}
	#endregion
	// Common canned results (Chinese names are part of the public API).
	public static APIReturn 成功 { get { return new APIReturn(0, "成功"); } }
	public static APIReturn 失败 { get { return new APIReturn(99, "失败"); } }
	public static APIReturn 记录不存在_或者没有权限 { get { return new APIReturn(98, "记录不存在,或者没有权限"); } }
	public static APIReturn 参数格式不正确 { get { return new APIReturn(97, "参数格式不正确"); } }
}
#endregion
|
27182812/ChatGLM-LLaMA-chinese-insturct | 10,324 | src/transformers/models/codegen/configuration_codegen.py | # coding=utf-8
# Copyright 2022 Salesforce authors, The EleutherAI, and HuggingFace Teams. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" CodeGen model configuration"""
from collections import OrderedDict
from typing import Any, List, Mapping, Optional
from ... import PreTrainedTokenizer, TensorType, is_torch_available
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfigWithPast, PatchingSpec
from ...utils import logging
logger = logging.get_logger(__name__)
CODEGEN_PRETRAINED_CONFIG_ARCHIVE_MAP = {
"Salesforce/codegen-350M-nl": "https://huggingface.co/Salesforce/codegen-350M-nl/resolve/main/config.json",
"Salesforce/codegen-350M-multi": "https://huggingface.co/Salesforce/codegen-350M-multi/resolve/main/config.json",
"Salesforce/codegen-350M-mono": "https://huggingface.co/Salesforce/codegen-350M-mono/resolve/main/config.json",
"Salesforce/codegen-2B-nl": "https://huggingface.co/Salesforce/codegen-2B-nl/resolve/main/config.json",
"Salesforce/codegen-2B-multi": "https://huggingface.co/Salesforce/codegen-2B-multi/resolve/main/config.json",
"Salesforce/codegen-2B-mono": "https://huggingface.co/Salesforce/codegen-2B-mono/resolve/main/config.json",
"Salesforce/codegen-6B-nl": "https://huggingface.co/Salesforce/codegen-6B-nl/resolve/main/config.json",
"Salesforce/codegen-6B-multi": "https://huggingface.co/Salesforce/codegen-6B-multi/resolve/main/config.json",
"Salesforce/codegen-6B-mono": "https://huggingface.co/Salesforce/codegen-6B-mono/resolve/main/config.json",
"Salesforce/codegen-16B-nl": "https://huggingface.co/Salesforce/codegen-16B-nl/resolve/main/config.json",
"Salesforce/codegen-16B-multi": "https://huggingface.co/Salesforce/codegen-16B-multi/resolve/main/config.json",
"Salesforce/codegen-16B-mono": "https://huggingface.co/Salesforce/codegen-16B-mono/resolve/main/config.json",
}
class CodeGenConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`CodeGenModel`]. It is used to instantiate a
    CodeGen model according to the specified arguments, defining the model architecture. Instantiating a configuration
    with the defaults will yield a similar configuration to that of the CodeGen
    [Salesforce/codegen-2B-mono](https://huggingface.co/Salesforce/codegen-2B-mono) architecture. Configuration objects
    inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from
    [`PretrainedConfig`] for more information.
    Args:
        vocab_size (`int`, *optional*, defaults to 50400):
            Vocabulary size of the CodeGen model. Defines the number of different tokens that can be represented by the
            `inputs_ids` passed when calling [`CodeGenModel`].
        n_positions (`int`, *optional*, defaults to 2048):
            The maximum sequence length that this model might ever be used with. Typically set this to something large
            just in case (e.g., 512 or 1024 or 2048).
        n_embd (`int`, *optional*, defaults to 4096):
            Dimensionality of the embeddings and hidden states.
        n_layer (`int`, *optional*, defaults to 28):
            Number of hidden layers in the Transformer encoder.
        n_head (`int`, *optional*, defaults to 16):
            Number of attention heads for each attention layer in the Transformer encoder.
        rotary_dim (`int`, *optional*, defaults to 64):
            Number of dimensions in the embedding that Rotary Position Embedding is applied to.
        n_inner (`int`, *optional*, defaults to None):
            Dimensionality of the inner feed-forward layers. `None` will set it to 4 times n_embd
        activation_function (`str`, *optional*, defaults to `"gelu_new"`):
            Activation function, to be selected in the list `["relu", "silu", "gelu", "tanh", "gelu_new"]`.
        resid_pdrop (`float`, *optional*, defaults to 0.1):
            The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
        embd_pdrop (`int`, *optional*, defaults to 0.1):
            The dropout ratio for the embeddings.
        attn_pdrop (`float`, *optional*, defaults to 0.1):
            The dropout ratio for the attention.
        layer_norm_epsilon (`float`, *optional*, defaults to 1e-5):
            The epsilon to use in the layer normalization layers.
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        use_cache (`bool`, *optional*, defaults to `True`):
            Whether or not the model should return the last key/values attentions (not used by all models).
    Example:
    ```python
    >>> from transformers import CodeGenConfig, CodeGenModel
    >>> # Initializing a CodeGen 6B configuration
    >>> configuration = CodeGenConfig()
    >>> # Initializing a model (with random weights) from the configuration
    >>> model = CodeGenModel(configuration)
    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""
    model_type = "codegen"
    # Maps the generic PretrainedConfig attribute names onto the GPT-style names
    # used by this config (e.g. config.hidden_size -> config.n_embd).
    attribute_map = {
        "max_position_embeddings": "n_positions",
        "hidden_size": "n_embd",
        "num_attention_heads": "n_head",
        "num_hidden_layers": "n_layer",
    }
    def __init__(
        self,
        vocab_size=50400,
        n_positions=2048,
        n_ctx=2048,
        n_embd=4096,
        n_layer=28,
        n_head=16,
        rotary_dim=64,
        n_inner=None,
        activation_function="gelu_new",
        resid_pdrop=0.0,
        embd_pdrop=0.0,
        attn_pdrop=0.0,
        layer_norm_epsilon=1e-5,
        initializer_range=0.02,
        use_cache=True,
        bos_token_id=50256,
        eos_token_id=50256,
        tie_word_embeddings=False,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        # NOTE(review): `n_ctx` is stored alongside `n_positions` but is not in the
        # docstring — presumably kept for backward compatibility; confirm against
        # the modeling code before removing.
        self.n_ctx = n_ctx
        self.n_positions = n_positions
        self.n_embd = n_embd
        self.n_layer = n_layer
        self.n_head = n_head
        self.n_inner = n_inner
        self.rotary_dim = rotary_dim
        self.activation_function = activation_function
        self.resid_pdrop = resid_pdrop
        self.embd_pdrop = embd_pdrop
        self.attn_pdrop = attn_pdrop
        self.layer_norm_epsilon = layer_norm_epsilon
        self.initializer_range = initializer_range
        self.use_cache = use_cache
        self.bos_token_id = bos_token_id
        self.eos_token_id = eos_token_id
        # bos/eos/tie_word_embeddings are also forwarded to PretrainedConfig so the
        # base class picks them up from kwargs-style handling.
        super().__init__(
            bos_token_id=bos_token_id, eos_token_id=eos_token_id, tie_word_embeddings=tie_word_embeddings, **kwargs
        )
# Copied from transformers.models.gpt2.configuration_gpt2.GPT2OnnxConfig
class CodeGenOnnxConfig(OnnxConfigWithPast):
    """ONNX export configuration for CodeGen (mirrors the GPT-2 ONNX config)."""

    def __init__(
        self,
        config: PretrainedConfig,
        task: str = "default",
        patching_specs: List[PatchingSpec] = None,
        use_past: bool = False,
    ):
        super().__init__(config, task=task, patching_specs=patching_specs, use_past=use_past)
        if not getattr(self._config, "pad_token_id", None):
            # TODO: how to do that better?
            # ONNX export requires some pad token id; fall back to 0 when unset.
            self._config.pad_token_id = 0
    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        """Dynamic-axis spec for the exported graph's inputs; adds past key/values when `use_past`."""
        common_inputs = OrderedDict({"input_ids": {0: "batch", 1: "sequence"}})
        if self.use_past:
            self.fill_with_past_key_values_(common_inputs, direction="inputs")
            common_inputs["attention_mask"] = {0: "batch", 1: "past_sequence + sequence"}
        else:
            common_inputs["attention_mask"] = {0: "batch", 1: "sequence"}
        return common_inputs
    @property
    def num_layers(self) -> int:
        return self._config.n_layer
    @property
    def num_attention_heads(self) -> int:
        return self._config.n_head
    def generate_dummy_inputs(
        self,
        tokenizer: PreTrainedTokenizer,
        batch_size: int = -1,
        seq_length: int = -1,
        is_pair: bool = False,
        framework: Optional[TensorType] = None,
    ) -> Mapping[str, Any]:
        """Builds dummy tensors (input_ids, optional past_key_values, attention_mask) for ONNX tracing."""
        common_inputs = super(OnnxConfigWithPast, self).generate_dummy_inputs(
            tokenizer, batch_size=batch_size, seq_length=seq_length, is_pair=is_pair, framework=framework
        )
        # We need to order the input in the way they appears in the forward()
        ordered_inputs = OrderedDict({"input_ids": common_inputs["input_ids"]})
        # Need to add the past_keys
        if self.use_past:
            if not is_torch_available():
                raise ValueError("Cannot generate dummy past_keys inputs without PyTorch installed.")
            else:
                import torch
                batch, seqlen = common_inputs["input_ids"].shape
                # Not using the same length for past_key_values
                past_key_values_length = seqlen + 2
                # One (key, value) pair of zeros per layer, shaped
                # (batch, heads, past_len, head_dim) as the model's cache expects.
                past_shape = (
                    batch,
                    self.num_attention_heads,
                    past_key_values_length,
                    self._config.hidden_size // self.num_attention_heads,
                )
                ordered_inputs["past_key_values"] = [
                    (torch.zeros(past_shape), torch.zeros(past_shape)) for _ in range(self.num_layers)
                ]
        ordered_inputs["attention_mask"] = common_inputs["attention_mask"]
        if self.use_past:
            # Extend the mask so it covers the (all-ones) past positions as well.
            mask_dtype = ordered_inputs["attention_mask"].dtype
            ordered_inputs["attention_mask"] = torch.cat(
                [ordered_inputs["attention_mask"], torch.ones(batch, past_key_values_length, dtype=mask_dtype)], dim=1
            )
        return ordered_inputs
    @property
    def default_onnx_opset(self) -> int:
        return 13
|
2881099/dotnetGen_mysql | 1,594 | MakeCode/Properties/Settings.settings | <?xml version='1.0' encoding='utf-8'?>
<SettingsFile xmlns="http://schemas.microsoft.com/VisualStudio/2004/01/settings" CurrentProfile="(Default)" GeneratedClassNamespace="MakeCode.Properties" GeneratedClassName="Settings">
<Profiles />
<Settings>
<Setting Name="txtServer_text" Type="System.String" Scope="User">
<Value Profile="(Default)" />
</Setting>
<Setting Name="txtUsername_text" Type="System.String" Scope="User">
<Value Profile="(Default)" />
</Setting>
<Setting Name="txtPassword_text" Type="System.String" Scope="User">
<Value Profile="(Default)" />
</Setting>
<Setting Name="txtSolution_text" Type="System.String" Scope="User">
<Value Profile="(Default)" />
</Setting>
<Setting Name="chkSolution_checked" Type="System.Boolean" Scope="User">
<Value Profile="(Default)">False</Value>
</Setting>
<Setting Name="chkIntegrated_Checked" Type="System.Boolean" Scope="User">
<Value Profile="(Default)">True</Value>
</Setting>
<Setting Name="chkMultiDB_checked" Type="System.Boolean" Scope="User">
<Value Profile="(Default)">False</Value>
</Setting>
<Setting Name="chkWebAdmin_checked" Type="System.Boolean" Scope="User">
<Value Profile="(Default)">False</Value>
</Setting>
<Setting Name="chkDownloadRes_checked" Type="System.Boolean" Scope="User">
<Value Profile="(Default)">False</Value>
</Setting>
<Setting Name="txtPort_text" Type="System.String" Scope="User">
<Value Profile="(Default)">3306</Value>
</Setting>
</Settings>
</SettingsFile> |
2881099/dotnetGen_mysql | 863 | ServerWinService/Properties/AssemblyInfo.cs | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General information about this assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with the assembly.
[assembly: AssemblyTitle("ServerWinService")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("ServerWinService")]
[assembly: AssemblyCopyright("版权所有 (C) 2007")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly invisible to COM
// components. If you need to access a type in this assembly from COM, set the
// ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is the ID of the typelib if this project is exposed to COM.
[assembly: Guid("3d782f35-0953-5580-8273-e45d913562b7")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
27182812/ChatGLM-LLaMA-chinese-insturct | 15,109 | src/transformers/models/codegen/tokenization_codegen.py | # coding=utf-8
# Copyright 2022 The Salesforce authors, The Open AI Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization classes for CodeGen"""
import json
import os
from functools import lru_cache
from typing import TYPE_CHECKING, List, Optional, Tuple, Union
import numpy as np
import regex as re
from ...utils import is_tf_available, is_torch_available, logging
if TYPE_CHECKING:
if is_torch_available():
import torch
if is_tf_available():
import tensorflow as tf
from ...tokenization_utils import AddedToken, PreTrainedTokenizer
logger = logging.get_logger(__name__)
VOCAB_FILES_NAMES = {
"vocab_file": "vocab.json",
"merges_file": "merges.txt",
}
PRETRAINED_VOCAB_FILES_MAP = {
"vocab_file": {
"Salesforce/codegen-350M-mono": "https://huggingface.co/Salesforce/codegen-350M-mono/resolve/main/vocab.json",
},
"merges_file": {
"Salesforce/codegen-350M-mono": "https://huggingface.co/Salesforce/codegen-350M-mono/resolve/main/merges.txt",
},
}
PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
"Salesforce/codegen-350M-mono": 2048,
}
@lru_cache()
def bytes_to_unicode():
    """
    Build the reversible byte <-> unicode-character table used by byte-level BPE.

    Each of the 256 byte values maps to a printable unicode character: bytes that
    are already printable latin-1 characters map to themselves, and the remaining
    (whitespace/control) bytes are shifted up into the range starting at U+0100 so
    the BPE code never has to handle characters it barfs on. Keeping the mapping
    reversible avoids needing UNK tokens for arbitrary byte sequences.
    """
    # Byte values that keep their own character: '!'..'~', '¡'..'¬', '®'..'ÿ'.
    keep = (
        list(range(ord("!"), ord("~") + 1))
        + list(range(ord("¡"), ord("¬") + 1))
        + list(range(ord("®"), ord("ÿ") + 1))
    )
    mapping = {code: chr(code) for code in keep}
    # Everything else gets the next free codepoint above 255, in byte order.
    shift = 0
    for code in range(2**8):
        if code not in mapping:
            mapping[code] = chr(2**8 + shift)
            shift += 1
    return mapping
def get_pairs(word):
    """
    Return the set of adjacent symbol pairs in a word.

    Args:
        word: A sequence of symbols (symbols being variable-length strings),
            typically a tuple produced during BPE merging.

    Returns:
        A set of ``(left, right)`` tuples, one per adjacent pair. Empty for words
        with fewer than two symbols. (The previous implementation indexed
        ``word[0]`` unconditionally and raised IndexError on an empty word.)
    """
    # zip(word, word[1:]) walks the sequence pairwise; set() deduplicates repeats.
    return set(zip(word, word[1:]))
class CodeGenTokenizer(PreTrainedTokenizer):
"""
Construct a CodeGen tokenizer. Based on byte-level Byte-Pair-Encoding.
This tokenizer has been trained to treat spaces like parts of the tokens (a bit like sentencepiece) so a word will
be encoded differently whether it is at the beginning of the sentence (without space) or not:
```
>>> from transformers import CodeGenTokenizer
>>> tokenizer = CodeGenTokenizer.from_pretrained("Salesforce/codegen-350M-mono")
>>> tokenizer("Hello world")['input_ids']
[15496, 995]
>>> tokenizer(" Hello world")['input_ids']
[18435, 995]
```
You can get around that behavior by passing `add_prefix_space=True` when instantiating this tokenizer or when you
call it on some text, but since the model was not pretrained this way, it might yield a decrease in performance.
<Tip>
When used with `is_split_into_words=True`, this tokenizer will add a space before each word (even the first one).
</Tip>
This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to
this superclass for more information regarding those methods.
Args:
vocab_file (`str`):
Path to the vocabulary file.
merges_file (`str`):
Path to the merges file.
errors (`str`, *optional*, defaults to `"replace"`):
Paradigm to follow when decoding bytes to UTF-8. See
[bytes.decode](https://docs.python.org/3/library/stdtypes.html#bytes.decode) for more information.
unk_token (`str`, *optional*, defaults to `<|endoftext|>`):
The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
token instead.
bos_token (`str`, *optional*, defaults to `<|endoftext|>`):
The beginning of sequence token.
eos_token (`str`, *optional*, defaults to `<|endoftext|>`):
The end of sequence token.
add_prefix_space (`bool`, *optional*, defaults to `False`):
Whether or not to add an initial space to the input. This allows to treat the leading word just as any
other word. (CodeGen tokenizer detect beginning of words by the preceding space).
"""
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
model_input_names = ["input_ids", "attention_mask"]
def __init__(
    self,
    vocab_file,
    merges_file,
    errors="replace",
    unk_token="<|endoftext|>",
    bos_token="<|endoftext|>",
    eos_token="<|endoftext|>",
    pad_token=None,
    add_prefix_space=False,
    add_bos_token=False,
    **kwargs,
):
    # Normalize string special tokens to AddedToken instances so surrounding
    # whitespace is kept intact when they are matched in text.
    bos_token = AddedToken(bos_token, lstrip=False, rstrip=False) if isinstance(bos_token, str) else bos_token
    eos_token = AddedToken(eos_token, lstrip=False, rstrip=False) if isinstance(eos_token, str) else eos_token
    unk_token = AddedToken(unk_token, lstrip=False, rstrip=False) if isinstance(unk_token, str) else unk_token
    pad_token = AddedToken(pad_token, lstrip=False, rstrip=False) if isinstance(pad_token, str) else pad_token
    super().__init__(
        errors=errors,
        unk_token=unk_token,
        bos_token=bos_token,
        eos_token=eos_token,
        pad_token=pad_token,
        add_prefix_space=add_prefix_space,
        add_bos_token=add_bos_token,
        **kwargs,
    )
    self.add_bos_token = add_bos_token
    # token string -> id mapping, loaded from the JSON vocabulary file
    with open(vocab_file, encoding="utf-8") as vocab_handle:
        self.encoder = json.load(vocab_handle)
    self.decoder = {v: k for k, v in self.encoder.items()}  # id -> token string
    self.errors = errors  # how to handle errors in decoding
    # Byte-level maps: every UTF-8 byte gets a printable unicode stand-in.
    self.byte_encoder = bytes_to_unicode()
    self.byte_decoder = {v: k for k, v in self.byte_encoder.items()}
    # Merge table: lower rank = higher-priority BPE merge. The first line of
    # the merges file is a version header and the last is empty, hence [1:-1].
    with open(merges_file, encoding="utf-8") as merges_handle:
        bpe_merges = merges_handle.read().split("\n")[1:-1]
    bpe_merges = [tuple(merge.split()) for merge in bpe_merges]
    self.bpe_ranks = dict(zip(bpe_merges, range(len(bpe_merges))))
    self.cache = {}  # memoizes bpe() results per pre-token
    self.add_prefix_space = add_prefix_space
    # Should have added re.IGNORECASE so BPE merges can happen for capitalized versions of contractions
    self.pat = re.compile(r"""'s|'t|'re|'ve|'m|'ll|'d| ?\p{L}+| ?\p{N}+| ?[^\s\p{L}\p{N}]+|\s+(?!\S)|\s+""")
@property
def vocab_size(self):
    """Size of the base vocabulary (excludes tokens added after loading)."""
    return len(self.encoder)
def get_vocab(self):
    """Return the full vocabulary (base encoder merged with added tokens)."""
    vocab = dict(self.encoder)
    vocab.update(self.added_tokens_encoder)
    return vocab
def bpe(self, token):
    """Apply byte-pair encoding to one pre-tokenized token.

    Repeatedly merges the best-ranked (lowest rank number) adjacent symbol
    pair until no known merge remains, then returns the resulting symbols
    joined by single spaces. Results are memoized in ``self.cache``.
    """
    if token in self.cache:
        return self.cache[token]
    word = tuple(token)
    pairs = get_pairs(word)
    if not pairs:
        # Single-character token: nothing to merge.
        return token
    while True:
        # Candidate pair with the best merge rank; unknown pairs rank as +inf.
        bigram = min(pairs, key=lambda pair: self.bpe_ranks.get(pair, float("inf")))
        if bigram not in self.bpe_ranks:
            break
        first, second = bigram
        new_word = []
        i = 0
        # Rebuild the word, fusing every occurrence of (first, second).
        while i < len(word):
            try:
                j = word.index(first, i)
            except ValueError:
                # No further occurrence of `first`: keep the tail as-is.
                new_word.extend(word[i:])
                break
            else:
                new_word.extend(word[i:j])
                i = j
            if word[i] == first and i < len(word) - 1 and word[i + 1] == second:
                new_word.append(first + second)
                i += 2
            else:
                new_word.append(word[i])
                i += 1
        new_word = tuple(new_word)
        word = new_word
        if len(word) == 1:
            break
        else:
            pairs = get_pairs(word)
    word = " ".join(word)
    self.cache[token] = word
    return word
def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
    """Prepend the BOS token (when enabled) to each sequence and concatenate."""
    prefix = [self.bos_token_id] if self.add_bos_token else []
    if token_ids_1 is None:
        return prefix + token_ids_0
    return prefix + token_ids_0 + prefix + token_ids_1
def _tokenize(self, text):
"""Tokenize a string."""
bpe_tokens = []
for token in re.findall(self.pat, text):
token = "".join(
self.byte_encoder[b] for b in token.encode("utf-8")
) # Maps all our bytes to unicode strings, avoiding control tokens of the BPE (spaces in our case)
bpe_tokens.extend(bpe_token for bpe_token in self.bpe(token).split(" "))
return bpe_tokens
def _convert_token_to_id(self, token):
"""Converts a token (str) in an id using the vocab."""
return self.encoder.get(token, self.encoder.get(self.unk_token))
def _convert_id_to_token(self, index):
    """Converts an index (integer) to a token (str) using the vocab; returns None for unknown ids."""
    return self.decoder.get(index)
def convert_tokens_to_string(self, tokens):
    """Join byte-level tokens and decode the underlying UTF-8 bytes to text."""
    joined = "".join(tokens)
    raw_bytes = bytearray(self.byte_decoder[char] for char in joined)
    return raw_bytes.decode("utf-8", errors=self.errors)
def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
    """Write the vocabulary (JSON) and the BPE merges file to `save_directory`.

    Returns the `(vocab_file, merge_file)` paths, or None (with an error log)
    when `save_directory` is not an existing directory.
    """
    if not os.path.isdir(save_directory):
        logger.error(f"Vocabulary path ({save_directory}) should be a directory")
        return
    vocab_file = os.path.join(
        save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"]
    )
    merge_file = os.path.join(
        save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["merges_file"]
    )
    with open(vocab_file, "w", encoding="utf-8") as f:
        f.write(json.dumps(self.encoder, indent=2, sort_keys=True, ensure_ascii=False) + "\n")
    index = 0
    with open(merge_file, "w", encoding="utf-8") as writer:
        writer.write("#version: 0.2\n")
        # Emit merges in rank order; warn if the stored ranks are not contiguous.
        for bpe_tokens, token_index in sorted(self.bpe_ranks.items(), key=lambda kv: kv[1]):
            if index != token_index:
                logger.warning(
                    f"Saving vocabulary to {merge_file}: BPE merge indices are not consecutive."
                    " Please check that the tokenizer is not corrupted!"
                )
                index = token_index
            writer.write(" ".join(bpe_tokens) + "\n")
            index += 1
    return vocab_file, merge_file
def prepare_for_tokenization(self, text, is_split_into_words=False, **kwargs):
add_prefix_space = kwargs.pop("add_prefix_space", self.add_prefix_space)
if is_split_into_words or add_prefix_space:
text = " " + text
return (text, kwargs)
def decode(
    self,
    token_ids: Union[int, List[int], "np.ndarray", "torch.Tensor", "tf.Tensor"],
    skip_special_tokens: bool = False,
    clean_up_tokenization_spaces: bool = True,
    truncate_before_pattern: Optional[List[str]] = None,
    **kwargs,
) -> str:
    """
    Convert a sequence of ids to a string, with options to drop special tokens,
    clean up tokenization spaces, and truncate the result at given patterns.

    Similar to doing `self.convert_tokens_to_string(self.convert_ids_to_tokens(token_ids))`.

    Args:
        token_ids (`Union[int, List[int], np.ndarray, torch.Tensor, tf.Tensor]`):
            List of tokenized input ids. Can be obtained using the `__call__` method.
        skip_special_tokens (`bool`, *optional*, defaults to `False`):
            Whether or not to remove special tokens in the decoding.
        clean_up_tokenization_spaces (`bool`, *optional*, defaults to `True`):
            Whether or not to clean up the tokenization spaces.
        truncate_before_pattern (`List[str]`, *optional*, defaults to `None`):
            Regular-expression strings used to truncate the returned string, e.g. to
            drop trailing code after a comment marker at the start of a new line. An
            example pattern could be `["^#", re.escape("<|endoftext|>"), "^'''", "\n\n\n"]`.
        kwargs (additional keyword arguments, *optional*):
            Will be passed to the underlying model specific decode method.

    Returns:
        `str`: The decoded sentence.
    """
    text = super()._decode(
        token_ids=token_ids,
        skip_special_tokens=skip_special_tokens,
        clean_up_tokenization_spaces=clean_up_tokenization_spaces,
        **kwargs,
    )

    # Only truncate when a non-empty pattern list is supplied.
    if truncate_before_pattern:
        text = self.truncate(text, truncate_before_pattern)

    return text
def truncate(self, completion, truncate_before_pattern):
    """Cut `completion` before the earliest match of any truncation pattern.

    Also drops everything from the second top-of-line `print` statement or the
    second top-of-line `def` onward, keeping a single code snippet.
    """
    compiled = [re.compile(pattern, re.MULTILINE) for pattern in truncate_before_pattern]

    # Keep only the first top-level print statement / function definition.
    for marker in ("^print", "^def"):
        hits = list(re.finditer(marker, completion, re.MULTILINE))
        if len(hits) > 1:
            completion = completion[: hits[1].start()]

    start_pos = 0
    positions = []
    for terminal in compiled:
        match = terminal.search(completion, start_pos)
        if match:
            positions.append(match.start())

    return completion[: min(positions)] if positions else completion
|
2881099/dotnetGen_postgresql | 1,815 | Common/Model/ForeignKeyInfo.cs | using System;
using System.Text;
using System.Collections.Generic;
namespace Model {
/// <summary>
/// Foreign-key metadata: the owning table/columns and the referenced
/// table/columns (or, before resolution, just the referenced names).
/// NOTE: fields are kept (instead of auto-properties) because [Serializable]
/// binary serialization is field-name sensitive.
/// </summary>
[Serializable]
public class ForeignKeyInfo {
    // Owning side: the table and local columns that form the key.
    private TableInfo _table;
    private List<ColumnInfo> _columns = new List<ColumnInfo>();
    // Referenced side, as resolved objects.
    private TableInfo _referencedTable;
    private List<ColumnInfo> _referencedColumns = new List<ColumnInfo>();
    // Referenced side, as raw names (used before/without object resolution).
    private string _referencedDBName;
    private string _referencedTableName;
    private List<string> _referencedColumnNames = new List<string>();
    private bool _referencedIsPrimaryKey;

    /// <summary>Creates a resolved foreign key between two table objects.</summary>
    public ForeignKeyInfo(TableInfo table, TableInfo referencedTable) {
        _table = table;
        _referencedTable = referencedTable;
    }
    /// <summary>Creates an unresolved foreign key from referenced names only.</summary>
    public ForeignKeyInfo(string referencedSln, string referencedTableName, bool referencedIsPK) {
        _referencedDBName = referencedSln;
        _referencedTableName = referencedTableName;
        _referencedIsPrimaryKey = referencedIsPK;
    }

    /// <summary>Table that declares this foreign key.</summary>
    public TableInfo Table {
        get { return _table; }
        set { _table = value; }
    }
    /// <summary>Local columns participating in the key.</summary>
    public List<ColumnInfo> Columns {
        get { return _columns; }
        set { _columns = value; }
    }
    /// <summary>Table referenced by the key.</summary>
    public TableInfo ReferencedTable {
        get { return _referencedTable; }
        set { _referencedTable = value; }
    }
    /// <summary>Referenced columns, in key order.</summary>
    public List<ColumnInfo> ReferencedColumns {
        get { return _referencedColumns; }
        set { _referencedColumns = value; }
    }
    /// <summary>Database name of the referenced table (unresolved form).</summary>
    public string ReferencedDBName {
        get { return _referencedDBName; }
        set { _referencedDBName = value; }
    }
    /// <summary>Name of the referenced table (unresolved form).</summary>
    public string ReferencedTableName {
        get { return _referencedTableName; }
        set { _referencedTableName = value; }
    }
    /// <summary>Names of the referenced columns (unresolved form).</summary>
    public List<string> ReferencedColumnNames {
        get { return _referencedColumnNames; }
        set { _referencedColumnNames = value; }
    }
    /// <summary>True when the referenced columns are the target's primary key.</summary>
    public bool ReferencedIsPrimaryKey {
        get { return _referencedIsPrimaryKey; }
        set { _referencedIsPrimaryKey = value; }
    }
}
}
|
2881099/dotnetGen_postgresql | 2,050 | Common/Model/ColumnInfo.cs | using System;
using System.Collections.Generic;
using System.Text;
namespace Model {
/// <summary>
/// Column metadata used by the code generator: name, PostgreSQL type info,
/// C# mapping, ordering and key/identity flags.
/// NOTE: fields are kept (instead of auto-properties) because [Serializable]
/// binary serialization is field-name sensitive.
/// </summary>
[Serializable]
public class ColumnInfo {
    private string _name;
    private NpgsqlDbType _type;
    private long _length;
    private string _sqlType;
    private string _csType;
    private DataSort _orderby;
    private bool _isNullable;
    private bool _isIdentity;
    private bool _isClustered;
    private bool _isPrimaryKey;
    // pg_attribute.attndims: number of array dimensions (0 for scalars) — TODO confirm against reader.
    private int _attndims;
    // pg_attribute.attnum: ordinal position of the column — TODO confirm against reader.
    private int _attnum;

    public ColumnInfo() { }
    public ColumnInfo(string name, NpgsqlDbType type, long length, string sqlType, string csType, DataSort orderby, bool isNullable, bool isIdentity, bool isClustered, bool isPrimaryKey, int attndims, int attnum) {
        _name = name;
        _type = type;
        _length = length;
        _sqlType = sqlType;
        _csType = csType;
        _orderby = orderby;
        _isNullable = isNullable;
        _isIdentity = isIdentity;
        _isClustered = isClustered;
        _isPrimaryKey = isPrimaryKey;
        _attndims = attndims;
        _attnum = attnum;
    }

    /// <summary>Column name.</summary>
    public string Name {
        get { return _name; }
        set { _name = value; }
    }
    /// <summary>Npgsql database type of the column.</summary>
    public NpgsqlDbType Type {
        get { return _type; }
        set { _type = value; }
    }
    /// <summary>Declared length/size of the column.</summary>
    public long Length {
        get { return _length; }
        set { _length = value; }
    }
    /// <summary>SQL type name as declared in the database.</summary>
    public string SqlType {
        get { return _sqlType; }
        set { _sqlType = value; }
    }
    /// <summary>Mapped C# type name used in generated code.</summary>
    public string CsType {
        get { return _csType; }
        set { _csType = value; }
    }
    /// <summary>Default sort direction for this column.</summary>
    public DataSort Orderby {
        get { return _orderby; }
        set { _orderby = value; }
    }
    /// <summary>Whether the column accepts NULL.</summary>
    public bool IsNullable {
        get { return _isNullable; }
        set { _isNullable = value; }
    }
    /// <summary>Whether the column is auto-generated (identity/serial).</summary>
    public bool IsIdentity {
        get { return _isIdentity; }
        set { _isIdentity = value; }
    }
    /// <summary>Whether the column belongs to a clustered index.</summary>
    public bool IsClustered {
        get { return _isClustered; }
        set { _isClustered = value; }
    }
    /// <summary>Whether the column is part of the primary key.</summary>
    public bool IsPrimaryKey {
        get { return _isPrimaryKey; }
        set { _isPrimaryKey = value; }
    }
    public int Attndims {
        get { return _attndims; }
        set { _attndims = value; }
    }
    public int Attnum {
        get { return _attnum; }
        set { _attnum = value; }
    }
}
}
|
2881099/dotnetGen_postgresql | 2,676 | Common/Model/TableInfo.cs | using System;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections.Generic;
namespace Model {
/// <summary>
/// Schema metadata for one database table/view: identity, columns, keys,
/// indexes and code-generation naming helpers.
/// NOTE: fields are kept (instead of auto-properties) because [Serializable]
/// binary serialization is field-name sensitive.
/// </summary>
[Serializable]
public class TableInfo {
    private string _id;
    private string _owner;
    private string _name;
    private List<ColumnInfo> _columns = new List<ColumnInfo>();
    private List<List<ColumnInfo>> _uniques = new List<List<ColumnInfo>>();
    private List<List<ColumnInfo>> _indexes = new List<List<ColumnInfo>>();
    private List<ForeignKeyInfo> _foreignKeys = new List<ForeignKeyInfo>();
    private List<ColumnInfo> _identitys = new List<ColumnInfo>();
    private List<ColumnInfo> _clustereds = new List<ColumnInfo>();
    private List<ColumnInfo> _primaryKeys = new List<ColumnInfo>();
    private string _Type;
    private bool _IsOutput;

    public TableInfo(string id, string owner, string name, string type) {
        _id = id;
        _owner = owner;
        _name = name;
        _Type = type;
    }

    /// <summary>
    /// Derives a C# class name from a table name: a leading "public." schema is
    /// stripped; otherwise only the FIRST '.' is replaced by '_'. The result is
    /// prefixed with '_' when it does not start with a letter.
    /// </summary>
    public static string GetClassName(string name) {
        int rr = 0;
        string n = name.StartsWith("public.") ? name.Substring(7) : Regex.Replace(name, @"\.", delegate(Match m) {
            // Only the first match is rewritten; later dots are kept verbatim.
            if (rr++ > 0) return m.Groups[0].Value;
            return "_";
        });
        return char.IsLetter(n, 0) ? n : string.Concat("_", n);
    }
    /// <summary>Returns the name without its schema prefix (text after the first '.').</summary>
    public static string GetEntryName(string name) {
        int idx = name.IndexOf('.');
        return idx == -1 ? name : name.Substring(idx + 1);
    }

    /// <summary>Internal table identifier (e.g. object id).</summary>
    public string Id {
        get { return _id; }
    }
    /// <summary>Schema/owner of the table.</summary>
    public string Owner {
        get { return _owner; }
    }
    /// <summary>Bare table name, without schema.</summary>
    public string Name {
        get { return _name; }
    }
    /// <summary>Generated C# class name, derived from lower-cased owner + name.</summary>
    public string ClassName {
        get {
            return GetClassName(_owner.ToLower() + "." + _name);
        }
    }
    /// <summary>"owner.name", or just the name when the owner is empty.</summary>
    public string FullName {
        get { return string.IsNullOrEmpty(_owner) ? _name : string.Format("{0}.{1}", _owner, _name); }
    }
    /// <summary>Object type string (e.g. table vs. view) as read from the catalog.</summary>
    public string Type {
        get { return _Type; }
    }
    public List<ColumnInfo> Columns {
        get { return _columns; }
    }
    // The getters below previously contained dead empty `if (x == null) { }`
    // blocks (leftover lazy-init stubs); the fields are always initialized
    // inline, so the guards were removed.
    /// <summary>Unique constraints; each entry is the column list of one constraint.</summary>
    public List<List<ColumnInfo>> Uniques {
        get { return _uniques; }
    }
    /// <summary>Indexes; each entry is the column list of one index.</summary>
    public List<List<ColumnInfo>> Indexes {
        get { return _indexes; }
    }
    public List<ForeignKeyInfo> ForeignKeys {
        get { return _foreignKeys; }
    }
    public List<ColumnInfo> PrimaryKeys {
        get { return _primaryKeys; }
    }
    public List<ColumnInfo> Clustereds {
        get { return _clustereds; }
    }
    public List<ColumnInfo> Identitys {
        get { return _identitys; }
    }
    /// <summary>Whether code should be generated for this table.</summary>
    public bool IsOutput {
        get { return _IsOutput; }
        set { _IsOutput = value; }
    }
}
}
|
2881099/dotnetGen_sqlserver | 1,814 | ServerWinService/Settings.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace ServerWinService {
// NOTE(review): designer-generated settings class — comments/edits here will be
// overwritten if the settings designer regenerates the file.
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "15.7.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
    // Singleton instance, wrapped by Synchronized for thread-safe access.
    private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
    /// <summary>Shared settings instance.</summary>
    public static Settings Default {
        get {
            return defaultInstance;
        }
    }
    /// <summary>User-scoped setting; empty by default.</summary>
    [global::System.Configuration.UserScopedSettingAttribute()]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Configuration.DefaultSettingValueAttribute("")]
    public string cs_head {
        get {
            return ((string)(this["cs_head"]));
        }
        set {
            this["cs_head"] = value;
        }
    }
    /// <summary>User-scoped socket port; defaults to 29918.</summary>
    [global::System.Configuration.UserScopedSettingAttribute()]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Configuration.DefaultSettingValueAttribute("29918")]
    public int socket_port {
        get {
            return ((int)(this["socket_port"]));
        }
        set {
            this["socket_port"] = value;
        }
    }
}
}
|
2881099/dotnetGen_sqlserver | 1,248 | ServerWinService/app.config | <?xml version="1.0" encoding="utf-8" ?>
<configuration>
<configSections>
<sectionGroup name="userSettings" type="System.Configuration.UserSettingsGroup, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" >
<section name="ServerWinService.Settings" type="System.Configuration.ClientSettingsSection, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" allowExeDefinition="MachineToLocalUser" requirePermission="false" />
<section name="ServerWinForm.Settings" type="System.Configuration.ClientSettingsSection, System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" allowExeDefinition="MachineToLocalUser" requirePermission="false" />
</sectionGroup>
</configSections>
<userSettings>
<ServerWinService.Settings>
<setting name="cs_head" serializeAs="String">
<value />
</setting>
<setting name="socket_port" serializeAs="String">
<value>29918</value>
</setting>
</ServerWinService.Settings>
<ServerWinForm.Settings>
<setting name="cs_head" serializeAs="String">
<value />
</setting>
<setting name="socket_port" serializeAs="String">
<value>28888</value>
</setting>
</ServerWinForm.Settings>
</userSettings>
</configuration> |
27182812/ChatGLM-LLaMA-chinese-insturct | 31,446 | src/transformers/models/codegen/modeling_codegen.py | # coding=utf-8
# Copyright 2022 Salesforce authors, The EleutherAI, and HuggingFace Teams. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch CodeGen model."""
from typing import Optional, Tuple, Union
import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import CrossEntropyLoss
from ...activations import ACT2FN
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_utils import PreTrainedModel
from ...utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
from .configuration_codegen import CodeGenConfig
logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "Salesforce/codegen-2B-mono"
_CONFIG_FOR_DOC = "CodeGenConfig"
CODEGEN_PRETRAINED_MODEL_ARCHIVE_LIST = [
"Salesforce/codegen-350M-nl",
"Salesforce/codegen-350M-multi",
"Salesforce/codegen-350M-mono",
"Salesforce/codegen-2B-nl",
"Salesforce/codegen-2B-multi",
"Salesforce/codegen-2B-mono",
"Salesforce/codegen-6B-nl",
"Salesforce/codegen-6B-multi",
"Salesforce/codegen-6B-mono",
"Salesforce/codegen-16B-nl",
"Salesforce/codegen-16B-multi",
"Salesforce/codegen-16B-mono",
# See all CodeGen models at https://huggingface.co/models?filter=codegen
]
# Copied from transformers.models.gptj.modeling_gptj.fixed_pos_embedding
def fixed_pos_embedding(x, seq_dim=1, seq_len=None):
    """Build the (sin, cos) tables used for rotary position embeddings.

    Args:
        x: tensor whose last dimension is the rotary feature dimension.
        seq_dim: axis of `x` that holds the sequence length.
        seq_len: explicit sequence length; defaults to `x.shape[seq_dim]`.

    Returns:
        `(sin, cos)` tensors of shape `(seq_len, dim // 2)`.
    """
    rotary_dim = x.shape[-1]
    if seq_len is None:
        seq_len = x.shape[seq_dim]
    # One inverse frequency per pair of channels.
    inv_freq = 1.0 / (10000 ** (torch.arange(0, rotary_dim, 2) / rotary_dim))
    positions = torch.arange(seq_len, dtype=torch.float)
    # Outer product position x frequency -> rotation angles.
    angles = torch.einsum("i , j -> i j", positions, inv_freq).to(x.device).float()
    return torch.sin(angles), torch.cos(angles)
# Copied from transformers.models.gptj.modeling_gptj.rotate_every_two
def rotate_every_two(x):
    """Rotate channel pairs along the last dim: (x1, x2) -> (-x2, x1)."""
    evens = x[:, :, :, ::2]
    odds = x[:, :, :, 1::2]
    rotated = torch.stack((-odds, evens), dim=-1)
    # Interleave the pairs back: '... d j -> ... (d j)'.
    return rotated.flatten(-2)
# Copied from transformers.models.gptj.modeling_gptj.duplicate_interleave
def duplicate_interleave(m):
    """Duplicate each element of a 2-D matrix along dim 1, interleaved.

    A simple version of `torch.repeat_interleave(m, 2, dim=1)` that avoids the
    general-purpose op.
    """
    rows = m.shape[0]
    # Flatten to a column, double each entry side by side, then fold back.
    doubled = m.view(-1, 1).repeat(1, 2)
    return doubled.view(rows, -1)
# Copied from transformers.models.gptj.modeling_gptj.apply_rotary_pos_emb
def apply_rotary_pos_emb(x, sincos, offset=0):
    """Apply rotary position embeddings to `x`.

    `sincos` is the `(sin, cos)` pair from `fixed_pos_embedding`; `offset`
    shifts the starting position (non-zero when decoding with a past cache).
    Each table is expanded from (seq, dim//2) to (1, seq, 1, dim) by
    interleaved duplication so it broadcasts against `x`.
    """
    sin, cos = (duplicate_interleave(t)[None, offset : x.shape[1] + offset, None, :] for t in sincos)
    # einsum notation for lambda t: repeat(t[offset:x.shape[1]+offset,:], "n d -> () n () (d j)", j=2)
    return (x * cos) + (rotate_every_two(x) * sin)
class CodeGenAttention(nn.Module):
    """Causal self-attention with rotary position embeddings (CodeGen)."""

    def __init__(self, config):
        super().__init__()

        max_positions = config.max_position_embeddings
        # Lower-triangular boolean mask enforcing left-to-right attention.
        self.register_buffer(
            "causal_mask",
            torch.tril(torch.ones((max_positions, max_positions), dtype=torch.bool)).view(
                1, 1, max_positions, max_positions
            ),
        )

        self.attn_dropout = nn.Dropout(config.attn_pdrop)
        self.resid_dropout = nn.Dropout(config.resid_pdrop)

        self.embed_dim = config.hidden_size
        self.num_attention_heads = config.num_attention_heads
        self.head_dim = self.embed_dim // self.num_attention_heads
        if self.head_dim * self.num_attention_heads != self.embed_dim:
            raise ValueError(
                f"embed_dim must be divisible by num_attention_heads (got `embed_dim`: {self.embed_dim} and"
                f" `num_attention_heads`: {self.num_attention_heads})."
            )
        # sqrt(head_dim) scaling factor, pre-cast to the default dtype.
        self.scale_attn = torch.sqrt(torch.tensor(self.head_dim, dtype=torch.float32)).to(torch.get_default_dtype())
        # Single fused projection producing query, key and value (no bias).
        self.qkv_proj = nn.Linear(self.embed_dim, self.embed_dim * 3, bias=False)

        self.out_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=False)
        # When set, rotary embeddings are applied only to the first
        # `rotary_dim` channels of each head.
        self.rotary_dim = None
        if config.rotary_dim is not None:
            self.rotary_dim = config.rotary_dim

    def _split_heads(self, x, n_head, dim_head, mp_num):
        """Reshape (..., mp_num, local) into (..., n_head-grouped, dim_head)."""
        reshaped = x.reshape(x.shape[:-1] + (n_head // mp_num, dim_head))
        reshaped = reshaped.reshape(x.shape[:-2] + (-1,) + reshaped.shape[-1:])
        return reshaped

    def _merge_heads(self, tensor, num_attention_heads, attn_head_size):
        """
        Merges attn_head_size dim and num_attn_heads dim into n_ctx
        """
        if len(tensor.shape) == 5:
            tensor = tensor.permute(0, 1, 3, 2, 4).contiguous()
        elif len(tensor.shape) == 4:
            tensor = tensor.permute(0, 2, 1, 3).contiguous()
        else:
            raise ValueError(f"Input tensor rank should be one of [4, 5], but is: {len(tensor.shape)}")
        new_shape = tensor.size()[:-2] + (num_attention_heads * attn_head_size,)
        return tensor.view(new_shape)

    def _attn(
        self,
        query,
        key,
        value,
        attention_mask=None,
        head_mask=None,
    ):
        # compute causal mask from causal mask buffer
        query_length, key_length = query.size(-2), key.size(-2)
        causal_mask = self.causal_mask[:, :, key_length - query_length : key_length, :key_length]

        # Keep the attention weights computation in fp32 to avoid overflow issues
        query = query.to(torch.float32)
        key = key.to(torch.float32)

        attn_weights = torch.matmul(query, key.transpose(-1, -2))

        attn_weights = attn_weights / self.scale_attn
        # Masked positions get the dtype's minimum (≈ -inf before softmax).
        mask_value = torch.finfo(attn_weights.dtype).min
        # Need to be a tensor, otherwise we get error: `RuntimeError: expected scalar type float but found double`.
        # Need to be on the same device, otherwise `RuntimeError: ..., x and y to be on the same device`
        mask_value = torch.tensor(mask_value, dtype=attn_weights.dtype).to(attn_weights.device)
        attn_weights = torch.where(causal_mask, attn_weights, mask_value)

        if attention_mask is not None:
            # Apply the attention mask
            attn_weights = attn_weights + attention_mask

        attn_weights = nn.Softmax(dim=-1)(attn_weights)
        attn_weights = attn_weights.to(value.dtype)
        attn_weights = self.attn_dropout(attn_weights)

        # Mask heads if we want to
        if head_mask is not None:
            attn_weights = attn_weights * head_mask

        attn_output = torch.matmul(attn_weights, value)

        return attn_output, attn_weights

    def forward(
        self,
        hidden_states: Optional[torch.FloatTensor],
        attention_mask: Optional[torch.FloatTensor] = None,
        layer_past: Optional[Tuple[torch.Tensor]] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = False,
        output_attentions: Optional[bool] = False,
    ) -> Union[
        Tuple[torch.Tensor, Tuple[torch.Tensor]],
        Optional[Tuple[torch.Tensor, Tuple[torch.Tensor], Tuple[torch.Tensor, ...]]],
    ]:
        qkv = self.qkv_proj(hidden_states)
        # TODO(enijkamp): factor out number of logical TPU-v4 cores or make forward pass agnostic
        mp_num = 4
        qkv_split = qkv.reshape(qkv.shape[:-1] + (mp_num, -1))

        local_dim = self.head_dim * self.num_attention_heads // mp_num
        # NOTE: the fused projection is laid out query, value, key per mp shard.
        query, value, key = torch.split(qkv_split, local_dim, dim=-1)
        query = self._split_heads(query, self.num_attention_heads, self.head_dim, mp_num=mp_num)
        key = self._split_heads(key, self.num_attention_heads, self.head_dim, mp_num=mp_num)

        value = self._split_heads(value, self.num_attention_heads, self.head_dim, mp_num=mp_num)
        value = value.permute(0, 2, 1, 3)

        seq_len = key.shape[1]
        offset = 0

        if layer_past is not None:
            # Continue position numbering after the cached keys.
            offset = layer_past[0].shape[-2]
            seq_len += offset

        if self.rotary_dim is not None:
            # Rotary embedding on the first `rotary_dim` channels only;
            # the remaining channels pass through unchanged.
            k_rot = key[:, :, :, : self.rotary_dim]
            k_pass = key[:, :, :, self.rotary_dim :]

            q_rot = query[:, :, :, : self.rotary_dim]
            q_pass = query[:, :, :, self.rotary_dim :]

            sincos = fixed_pos_embedding(k_rot, 1, seq_len=seq_len)
            k_rot = apply_rotary_pos_emb(k_rot, sincos, offset=offset)
            q_rot = apply_rotary_pos_emb(q_rot, sincos, offset=offset)

            key = torch.cat([k_rot, k_pass], dim=-1)
            query = torch.cat([q_rot, q_pass], dim=-1)
        else:
            sincos = fixed_pos_embedding(key, 1, seq_len=seq_len)
            key = apply_rotary_pos_emb(key, sincos, offset=offset)
            query = apply_rotary_pos_emb(query, sincos, offset=offset)

        key = key.permute(0, 2, 1, 3)
        query = query.permute(0, 2, 1, 3)

        if layer_past is not None:
            # Prepend cached key/value states along the sequence axis.
            past_key = layer_past[0]
            past_value = layer_past[1]
            key = torch.cat((past_key, key), dim=-2)
            value = torch.cat((past_value, value), dim=-2)

        if use_cache is True:
            present = (key, value)
        else:
            present = None

        # compute self-attention: V x Softmax(QK^T)
        attn_output, attn_weights = self._attn(query, key, value, attention_mask, head_mask)

        attn_output = self._merge_heads(attn_output, self.num_attention_heads, self.head_dim)
        attn_output = self.out_proj(attn_output)
        attn_output = self.resid_dropout(attn_output)

        outputs = (attn_output, present)
        if output_attentions:
            outputs += (attn_weights,)

        return outputs  # a, present, (attentions)
# Copied from transformers.models.gptj.modeling_gptj.GPTJMLP with GPTJ->CodeGen
class CodeGenMLP(nn.Module):
    """Position-wise feed-forward block: Linear -> activation -> Linear -> dropout."""

    def __init__(self, intermediate_size, config):  # in MLP: intermediate_size= 4 * embed_dim
        super().__init__()
        embed_dim = config.n_embd

        self.fc_in = nn.Linear(embed_dim, intermediate_size)
        self.fc_out = nn.Linear(intermediate_size, embed_dim)

        self.act = ACT2FN[config.activation_function]
        self.dropout = nn.Dropout(config.resid_pdrop)

    def forward(self, hidden_states: Optional[torch.FloatTensor]) -> torch.FloatTensor:
        projected = self.act(self.fc_in(hidden_states))
        return self.dropout(self.fc_out(projected))
# Copied from transformers.models.gptj.modeling_gptj.GPTJBlock with GPTJ->CodeGen
class CodeGenBlock(nn.Module):
    """Transformer block with a parallel residual: the attention and the MLP
    both consume the same LayerNorm output, and their results are summed with
    the residual (GPT-J style)."""

    def __init__(self, config):
        super().__init__()
        inner_dim = config.n_inner if config.n_inner is not None else 4 * config.n_embd
        self.ln_1 = nn.LayerNorm(config.n_embd, eps=config.layer_norm_epsilon)
        self.attn = CodeGenAttention(config)
        self.mlp = CodeGenMLP(inner_dim, config)

    def forward(
        self,
        hidden_states: Optional[torch.FloatTensor],
        layer_past: Optional[Tuple[torch.Tensor]] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = False,
        output_attentions: Optional[bool] = False,
    ) -> Union[Tuple[torch.Tensor], Optional[Tuple[torch.Tensor, Tuple[torch.FloatTensor, ...]]]]:
        residual = hidden_states
        hidden_states = self.ln_1(hidden_states)
        attn_outputs = self.attn(
            hidden_states,
            layer_past=layer_past,
            attention_mask=attention_mask,
            head_mask=head_mask,
            use_cache=use_cache,
            output_attentions=output_attentions,
        )
        attn_output = attn_outputs[0]  # output_attn: a, present, (attentions)
        outputs = attn_outputs[1:]

        # MLP runs on the normalized input (not on the attention output).
        feed_forward_hidden_states = self.mlp(hidden_states)
        hidden_states = attn_output + feed_forward_hidden_states + residual

        if use_cache:
            outputs = (hidden_states,) + outputs
        else:
            outputs = (hidden_states,) + outputs[1:]

        return outputs  # hidden_states, present, (attentions)
class CodeGenPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = CodeGenConfig
    base_model_prefix = "transformer"
    supports_gradient_checkpointing = True
    _no_split_modules = ["CodeGenBlock"]

    def __init__(self, *inputs, **kwargs):
        super().__init__(*inputs, **kwargs)

    def _init_weights(self, module):
        """Initialize the weights."""
        if isinstance(module, (nn.Linear,)):
            # Slightly different from Mesh Transformer JAX which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            # LayerNorm starts as the identity transform.
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

    def _set_gradient_checkpointing(self, module, value=False):
        # Toggles checkpointing on the base CodeGenModel (flag is read in its forward).
        if isinstance(module, CodeGenModel):
            module.gradient_checkpointing = value
CODEGEN_START_DOCSTRING = r"""
This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) sub-class. Use
it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and
behavior.
Parameters:
config ([`CodeGenConfig`]): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""
# Docstring template for the model's forward methods; `{0}` is filled in with
# the expected input shape. Fixes the "AutoProcenizer" typo -> "AutoTokenizer".
CODEGEN_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`torch.LongTensor` of shape `({0})`):
            Indices of input sequence tokens in the vocabulary.
            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.
            [What are input IDs?](../glossary#input-ids)
        attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
            [What are attention masks?](../glossary#attention-mask)
        token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*):
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
            1]`:
            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.
            [What are token type IDs?](../glossary#token-type-ids)
        position_ids (`torch.LongTensor` of shape `({0})`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.n_positions - 1]`.
            [What are position IDs?](../glossary#position-ids)
        head_mask (`torch.FloatTensor` of shape `(num_attention_heads,)` or `(n_layer, num_attention_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:
            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_dim)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert *input_ids* indices into associated vectors than the
            model's internal embedding lookup matrix.
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""
@add_start_docstrings(
    "The bare CodeGen Model transformer outputting raw hidden-states without any specific head on top.",
    CODEGEN_START_DOCSTRING,
)
class CodeGenModel(CodeGenPreTrainedModel):
    """Bare CodeGen decoder stack without a task head.

    Pipeline: token embedding (`wte`) -> dropout -> `n_layer` `CodeGenBlock`s
    -> final `LayerNorm` (`ln_f`). Returns raw hidden states, plus the
    key/value cache, all hidden states and attentions when requested.
    """

    def __init__(self, config):
        super().__init__(config)
        self.embed_dim = config.n_embd
        self.vocab_size = config.vocab_size
        # Token embedding table; forward() also reuses it to embed token_type_ids.
        self.wte = nn.Embedding(config.vocab_size, self.embed_dim)
        self.drop = nn.Dropout(config.embd_pdrop)
        self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)])
        self.ln_f = nn.LayerNorm(self.embed_dim, eps=config.layer_norm_epsilon)
        # Cap rotary dims at the per-head width (n_ctx // num_attention_heads);
        # presumably consumed by attention inside CodeGenBlock -- not visible here.
        self.rotary_dim = min(config.rotary_dim, config.n_ctx // config.num_attention_heads)
        self.gradient_checkpointing = False
        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        # Standard accessor used by embedding-resizing / weight-tying machinery.
        return self.wte

    def set_input_embeddings(self, new_embeddings):
        self.wte = new_embeddings

    @add_start_docstrings_to_model_forward(CODEGEN_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=BaseModelOutputWithPast,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, BaseModelOutputWithPast]:
        # Unset flags fall back to the model config defaults.
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        use_cache = use_cache if use_cache is not None else self.config.use_cache
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        # Exactly one of input_ids / inputs_embeds must be supplied.
        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = input_ids.size()
            # Collapse any leading dims so ids are (batch_size, seq_len).
            input_ids = input_ids.view(-1, input_shape[-1])
            batch_size = input_ids.shape[0]
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
            batch_size = inputs_embeds.shape[0]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")
        device = input_ids.device if input_ids is not None else inputs_embeds.device
        if token_type_ids is not None:
            token_type_ids = token_type_ids.view(-1, input_shape[-1])
        if position_ids is not None:
            position_ids = position_ids.view(-1, input_shape[-1])
        if past_key_values is None:
            past_length = 0
            # One (empty) past entry per layer so zip() over self.h lines up.
            past_key_values = tuple([None] * len(self.h))
        else:
            # Cached key/value tensors carry the prefix length at dim -2.
            past_length = past_key_values[0][0].size(-2)
        if position_ids is None:
            # Default positions continue from the end of the cached prefix.
            position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
            position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1])
        # Attention mask.
        if attention_mask is not None:
            if batch_size <= 0:
                raise ValueError("batch_size has to be defined and > 0")
            attention_mask = attention_mask.view(batch_size, -1)
            # We create a 3D attention mask from a 2D tensor mask.
            # Sizes are [batch_size, 1, 1, to_seq_length]
            # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
            # this attention mask is more simple than the triangular masking of causal attention
            # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
            attention_mask = attention_mask[:, None, None, :]
            # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
            # masked positions, this operation will create a tensor which is 0.0 for
            # positions we want to attend and the dtype's smallest value for masked positions.
            # Since we are adding it to the raw scores before the softmax, this is
            # effectively the same as removing these entirely.
            attention_mask = attention_mask.to(dtype=self.dtype)  # fp16 compatibility
            attention_mask = (1.0 - attention_mask) * torch.finfo(self.dtype).min
        # Prepare head mask if needed
        # 1.0 in head_mask indicate we keep the head
        # attention_probs has shape bsz x num_attention_heads x N x N
        # head_mask has shape n_layer x batch x num_attention_heads x N x N
        head_mask = self.get_head_mask(head_mask, self.config.n_layer)
        if inputs_embeds is None:
            inputs_embeds = self.wte(input_ids)
        hidden_states = inputs_embeds
        if token_type_ids is not None:
            # Token-type embeddings come from the same table as token embeddings.
            token_type_embeds = self.wte(token_type_ids)
            hidden_states = hidden_states + token_type_embeds
        hidden_states = self.drop(hidden_states)
        # Shape used to restore the original leading dims after the block stack.
        output_shape = input_shape + (hidden_states.size(-1),)
        presents = () if use_cache else None
        all_self_attentions = () if output_attentions else None
        all_hidden_states = () if output_hidden_states else None
        for i, (block, layer_past) in enumerate(zip(self.h, past_key_values)):
            if output_hidden_states:
                # Record the hidden state *entering* each block.
                all_hidden_states = all_hidden_states + (hidden_states,)
            if self.gradient_checkpointing and self.training:
                if use_cache:
                    # Checkpointing recomputes activations, so a cache cannot be kept.
                    logger.warning_once(
                        "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                        "`use_cache=False`..."
                    )
                    use_cache = False

                def create_custom_forward(module):
                    # Closure binds the current use_cache/output_attentions flags.
                    def custom_forward(*inputs):
                        # None for past_key_value
                        return module(*inputs, use_cache, output_attentions)

                    return custom_forward

                outputs = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(block),
                    hidden_states,
                    None,
                    attention_mask,
                    head_mask[i],
                )
            else:
                outputs = block(
                    hidden_states,
                    layer_past=layer_past,
                    attention_mask=attention_mask,
                    head_mask=head_mask[i],
                    use_cache=use_cache,
                    output_attentions=output_attentions,
                )
            hidden_states = outputs[0]
            if use_cache is True:
                presents = presents + (outputs[1],)
            if output_attentions:
                # Attention weights sit at index 2 when a present was also returned.
                all_self_attentions = all_self_attentions + (outputs[2 if use_cache else 1],)
        hidden_states = self.ln_f(hidden_states)
        hidden_states = hidden_states.view(output_shape)
        # Add last hidden state
        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)
        if not return_dict:
            # Tuple output: drop entries that were not requested.
            return tuple(v for v in [hidden_states, presents, all_hidden_states, all_self_attentions] if v is not None)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=presents,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
        )
@add_start_docstrings(
    """
    The CodeGen Model transformer with a language modeling head on top.
    """,
    CODEGEN_START_DOCSTRING,
)
class CodeGenForCausalLM(CodeGenPreTrainedModel):
    """CodeGen decoder plus a linear LM head projecting hidden states to vocab logits."""

    # Checkpoints may omit the per-layer causal_mask buffers; don't warn on load.
    _keys_to_ignore_on_load_missing = [r"h\.\d+\.attn\.causal_mask"]

    def __init__(self, config):
        super().__init__(config)
        self.transformer = CodeGenModel(config)
        self.lm_head = nn.Linear(config.n_embd, config.vocab_size)
        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        # Accessor used by the weight-tying / embedding-resizing machinery.
        return self.lm_head

    def set_output_embeddings(self, new_embeddings):
        self.lm_head = new_embeddings

    def prepare_inputs_for_generation(self, input_ids, past_key_values=None, **kwargs):
        """Trims inputs for incremental decoding and builds position_ids on the fly."""
        token_type_ids = kwargs.get("token_type_ids", None)
        # only last token for inputs_ids if past is defined in kwargs
        if past_key_values:
            input_ids = input_ids[:, -1].unsqueeze(-1)
            if token_type_ids is not None:
                token_type_ids = token_type_ids[:, -1].unsqueeze(-1)
        attention_mask = kwargs.get("attention_mask", None)
        position_ids = kwargs.get("position_ids", None)
        if attention_mask is not None and position_ids is None:
            # create position_ids on the fly for batch generation
            position_ids = attention_mask.long().cumsum(-1) - 1
            # Padding positions get a dummy index of 1; they are masked anyway.
            position_ids.masked_fill_(attention_mask == 0, 1)
            if past_key_values:
                position_ids = position_ids[:, -1].unsqueeze(-1)
        else:
            position_ids = None
        return {
            "input_ids": input_ids,
            "past_key_values": past_key_values,
            "use_cache": kwargs.get("use_cache"),
            "position_ids": position_ids,
            "attention_mask": attention_mask,
            "token_type_ids": token_type_ids,
        }

    @add_start_docstrings_to_model_forward(CODEGEN_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=CausalLMOutputWithPast,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, CausalLMOutputWithPast]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
            `labels = input_ids` Indices are selected in `[-100, 0, ..., config.vocab_size]` All labels set to `-100`
            are ignored (masked), the loss is only computed for labels in `[0, ..., config.vocab_size]`
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        transformer_outputs = self.transformer(
            input_ids,
            past_key_values=past_key_values,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        hidden_states = transformer_outputs[0]
        # make sure sampling in fp16 works correctly and
        # compute loss in fp32 to match with mesh-tf version
        # https://github.com/EleutherAI/gpt-neo/blob/89ce74164da2fb16179106f54e2269b5da8db333/models/gpt2/gpt2.py#L179
        lm_logits = self.lm_head(hidden_states).to(torch.float32)
        loss = None
        if labels is not None:
            # Shift so that tokens < n predict n
            shift_logits = lm_logits[..., :-1, :].contiguous()
            shift_labels = labels[..., 1:].contiguous()
            # Flatten the tokens
            loss_fct = CrossEntropyLoss()
            loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
            # Cast the fp32 loss back to the model's working dtype.
            loss = loss.to(hidden_states.dtype)
        if not return_dict:
            output = (lm_logits,) + transformer_outputs[1:]
            return ((loss,) + output) if loss is not None else output
        return CausalLMOutputWithPast(
            loss=loss,
            logits=lm_logits,
            past_key_values=transformer_outputs.past_key_values,
            hidden_states=transformer_outputs.hidden_states,
            attentions=transformer_outputs.attentions,
        )

    @staticmethod
    def _reorder_cache(
        past_key_values: Tuple[Tuple[torch.Tensor]], beam_idx: torch.Tensor
    ) -> Tuple[Tuple[torch.Tensor]]:
        """
        This function is used to re-order the `past_key_values` cache if [`~PretrainedModel.beam_search`] or
        [`~PretrainedModel.beam_sample`] is called. This is required to match `past_key_values` with the correct
        beam_idx at every generation step.
        """
        # Select the batch rows (dim 0) of every cached tensor in beam order.
        return tuple(
            tuple(past_state.index_select(0, beam_idx.to(past_state.device)) for past_state in layer_past)
            for layer_past in past_key_values
        )
|
2881099/dotnetGen_postgresql | 898 | Common/Properties/AssemblyInfo.cs | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General information about this assembly is controlled through the
// following set of attributes. Change these attribute values to modify
// the information associated with the assembly.
[assembly: AssemblyTitle("Common")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Common")]
[assembly: AssemblyCopyright("版权所有 (C) 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly invisible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute on that type to true.
[assembly: ComVisible(false)]
// The following GUID is the ID of the typelib if this project is exposed to COM.
[assembly: Guid("b3e3991f-30e6-4edf-ad0b-8a24b747de76")]
// The version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values, or you can default the Revision and
// Build Numbers by using the '*' as shown below:
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
2881099/dotnetGen_postgresql | 1,815 | GenPg/Model/ForeignKeyInfo.cs | using System;
using System.Text;
using System.Collections.Generic;
namespace Model {
	/// <summary>
	/// Describes a foreign-key relationship: the table/columns that declare the
	/// key, and the referenced table/columns — either as resolved objects or as
	/// raw names captured before resolution.
	/// </summary>
	[Serializable]
	public class ForeignKeyInfo {
		// NOTE: field names are part of the [Serializable] contract; do not rename.
		private TableInfo _table;
		private List<ColumnInfo> _columns = new List<ColumnInfo>();
		private TableInfo _referencedTable;
		private List<ColumnInfo> _referencedColumns = new List<ColumnInfo>();
		private string _referencedDBName;
		private string _referencedTableName;
		private List<string> _referencedColumnNames = new List<string>();
		private bool _referencedIsPrimaryKey;

		/// <summary>Creates a key whose both ends are already resolved.</summary>
		public ForeignKeyInfo(TableInfo table, TableInfo referencedTable) {
			this._table = table;
			this._referencedTable = referencedTable;
		}

		/// <summary>Creates a key whose referenced end is known only by name.</summary>
		public ForeignKeyInfo(string referencedSln, string referencedTableName, bool referencedIsPK) {
			this._referencedDBName = referencedSln;
			this._referencedTableName = referencedTableName;
			this._referencedIsPrimaryKey = referencedIsPK;
		}

		/// <summary>Table that declares the foreign key.</summary>
		public TableInfo Table { get { return _table; } set { _table = value; } }

		/// <summary>Columns of the declaring table that form the key.</summary>
		public List<ColumnInfo> Columns { get { return _columns; } set { _columns = value; } }

		/// <summary>Referenced table, once resolved.</summary>
		public TableInfo ReferencedTable { get { return _referencedTable; } set { _referencedTable = value; } }

		/// <summary>Referenced columns, once resolved.</summary>
		public List<ColumnInfo> ReferencedColumns { get { return _referencedColumns; } set { _referencedColumns = value; } }

		/// <summary>Database (solution) name of the referenced table.</summary>
		public string ReferencedDBName { get { return _referencedDBName; } set { _referencedDBName = value; } }

		/// <summary>Name of the referenced table.</summary>
		public string ReferencedTableName { get { return _referencedTableName; } set { _referencedTableName = value; } }

		/// <summary>Names of the referenced columns.</summary>
		public List<string> ReferencedColumnNames { get { return _referencedColumnNames; } set { _referencedColumnNames = value; } }

		/// <summary>True when the referenced columns form the referenced table's primary key.</summary>
		public bool ReferencedIsPrimaryKey { get { return _referencedIsPrimaryKey; } set { _referencedIsPrimaryKey = value; } }
	}
}
|
2881099/dotnetGen_postgresql | 2,050 | GenPg/Model/ColumnInfo.cs | using System;
using System.Collections.Generic;
using System.Text;
namespace Model {
	/// <summary>
	/// Metadata describing a single table column as read from the database
	/// catalog (name, db/CLR type, nullability, key/identity flags, ...).
	/// </summary>
	[Serializable]
	public class ColumnInfo {
		// NOTE: field names are part of the [Serializable] contract; do not rename.
		private string _name;
		private NpgsqlDbType _type;
		private long _length;
		private string _sqlType;
		private string _csType;
		private DataSort _orderby;
		private bool _isNullable;
		private bool _isIdentity;
		private bool _isClustered;
		private bool _isPrimaryKey;
		private int _attndims;
		private int _attnum;

		/// <summary>Creates an empty column description.</summary>
		public ColumnInfo() { }

		/// <summary>Creates a fully populated column description.</summary>
		public ColumnInfo(string name, NpgsqlDbType type, long length, string sqlType, string csType, DataSort orderby, bool isNullable, bool isIdentity, bool isClustered, bool isPrimaryKey, int attndims, int attnum) {
			this._name = name;
			this._type = type;
			this._length = length;
			this._sqlType = sqlType;
			this._csType = csType;
			this._orderby = orderby;
			this._isNullable = isNullable;
			this._isIdentity = isIdentity;
			this._isClustered = isClustered;
			this._isPrimaryKey = isPrimaryKey;
			this._attndims = attndims;
			this._attnum = attnum;
		}

		/// <summary>Column name.</summary>
		public string Name { get { return _name; } set { _name = value; } }

		/// <summary>Npgsql database type of the column.</summary>
		public NpgsqlDbType Type { get { return _type; } set { _type = value; } }

		/// <summary>Declared length of the column.</summary>
		public long Length { get { return _length; } set { _length = value; } }

		/// <summary>SQL type name as reported by the database.</summary>
		public string SqlType { get { return _sqlType; } set { _sqlType = value; } }

		/// <summary>Generated C# type name for the column.</summary>
		public string CsType { get { return _csType; } set { _csType = value; } }

		/// <summary>Sort direction associated with the column.</summary>
		public DataSort Orderby { get { return _orderby; } set { _orderby = value; } }

		/// <summary>Whether the column accepts NULL.</summary>
		public bool IsNullable { get { return _isNullable; } set { _isNullable = value; } }

		/// <summary>Whether the column is an identity (auto-generated) column.</summary>
		public bool IsIdentity { get { return _isIdentity; } set { _isIdentity = value; } }

		/// <summary>Whether the column belongs to the clustered index.</summary>
		public bool IsClustered { get { return _isClustered; } set { _isClustered = value; } }

		/// <summary>Whether the column is part of the primary key.</summary>
		public bool IsPrimaryKey { get { return _isPrimaryKey; } set { _isPrimaryKey = value; } }

		// attndims/attnum mirror PostgreSQL pg_attribute naming: array
		// dimensionality and column ordinal — TODO confirm against the loader.
		public int Attndims { get { return _attndims; } set { _attndims = value; } }

		public int Attnum { get { return _attnum; } set { _attnum = value; } }
	}
}
|
2881099/dotnetGen_postgresql | 2,676 | GenPg/Model/TableInfo.cs | using System;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections.Generic;
namespace Model {
	/// <summary>
	/// Schema metadata for a single database table: its columns, unique
	/// constraints, indexes, foreign keys and key/identity column sets, plus
	/// helpers for deriving C# identifiers from qualified table names.
	/// </summary>
	[Serializable]
	public class TableInfo {
		// NOTE: field names are part of the [Serializable] contract; do not rename.
		private string _id;
		private string _owner;
		private string _name;
		private List<ColumnInfo> _columns = new List<ColumnInfo>();
		private List<List<ColumnInfo>> _uniques = new List<List<ColumnInfo>>();
		private List<List<ColumnInfo>> _indexes = new List<List<ColumnInfo>>();
		private List<ForeignKeyInfo> _foreignKeys = new List<ForeignKeyInfo>();
		private List<ColumnInfo> _identitys = new List<ColumnInfo>();
		private List<ColumnInfo> _clustereds = new List<ColumnInfo>();
		private List<ColumnInfo> _primaryKeys = new List<ColumnInfo>();
		private string _Type;
		private bool _IsOutput;

		public TableInfo(string id, string owner, string name, string type) {
			_id = id;
			_owner = owner;
			_name = name;
			_Type = type;
		}

		/// <summary>
		/// Derives a valid C# class name from a qualified table name.
		/// A "public." prefix is dropped; otherwise only the FIRST '.' is
		/// replaced with '_'. If the result does not start with a letter, a
		/// leading '_' is prepended.
		/// </summary>
		public static string GetClassName(string name) {
			int rr = 0;
			// Ordinal comparison: schema prefixes are machine identifiers, not
			// culture-sensitive text.
			string n = name.StartsWith("public.", StringComparison.Ordinal) ? name.Substring(7) : Regex.Replace(name, @"\.", delegate(Match m) {
				// Only the first '.' separates schema from table; later dots stay.
				if (rr++ > 0) return m.Groups[0].Value;
				return "_";
			});
			return char.IsLetter(n, 0) ? n : string.Concat("_", n);
		}

		/// <summary>Returns the table name without its schema qualifier.</summary>
		public static string GetEntryName(string name) {
			int idx = name.IndexOf('.');
			return idx == -1 ? name : name.Substring(idx + 1);
		}

		/// <summary>Catalog identifier of the table.</summary>
		public string Id {
			get { return _id; }
		}

		/// <summary>Schema (owner) of the table.</summary>
		public string Owner {
			get { return _owner; }
		}

		/// <summary>Unqualified table name.</summary>
		public string Name {
			get { return _name; }
		}

		/// <summary>C# class name derived from "owner.name" (owner lower-cased).</summary>
		public string ClassName {
			get {
				return GetClassName(_owner.ToLower() + "." + _name);
			}
		}

		/// <summary>"owner.name", or just the name when the owner is empty.</summary>
		public string FullName {
			get { return string.IsNullOrEmpty(_owner) ? _name : string.Format("{0}.{1}", _owner, _name); }
		}

		/// <summary>Object type of the table as reported by the catalog.</summary>
		public string Type {
			get { return _Type; }
		}

		// The collection getters below previously contained empty
		// "if (field == null) { }" blocks — dead code, since every field is
		// initialized inline and never reset to null. Removed.

		/// <summary>All columns of the table.</summary>
		public List<ColumnInfo> Columns {
			get { return _columns; }
		}

		/// <summary>Unique constraints, each a list of member columns.</summary>
		public List<List<ColumnInfo>> Uniques {
			get { return _uniques; }
		}

		/// <summary>Non-unique indexes, each a list of member columns.</summary>
		public List<List<ColumnInfo>> Indexes {
			get { return _indexes; }
		}

		/// <summary>Foreign keys declared on this table.</summary>
		public List<ForeignKeyInfo> ForeignKeys {
			get { return _foreignKeys; }
		}

		/// <summary>Primary-key columns.</summary>
		public List<ColumnInfo> PrimaryKeys {
			get { return _primaryKeys; }
		}

		/// <summary>Columns of the clustered index.</summary>
		public List<ColumnInfo> Clustereds {
			get { return _clustereds; }
		}

		/// <summary>Identity (auto-generated) columns.</summary>
		public List<ColumnInfo> Identitys {
			get { return _identitys; }
		}

		/// <summary>Whether code generation should emit output for this table.</summary>
		public bool IsOutput {
			get { return _IsOutput; }
			set { _IsOutput = value; }
		}
	}
}
|
Subsets and Splits
PyTorch Neural Network Imports
This query filters for code examples that contain a specific PyTorch import pattern. It is useful for locating snippets that use PyTorch's neural-network module, though it offers little deeper analytical insight into the dataset.
HTML Files in Train Set
Retrieves all records from the dataset where the file path ends with .html or .htm, providing a basic filter for HTML files.
SQL Console for nick007x/github-code-2025
Retrieves 200 file paths that end with '.html' or '.htm', providing a basic overview of HTML files in the dataset.
Top HTML Files
The query retrieves a sample of HTML file paths, providing basic filtering but limited analytical value.
CSharp Repositories Excluding Unity
Retrieves all records for repositories that contain C# files but are not related to Unity, providing a basic filter of the dataset.
C# File Count per Repository
Counts the total number of C# files across distinct repositories, providing a basic measure of C# file presence.
SQL Console for nick007x/github-code-2025
Lists unique repository IDs containing C# files, providing basic filtering to understand which repositories have C# code.
Select Groovy Files: Train Set
Retrieves the first 1000 entries from the 'train' dataset where the file path ends with '.groovy', providing a basic sample of Groovy files.
GitHub Repos with WiFiClientSecure
Finds specific file paths in repositories that contain particular code snippets related to WiFiClientSecure and ChatGPT, providing basic filtering of relevant files.