自定义条件完成

pull/1/head
潘建东 8 months ago
parent 7d4acd61c0
commit de831eba9d

@ -1,4 +1,5 @@
using System;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
@ -32,7 +33,7 @@ namespace ZKLT.Hadoop.Interface
/// <param name="table">数据表</param>
/// <param name="row">数据</param>
/// <returns>是否成功</returns>
public bool Insert(HDP_Source source, HDP_Table table, Dictionary<string, object> row);
public bool Insert(HDP_Source source, HDP_Table table, JContainer? row);
/// <summary>
/// 更新
@ -42,7 +43,7 @@ namespace ZKLT.Hadoop.Interface
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <returns>是否成功</returns>
public bool Update(HDP_Source source, HDP_Table table, Dictionary<string, string> where, Dictionary<string, object> row);
public bool Update(HDP_Source source, HDP_Table table, JContainer? where, JContainer? row);
/// <summary>
/// 删除
@ -52,7 +53,7 @@ namespace ZKLT.Hadoop.Interface
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <returns>是否成功</returns>
public bool Delete(HDP_Source source, HDP_Table table, Dictionary<string, string> where, Dictionary<string, object> row);
public bool Delete(HDP_Source source, HDP_Table table, JContainer? where, JContainer? row);
/// <summary>
/// 查询单个
@ -62,7 +63,7 @@ namespace ZKLT.Hadoop.Interface
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <returns>结果</returns>
public T? QuerySingle<T>(HDP_Source source, HDP_Table table, Dictionary<string, string> where, Dictionary<string, object> row, string[]? col);
public T? QuerySingle<T>(HDP_Source source, HDP_Table table, JContainer? where, JContainer? row, string[]? col);
/// <summary>
/// 查询列表
@ -73,8 +74,8 @@ namespace ZKLT.Hadoop.Interface
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <returns>结果集</returns>
public T[] Query<T>(HDP_Source source, HDP_Table table, Dictionary<string, string>? where, Dictionary<string, object>? row,
Dictionary<string, object>? order, string[]? col);
public T[] Query<T>(HDP_Source source, HDP_Table table, JContainer? where, JContainer? row,
JContainer? order, string[]? col);
/// <summary>
/// 查询列表
@ -85,7 +86,7 @@ namespace ZKLT.Hadoop.Interface
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <returns>结果集</returns>
public HDP_Page<T> QueryPage<T>(HDP_Source source, HDP_Table table, int pageIndex, int pageSize, Dictionary<string, string>? where, Dictionary<string, object>? row, Dictionary<string, object>? order, string[]? col);
public HDP_Page<T> QueryPage<T>(HDP_Source source, HDP_Table table, int pageIndex, int pageSize, JContainer? where, JContainer? row, JContainer? order, string[]? col);
/// <summary>

@ -1,4 +1,5 @@
using System;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
@ -23,11 +24,11 @@ namespace ZKLT.Hadoop.Model
private string[]? _Col;
private Dictionary<string, string>? _Where;
private JContainer? _Where;
private Dictionary<string, object>? _Data;
private JContainer? _Data;
private Dictionary<string, object>? _Order;
private JContainer? _Order;
/// <summary>
/// 源
@ -42,12 +43,12 @@ namespace ZKLT.Hadoop.Model
/// <summary>
/// 条件
/// </summary>
public Dictionary<string, string>? Where { get => _Where; set => _Where = value; }
public JContainer? Where { get => _Where; set => _Where = value; }
/// <summary>
/// 数据
/// </summary>
public Dictionary<string, object>? Data { get => _Data; set => _Data = value; }
public JContainer? Data { get => _Data; set => _Data = value; }
/// <summary>
/// 分页下标
@ -62,7 +63,7 @@ namespace ZKLT.Hadoop.Model
/// <summary>
/// 排序
/// </summary>
public Dictionary<string, object>? Order { get => _Order; set => _Order = value; }
public JContainer? Order { get => _Order; set => _Order = value; }
/// <summary>
/// 命令类型

@ -1,4 +1,5 @@
using System;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
@ -27,7 +28,7 @@ namespace ZKLT.Hadoop.Model
/// <param name="action">函数</param>
/// <param name="param">参数</param>
/// <returns>命令</returns>
public static object Convert(string action, Dictionary<string, object> param)
public static object Convert(string action, JContainer param)
{
if (action == DATENOW)
{

@ -1,4 +1,5 @@
using System;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
@ -136,6 +137,29 @@ namespace ZKLT.Hadoop.Model
}
}
return _result;
}
/// <summary>
/// 类转JObject
/// </summary>
/// <param name="data">数据</param>
/// <returns></returns>
public static JObject Class2JObject(object data) {
    // Converts a plain object's public instance properties into a JObject,
    // emitting one JProperty per non-null property value.
    var _result = new JObject();
    Type _type = data.GetType();
    var _properties = _type.GetProperties();
    foreach (var _property in _properties)
    {
        // Indexers ("Item" properties) require index arguments; calling
        // GetValue(data) on them throws TargetParameterCountException.
        if (_property.GetIndexParameters().Length > 0)
        {
            continue;
        }
        // Read the property once instead of twice (original called
        // GetValue both in the null check and in the JProperty add).
        var _value = _property.GetValue(data);
        if (_value != null)
        {
            _result.Add(new JProperty(_property.Name, _value));
        }
    }
    return _result;
}
}

@ -10,4 +10,8 @@
<None Remove="HDP_Table.cs~RF4db01d6c.TMP" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
</ItemGroup>
</Project>

@ -1,6 +1,7 @@
using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.DependencyInjection;
using MySqlX.XDevAPI.Relational;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Linq;
@ -140,10 +141,11 @@ namespace ZKLT.Hadoop
{
return _Source;
}
var _result = _TableService.QuerySingle<HDP_Source>(_Source, GetTable("HDP_Source")!, new Dictionary<string, string>
var _result = _TableService.QuerySingle<HDP_Source>(_Source, GetTable("HDP_Source")!, new JObject
{
{ "Id","=" }
}, new Dictionary<string, object> {
}, new JObject {
{ "Id",sourceid}
}, null);
return _result;
@ -186,7 +188,7 @@ namespace ZKLT.Hadoop
throw new ArgumentException("编号已存在");
}
return _TableService.Insert(_Source, GetTable("HDP_Source")!, HDP_Table.Class2Dictionary(source));
return _TableService.Insert(_Source, GetTable("HDP_Source")!, HDP_Table.Class2JObject(source));
}
/// <summary>
@ -226,9 +228,9 @@ namespace ZKLT.Hadoop
throw new ArgumentException("编号不存在");
}
return _TableService.Update(_Source, GetTable("HDP_Source")!, new Dictionary<string, string> {
return _TableService.Update(_Source, GetTable("HDP_Source")!, new JObject {
{"Id","=" }
}, HDP_Table.Class2Dictionary(source));
}, HDP_Table.Class2JObject(source));
}
/// <summary>
@ -248,9 +250,9 @@ namespace ZKLT.Hadoop
throw new ArgumentException("编号不存在");
}
return _TableService.Delete(_Source, GetTable("HDP_Source")!, new Dictionary<string, string> {
return _TableService.Delete(_Source, GetTable("HDP_Source")!, new JObject{
{"Id","=" }
}, new Dictionary<string, object> {
}, new JObject{
{"Id",sourceid }
});
}
@ -282,18 +284,18 @@ namespace ZKLT.Hadoop
return _Tables.First(x => x.Id == tableid);
}
var _result = _TableService.QuerySingle<HDP_Table>(_Source, GetTable("HDP_Table")!, new Dictionary<string, string>
var _result = _TableService.QuerySingle<HDP_Table>(_Source, GetTable("HDP_Table")!, new JObject
{
{ "Id","=" }
}, new Dictionary<string, object> {
}, new JObject{
{ "Id",tableid}
}, null);
if (_result != null)
{
_result.Columns = _TableService.Query<HDP_Column>(_Source, GetTable("HDP_Column")!, new Dictionary<string, string> {
_result.Columns = _TableService.Query<HDP_Column>(_Source, GetTable("HDP_Column")!, new JObject {
{ "TableId","="}
}, new Dictionary<string, object> {
}, new JObject{
{"TableId",_result.Id! }
}, null, null);
}
@ -320,7 +322,7 @@ namespace ZKLT.Hadoop
{
using (TransactionScope _scope = new TransactionScope())
{
if (!_TableService.Insert(_Source, GetTable("HDP_Table")!, HDP_Table.Class2Dictionary(table)))
if (!_TableService.Insert(_Source, GetTable("HDP_Table")!, HDP_Table.Class2JObject(table)))
{
return false;
}
@ -328,7 +330,7 @@ namespace ZKLT.Hadoop
{
var _column = table.Columns![i];
_column.TableId = table.Id;
if (!_TableService.Insert(_Source, GetTable("HDP_Column")!, HDP_Table.Class2Dictionary(_column)))
if (!_TableService.Insert(_Source, GetTable("HDP_Column")!, HDP_Table.Class2JObject(_column)))
{
return false;
}
@ -362,9 +364,9 @@ namespace ZKLT.Hadoop
{
using (TransactionScope _scope = new TransactionScope())
{
if (!_TableService.Update(_Source, GetTable("HDP_Table")!, new Dictionary<string, string> {
if (!_TableService.Update(_Source, GetTable("HDP_Table")!, new JObject{
{ "Id","="}
}, HDP_Table.Class2Dictionary(table)))
}, HDP_Table.Class2JObject(table)))
{
return false;
}
@ -373,21 +375,21 @@ namespace ZKLT.Hadoop
{
var _column = table.Columns![i];
_column.TableId = table.Id;
if (_TableService.QuerySingle<HDP_Column>(_Source, GetTable("HDP_Column")!, new Dictionary<string, string>
if (_TableService.QuerySingle<HDP_Column>(_Source, GetTable("HDP_Column")!, new JObject
{
{"Id","=" }
}, HDP_Table.Class2Dictionary(_column), null) == null)
}, HDP_Table.Class2JObject(_column), null) == null)
{
if (!_TableService.Insert(_Source, GetTable("HDP_Column")!, HDP_Table.Class2Dictionary(_column)))
if (!_TableService.Insert(_Source, GetTable("HDP_Column")!, HDP_Table.Class2JObject(_column)))
{
return false;
}
}
else
{
if (!_TableService.Update(_Source, GetTable("HDP_Column")!, new Dictionary<string, string> {
if (!_TableService.Update(_Source, GetTable("HDP_Column")!, new JObject {
{"Id","=" }
}, HDP_Table.Class2Dictionary(_column)))
}, HDP_Table.Class2JObject(_column)))
{
return false;
}
@ -423,17 +425,17 @@ namespace ZKLT.Hadoop
using (TransactionScope _scope = new TransactionScope())
{
if (!_TableService.Delete(_Source, GetTable("HDP_Table")!, new Dictionary<string, string> {
if (!_TableService.Delete(_Source, GetTable("HDP_Table")!, new JObject{
{"Id","=" }
}, new Dictionary<string, object> {
}, new JObject {
{"Id",tableId }
}))
{
return false;
}
if (!_TableService.Delete(_Source, GetTable("HDP_Column")!, new Dictionary<string, string> {
if (!_TableService.Delete(_Source, GetTable("HDP_Column")!, new JObject{
{ "TableId","="}
}, new Dictionary<string, object> {
}, new JObject {
{"TableId",tableId }
}))
{

@ -12,6 +12,8 @@ using MySqlX.XDevAPI.Relational;
using Mysqlx.Crud;
using Newtonsoft.Json;
using Mysqlx.Resultset;
using Newtonsoft.Json.Linq;
using System.Transactions;
namespace ZKLT.Hadoop
{
@ -22,60 +24,70 @@ namespace ZKLT.Hadoop
/// </summary>
/// <param name="table">表</param>
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <param name="command">命令</param>
/// <param name="data">数据</param>
/// <param name="param">参数</param>
private void MergeWhere(HDP_Table table, Dictionary<string, string>? where, Dictionary<string, object>? row, StringBuilder command, Dictionary<string, object> param)
private string MergeWhere(HDP_Table table, JContainer? where, JContainer? data, Dictionary<string, object> param)
{
string _guid = "";
//执行条件
StringBuilder _wherestr = new StringBuilder();
_wherestr.Append("WHERE 1 = 1");
if (where != null && where.Count > 0)
string _guid = "";
if (where != null && data != null)
{
for (var i = 0; i < table.Columns!.Length; i++)
_wherestr.AppendLine("WHERE");
var _wheres = new List<JObject>();
var _datas = new List<JObject>();
if (where.Type == JTokenType.Object)
{
var _column = table.Columns[i];
if (where.ContainsKey(_column.Key!))
_wheres.Add((JObject)where);
_datas.Add((JObject)data);
}
else if (where.Type == JTokenType.Array)
{
switch (where[_column.Key!])
for(var i = 0;i < where.Count;i++)
{
case HDP_WhereType.LIKE:
_guid = Guid.NewGuid().ToString("N");
_wherestr.Append($@" AND `{_column.Key!}` {where[_column.Key!]} CONCAT('%',@{_guid},'%')");
param.Add(_guid, row![_column.Key!]);
break;
case HDP_WhereType.BETWEEN:
if (row![_column.Key!] != null)
_wheres.Add((JObject)where.Children().ToArray()[i]);
_datas.Add((JObject)data.Children().ToArray()[i]);
}
}
for (var j = 0; j < _wheres.Count; j++) {
if (j > 0) {
_wherestr.AppendLine("OR");
}
_wherestr.AppendLine("(");
var _where = _wheres[j];
var _data = _datas[j];
if (_where.Count > 0)
{
var _betweendata = JsonConvert.DeserializeObject<object[]>(JsonConvert.SerializeObject(row![_column.Key!]));
if (_betweendata != null)
_wherestr.AppendLine("1 = 1");
var _fileds = _where.Children().ToArray();
for (var i = 0; i < _fileds.Length; i++)
{
if (_betweendata[0] != null && _betweendata[0].ToString() != "")
var _item = (JProperty)_fileds[i];
if (table.Columns!.Any(x => x.Key == _item.Name) && _data.ContainsKey(_item.Name))
{
if (_item.Value.Type == JTokenType.String)
{
_guid = Guid.NewGuid().ToString("N");
_wherestr.Append($@" AND `{_column.Key!}` >= @{_guid}");
param.Add(_guid, _betweendata[0]);
_wherestr.AppendLine(@$"AND `{_item.Name}` {_item.Value.ToString()} @{_guid}");
param.Add(_guid, ((JValue)_data[_item.Name]!).Value!);
}
if (_betweendata[1] != null && _betweendata[1].ToString() != "")
else if (_item.Value.Type == JTokenType.Array)
{
string[] _itemv = _item.Value.ToObject<string[]>()!;
object[] _colv = _data[_item.Name]!.ToObject<object[]>()!;
for (var k = 0; k < _itemv.Length; k++)
{
_guid = Guid.NewGuid().ToString("N");
_wherestr.Append($@" AND `{_column.Key!}` <= @{_guid}");
param.Add(_guid, _betweendata[1]);
_wherestr.AppendLine(@$"AND `{_item.Name}` {_itemv[k]} @{_guid}");
param.Add(_guid, _colv.Length > k ? _colv[k] : _colv[_colv.Length - 1]);
}
}
}
break;
default:
_guid = Guid.NewGuid().ToString("N");
_wherestr.Append($@" AND `{_column.Key!}` {where[_column.Key!]} @{_guid}");
param.Add(_guid, row![_column.Key!]);
break;
}
}
_wherestr.AppendLine(")");
}
command.AppendLine(_wherestr.ToString());
}
return _wherestr.ToString();
}
/// <summary>
@ -111,21 +123,23 @@ namespace ZKLT.Hadoop
/// <param name="table">表</param>
/// <param name="order">排序</param>
/// <returns></returns>
private string MergeOrder(HDP_Table table, Dictionary<string, object>? order, Dictionary<string, object> param)
private string MergeOrder(HDP_Table table, JContainer? order, Dictionary<string, object> param)
{
string _guid = "";
StringBuilder _orderstr = new StringBuilder();
if (order != null && order.Count > 0)
{
_orderstr.Append("ORDER BY ");
foreach (var key in order.Keys)
var _fields = order.Children().ToArray();
foreach (var field in _fields)
{
if (table.Columns!.Any(x => x.Key == key))
var _field = (JProperty)field;
if (table.Columns!.Any(x => x.Key == _field.Name))
{
var _column = table.Columns!.First(x => x.Key == key);
if (order[key] is string)
var _column = table.Columns!.First(x => x.Key == _field.Name);
if (_field.Value.Type == JTokenType.String)
{
switch (order[key])
switch (_field.Value.ToString())
{
case "DESC":
_orderstr.Append($@"`{_column.Key!}` DESC,");
@ -135,9 +149,9 @@ namespace ZKLT.Hadoop
break;
}
}
else if (order[key] is Newtonsoft.Json.Linq.JArray)
else if (_field.Value.Type == JTokenType.Array)
{
var _orderTemp = JsonConvert.DeserializeObject<object[]>(JsonConvert.SerializeObject(order[key]));
var _orderTemp = _field.Value.ToObject<object[]>();
_orderstr.Append(@$"CASE `{_column.Key!}`");
for (var i = 0; i < _orderTemp!.Length; i++)
{
@ -373,9 +387,9 @@ namespace ZKLT.Hadoop
/// </summary>
/// <param name="source">数据源</param>
/// <param name="table">数据表</param>
/// <param name="row">数据</param>
/// <param name="data">数据</param>
/// <returns>是否成功</returns>
public bool Insert(HDP_Source source, HDP_Table table, Dictionary<string, object> row)
public bool Insert(HDP_Source source, HDP_Table table, JContainer? data)
{
//数据校验
if (string.IsNullOrEmpty(table.Key))
@ -386,10 +400,22 @@ namespace ZKLT.Hadoop
{
throw new ArgumentNullException("列无效");
}
if (row == null || row.Count == 0)
if (data == null || data.Count == 0)
{
throw new ArgumentNullException("数据无效");
}
List<JObject> _data = new List<JObject>();
if (data.Type == JTokenType.Object)
{
_data.Add((JObject)data);
}
else if (data.Type == JTokenType.Array)
{
_data = data.ToObject<List<JObject>>()!;
}
var _result = 0;
using (TransactionScope _scope = new TransactionScope())
{
using (MySqlConnection _connection = new MySqlConnection(source.GetConnectString()))
{
try
@ -400,6 +426,10 @@ namespace ZKLT.Hadoop
{
throw new ArgumentException("数据源连接失败");
}
for (var j = 0; j < _data.Count; j++)
{
var _row = _data[j];
StringBuilder _command = new StringBuilder();
//主键检查
@ -407,7 +437,7 @@ namespace ZKLT.Hadoop
for (var i = 0; i < _primarys.Length; i++)
{
var _primary = _primarys[i];
if (!string.IsNullOrEmpty(_primary.InsertDefault) || (row.ContainsKey(_primary.Key!) && row[_primary.Key!] != null))
if (!string.IsNullOrEmpty(_primary.InsertDefault) || _row.ContainsKey(_primary.Key!))
{
continue;
}
@ -425,17 +455,17 @@ namespace ZKLT.Hadoop
for (var i = 0; i < table.Columns.Length; i++)
{
var _column = table.Columns[i];
if (row.ContainsKey(_column.Key!) && row[_column.Key!] != null)
if (_row.ContainsKey(_column.Key!))
{
_colstr.Append($@"`{_column.Key!}`,");
_parmstr.Append($@"@{_column.Key},");
_params.Add(_column.Key!, row[_column.Key!]);
_params.Add(_column.Key!, ((JValue)_row[_column.Key!]!).Value!);
}
else if (!string.IsNullOrEmpty(_column.InsertDefault))
{
_colstr.Append($@"`{_column.Key!}`,");
_parmstr.Append($@"@{_column.Key},");
_params.Add(_column.Key!, HDP_CommandAction.Convert(_column.InsertDefault, row));
_params.Add(_column.Key!, HDP_CommandAction.Convert(_column.InsertDefault, _row));
}
}
if (_colstr[_colstr.Length - 1] == ',')
@ -450,10 +480,12 @@ namespace ZKLT.Hadoop
_command.AppendLine(") VALUES (");
_command.AppendLine(_parmstr.ToString());
_command.AppendLine(")");
var _result = _connection.Execute(_command.ToString(), _params);
_result += _connection.Execute(_command.ToString(), _params);
}
_connection.Close();
if (_result > 0)
if (_result == _data.Count)
{
_scope.Complete();
return true;
}
else
@ -462,6 +494,7 @@ namespace ZKLT.Hadoop
}
}
}
}
/// <summary>
/// 更新
@ -471,7 +504,7 @@ namespace ZKLT.Hadoop
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <returns>是否成功</returns>
public bool Update(HDP_Source source, HDP_Table table, Dictionary<string, string> where, Dictionary<string, object> row)
public bool Update(HDP_Source source, HDP_Table table, JContainer? where, JContainer? data)
{
//数据校验
if (string.IsNullOrEmpty(table.Key))
@ -482,14 +515,26 @@ namespace ZKLT.Hadoop
{
throw new ArgumentNullException("列无效");
}
if (where == null || where.Count == 0)
if (where == null)
{
throw new ArgumentNullException("条件无效");
}
if (row == null || row.Count == 0)
if (data == null || data.Count == 0)
{
throw new ArgumentNullException("数据无效");
}
List<JObject> _data = new List<JObject>();
if (data.Type == JTokenType.Object)
{
_data.Add((JObject)data);
}
else if (data.Type == JTokenType.Array)
{
_data = data.ToObject<List<JObject>>()!;
}
var _result = 0;
using (TransactionScope _scope = new TransactionScope())
{
using (MySqlConnection _connection = new MySqlConnection(source.GetConnectString()))
{
try
@ -500,7 +545,9 @@ namespace ZKLT.Hadoop
{
throw new ArgumentException("数据源连接失败");
}
for (var j = 0; j < _data.Count; j++)
{
var _row = _data[j];
//更新命令
StringBuilder _command = new StringBuilder();
Dictionary<string, object> _params = new Dictionary<string, object>();
@ -511,15 +558,15 @@ namespace ZKLT.Hadoop
for (var i = 0; i < table.Columns.Length; i++)
{
var _column = table.Columns[i];
if (row.ContainsKey(_column.Key!) && !where.ContainsKey(_column.Key!) && row[_column.Key!] != null)
if (_row.ContainsKey(_column.Key!))
{
_colstr.Append($@"`{_column.Key!}`=@{_column.Key!},");
_params.Add(_column.Key!, row[_column.Key!]);
_params.Add(_column.Key!, ((JValue)_row[_column.Key!]!).Value!);
}
else if (!string.IsNullOrEmpty(_column.UpdateDefault))
{
_colstr.Append($@"`{_column.Key!}`=@{_column.Key!},");
_params.Add(_column.Key!, HDP_CommandAction.Convert(_column.UpdateDefault, row));
_params.Add(_column.Key!, HDP_CommandAction.Convert(_column.UpdateDefault, _row));
}
}
if (_colstr[_colstr.Length - 1] == ',')
@ -529,12 +576,14 @@ namespace ZKLT.Hadoop
_command.AppendLine(_colstr.ToString());
//执行条件
MergeWhere(table, where, row, _command, _params);
_command.AppendLine(MergeWhere(table, where, _row, _params));
var _result = _connection.Execute(_command.ToString(), _params);
_result += _connection.Execute(_command.ToString(), _params);
}
_connection.Close();
if (_result > 0)
if (_result == _data.Count)
{
_scope.Complete();
return true;
}
else
@ -543,6 +592,7 @@ namespace ZKLT.Hadoop
}
}
}
}
/// <summary>
/// 删除
@ -552,7 +602,7 @@ namespace ZKLT.Hadoop
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <returns>是否成功</returns>
public bool Delete(HDP_Source source, HDP_Table table, Dictionary<string, string> where, Dictionary<string, object> row)
public bool Delete(HDP_Source source, HDP_Table table, JContainer? where, JContainer? data)
{
//数据校验
if (string.IsNullOrEmpty(table.Key))
@ -563,14 +613,26 @@ namespace ZKLT.Hadoop
{
throw new ArgumentNullException("列无效");
}
if (where == null || where.Count == 0)
if (where == null)
{
throw new ArgumentNullException("条件无效");
}
if (row == null || row.Count == 0)
if (data == null || data.Count == 0)
{
throw new ArgumentNullException("数据无效");
}
List<JObject> _data = new List<JObject>();
if (data.Type == JTokenType.Object)
{
_data.Add((JObject)data);
}
else if (data.Type == JTokenType.Array)
{
_data = data.ToObject<List<JObject>>()!;
}
var _result = 0;
using (TransactionScope _scope = new TransactionScope())
{
using (MySqlConnection _connection = new MySqlConnection(source.GetConnectString()))
{
try
@ -582,17 +644,22 @@ namespace ZKLT.Hadoop
throw new ArgumentException("数据源连接失败");
}
//更新命令
for (var j = 0; j < _data.Count; j++)
{
var _row = _data[j];
//删除命令
StringBuilder _command = new StringBuilder();
Dictionary<string, object> _params = new Dictionary<string, object>();
_command.AppendLine(@$"DELETE FROM `{table.Key}`");
MergeWhere(table, where, row, _command, _params);
_command.AppendLine(MergeWhere(table, where, _row, _params));
var _result = _connection.Execute(_command.ToString(), _params);
_result += _connection.Execute(_command.ToString(), _params);
}
_connection.Close();
if (_result > 0)
if (_result == _data.Count)
{
_scope.Complete();
return true;
}
else
@ -601,6 +668,7 @@ namespace ZKLT.Hadoop
}
}
}
}
/// <summary>
/// 查询单个
@ -608,9 +676,9 @@ namespace ZKLT.Hadoop
/// <param name="source">数据源</param>
/// <param name="table">数据表</param>
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <param name="data">数据</param>
/// <returns>结果</returns>
public T? QuerySingle<T>(HDP_Source source, HDP_Table table, Dictionary<string, string> where, Dictionary<string, object> row,
public T? QuerySingle<T>(HDP_Source source, HDP_Table table, JContainer? where, JContainer? data,
string[]? col)
{
//数据校验
@ -622,11 +690,11 @@ namespace ZKLT.Hadoop
{
throw new ArgumentNullException("列无效");
}
if (where == null || where.Count == 0)
if (where == null)
{
throw new ArgumentNullException("条件无效");
}
if (row == null || row.Count == 0)
if (data == null || data.Count == 0)
{
throw new ArgumentNullException("数据无效");
}
@ -656,7 +724,7 @@ namespace ZKLT.Hadoop
}
//执行条件
MergeWhere(table, where, row, _command, _params);
_command.AppendLine(MergeWhere(table, where, data, _params));
var _result = _connection.Query<T>(_command.ToString(), _params).ToArray();
_connection.Close();
@ -678,10 +746,10 @@ namespace ZKLT.Hadoop
/// <param name="source">数据源</param>
/// <param name="table">数据表</param>
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <param name="data">数据</param>
/// <returns>结果集</returns>
public T[] Query<T>(HDP_Source source, HDP_Table table, Dictionary<string, string>? where, Dictionary<string, object>? row,
Dictionary<string, object>? order, string[]? col)
public T[] Query<T>(HDP_Source source, HDP_Table table, JContainer? where, JContainer? data,
JContainer? order, string[]? col)
{
//数据校验
if (string.IsNullOrEmpty(table.Key))
@ -718,7 +786,7 @@ namespace ZKLT.Hadoop
}
//执行条件
MergeWhere(table, where, row, _command, _params);
_command.AppendLine(MergeWhere(table, where, data, _params));
//执行排序
_command.AppendLine(MergeOrder(table, order, _params));
@ -738,8 +806,8 @@ namespace ZKLT.Hadoop
/// <param name="where">条件</param>
/// <param name="row">数据</param>
/// <returns>结果集</returns>
public HDP_Page<T> QueryPage<T>(HDP_Source source, HDP_Table table, int pageIndex, int pageSize, Dictionary<string, string>? where,
Dictionary<string, object>? row, Dictionary<string, object>? order, string[]? col)
public HDP_Page<T> QueryPage<T>(HDP_Source source, HDP_Table table, int pageIndex, int pageSize, JContainer? where,
JContainer? data, JContainer? order, string[]? col)
{
//数据校验
if (string.IsNullOrEmpty(table.Key))
@ -776,35 +844,10 @@ namespace ZKLT.Hadoop
}
//执行条件
MergeWhere(table, where, row, _command, _params);
_command.AppendLine(MergeWhere(table, where, data, _params));
//执行排序
StringBuilder _orderstr = new StringBuilder();
_orderstr.Append("ORDER BY ");
if (order != null && order.Count > 0)
{
for (var i = 0; i < table.Columns.Length; i++)
{
var _column = table.Columns[i];
if (order.ContainsKey(_column.Key!))
{
switch (order[_column.Key!])
{
case "DESC":
_orderstr.Append($@"`{_column.Key!}` DESC,");
break;
default:
_orderstr.Append($@"`{_column.Key!}` ASC,");
break;
}
}
}
if (_orderstr[_orderstr.Length - 1] == ',')
{
_orderstr.Remove(_orderstr.Length - 1, 1);
}
_command.AppendLine(_orderstr.ToString());
}
_command.AppendLine(MergeOrder(table, order, _params));
var _result = new HDP_Page<T>();
_result.PageIndex = pageIndex;
_result.PageSize = pageSize;

Loading…
Cancel
Save