refactor: ♻️ Extract common export method (#152)

[skip ci]

Co-authored-by: tk <fiyne1a@dingtalk.com>
nsnail 2024-07-04 09:29:18 +08:00 committed by GitHub
parent 8b01112f42
commit 67eaa5b783
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 77 additions and 343 deletions

View File

@@ -1,9 +1,9 @@
{
"version": "1.4.0",
"devDependencies": {
"cz-git": "^1.9.2",
"cz-git": "^1.9.3",
"commitizen": "^4.3.0",
"prettier": "^3.3.0",
"prettier": "^3.3.2",
"standard-version": "^9.5.0"
},
"config": {
@@ -11,4 +11,4 @@
"path": "node_modules/cz-git"
}
}
}
}

View File

@@ -1,4 +1,8 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Domain;
using NetAdmin.Domain.Dto.Dependency;
namespace NetAdmin.Application.Services;
@@ -73,6 +77,43 @@ public abstract class RepositoryService<TEntity, TPrimary, TLogger>(BasicReposit
}
#endif
/// <summary>
/// Export entities
/// </summary>
protected async Task<IActionResult> ExportAsync<TQuery, TExport>( //
Func<QueryReq<TQuery>, ISelect<TEntity>> selector, QueryReq<TQuery> query, string fileName)
where TQuery : DataAbstraction, new()
{
var data = await selector(query)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<TExport>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<TExport>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{fileName}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
}
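All of the service-level diffs that follow collapse their hand-rolled CSV logic into a single call to this helper. As a hedged sketch of the reuse this enables (the FooService names, QueryFooReq/ExportFooRsp types and file name below are hypothetical, not part of this commit), a derived service can now expose export in a few lines:

/// <summary>
/// Export entities (hypothetical service; QueryFooReq / ExportFooRsp are illustrative).
/// </summary>
public Task<IActionResult> ExportAsync(QueryReq<QueryFooReq> req)
{
    req.ThrowIfInvalid();
    // QueryInternal is passed as a method group matching Func<QueryReq<QueryFooReq>, ISelect<TEntity>>;
    // the base helper applies the row limit, CSV serialization and the Content-Disposition header.
    return ExportAsync<QueryFooReq, ExportFooRsp>(QueryInternal, req, "foo_export");
}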
private IUpdate<TEntity> BuildUpdate( //
TEntity entity //
, IEnumerable<string> includeFields //

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Dto.Dependency;
@@ -59,36 +57,10 @@ public sealed class ApiService(
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryApiReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryApiReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportApiRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportApiRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.接口导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryApiReq, ExportApiRsp>(QueryInternal, req, Ln.接口导出);
}
/// <inheritdoc />

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Dto.Dependency;
@@ -76,36 +74,10 @@ public sealed class ConfigService(BasicRepository<Sys_Config, long> rpo) //
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryConfigReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryConfigReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportConfigRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportConfigRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.配置导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryConfigReq, ExportConfigRsp>(QueryInternal, req, Ln.配置导出);
}
/// <inheritdoc />

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Dto.Dependency;
@@ -94,36 +92,10 @@ public sealed class DeptService(BasicRepository<Sys_Dept, long> rpo) //
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryDeptReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryDeptReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportDeptRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportDeptRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.部门导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryDeptReq, ExportDeptRsp>(QueryInternal, req, Ln.部门导出);
}
/// <inheritdoc />
@@ -160,7 +132,12 @@ public sealed class DeptService(BasicRepository<Sys_Dept, long> rpo) //
return UpdateAsync(req, [nameof(req.Enabled)]);
}
private ISelect<Sys_Dept> QueryInternal(QueryReq<QueryDeptReq> req, bool asTreeCte = false)
private ISelect<Sys_Dept> QueryInternal(QueryReq<QueryDeptReq> req)
{
return QueryInternal(req, false);
}
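The parameterless wrapper added above is what lets DeptService hand QueryInternal to the new base ExportAsync as a method group: C# ignores default parameter values during method-group-to-delegate conversion, so the old (req, bool asTreeCte = false) signature would not convert to Func<QueryReq<QueryDeptReq>, ISelect<Sys_Dept>>. A standalone sketch of that rule, with illustrative names only:

using System;

internal static class MethodGroupDemo
{
    // Mirrors the old signature: an optional bool parameter.
    private static int QueryWithFlag(string filter, bool asTree = false) => asTree ? 1 : 0;

    // Mirrors the new wrapper: exactly the shape the delegate expects.
    private static int Query(string filter) => QueryWithFlag(filter);

    private static void Main()
    {
        // Func<string, int> bad = QueryWithFlag;  // CS0123: the default value is not applied here
        Func<string, int> ok = Query;              // compiles because the shapes match exactly
        Console.WriteLine(ok("demo"));             // prints 0
    }
}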
private ISelect<Sys_Dept> QueryInternal(QueryReq<QueryDeptReq> req, bool asTreeCte)
{
var ret = Rpo.Select.WhereDynamicFilter(req.DynamicFilter)
.WhereDynamic(req.Filter)

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Dto.Dependency;
@@ -96,36 +94,10 @@ public sealed class DicContentService(BasicRepository<Sys_DicContent, long> rpo)
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryDicContentReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryDicContentReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportDicContentRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportDicContentRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.字典内容导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryDicContentReq, ExportDicContentRsp>(QueryInternal, req, Ln.字典内容导出);
}
/// <inheritdoc />

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Dto.Dependency;
@@ -65,36 +63,10 @@ public sealed class JobRecordService(BasicRepository<Sys_JobRecord, long> rpo) //
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryJobRecordReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryJobRecordReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportJobRecordRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportJobRecordRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.计划作业执行记录导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryJobRecordReq, ExportJobRecordRsp>(QueryInternal, req, Ln.计划作业执行记录导出);
}
/// <inheritdoc />

View File

@@ -1,7 +1,5 @@
using Cronos;
using CsvHelper;
using FreeSql.Internal;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Dto.Dependency;
@@ -140,36 +138,10 @@ public sealed class JobService(BasicRepository<Sys_Job, long> rpo, IJobRecordSer
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryJobReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryJobReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportJobRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportJobRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.计划作业导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryJobReq, ExportJobRsp>(QueryInternal, req, Ln.计划作业导出);
}
/// <inheritdoc />

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Dto.Dependency;
@@ -67,36 +65,10 @@ public sealed class RequestLogService(BasicRepository<Sys_RequestLog, long> rpo)
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryRequestLogReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryRequestLogReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportRequestLogRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportRequestLogRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.请求日志导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryRequestLogReq, ExportRequestLogRsp>(QueryInternal, req, Ln.请求日志导出);
}
/// <inheritdoc />

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Dto.Dependency;
@@ -88,36 +86,10 @@ public sealed class RoleService(BasicRepository<Sys_Role, long> rpo) //
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryRoleReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryRoleReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportRoleRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportRoleRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.角色导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryRoleReq, ExportRoleRsp>(QueryInternal, req, Ln.角色导出);
}
/// <inheritdoc />

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Contexts;
@@ -112,36 +110,10 @@ public sealed class SiteMsgService(
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QuerySiteMsgReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QuerySiteMsgReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportSiteMsgRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportSiteMsgRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.站内信导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QuerySiteMsgReq, ExportSiteMsgRsp>(QueryInternal, req, Ln.站内信导出);
}
/// <inheritdoc />

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.Contexts;
@@ -160,40 +158,10 @@ public sealed class UserService(
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryUserReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryUserReq> req)
{
req.ThrowIfInvalid();
#pragma warning disable VSTHRD103, S6966
// ReSharper disable once MethodHasAsyncOverload
var data = await QueryInternal(req)
#pragma warning restore S6966, VSTHRD103
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<ExportUserRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<ExportUserRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.用户导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryUserReq, ExportUserRsp>(QueryInternal, req, Ln.用户导出);
}
/// <inheritdoc />

View File

@@ -1,5 +1,3 @@
using CsvHelper;
using Microsoft.Net.Http.Headers;
using NetAdmin.Application.Repositories;
using NetAdmin.Application.Services;
using NetAdmin.Domain.DbMaps.Tpl;
@@ -65,36 +63,10 @@ public sealed class ExampleService(BasicRepository<Tpl_Example, long> rpo) //
}
/// <inheritdoc />
public async Task<IActionResult> ExportAsync(QueryReq<QueryExampleReq> req)
public Task<IActionResult> ExportAsync(QueryReq<QueryExampleReq> req)
{
req.ThrowIfInvalid();
var data = await QueryInternal(req)
#if DBTYPE_SQLSERVER
.WithLock(SqlServerLock.NoLock | SqlServerLock.NoWait)
#endif
.Take(Numbers.MAX_LIMIT_EXPORT)
.ToListAsync()
.ConfigureAwait(false);
var list = data.Adapt<List<QueryExampleRsp>>();
var stream = new MemoryStream();
var writer = new StreamWriter(stream);
var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
csv.WriteHeader<QueryExampleRsp>();
await csv.NextRecordAsync().ConfigureAwait(false);
foreach (var item in list) {
csv.WriteRecord(item);
await csv.NextRecordAsync().ConfigureAwait(false);
}
await csv.FlushAsync().ConfigureAwait(false);
_ = stream.Seek(0, SeekOrigin.Begin);
App.HttpContext.Response.Headers.ContentDisposition
= new ContentDispositionHeaderValue(Chars.FLG_HTTP_HEADER_VALUE_ATTACHMENT) {
FileNameStar = $"{Ln.示例导出}_{DateTime.Now:yyyy.MM.dd-HH.mm.ss}.csv"
}.ToString();
return new FileStreamResult(stream, Chars.FLG_HTTP_HEADER_VALUE_APPLICATION_OCTET_STREAM);
return ExportAsync<QueryExampleReq, QueryExampleRsp>(QueryInternal, req, Ln.示例导出);
}
/// <inheritdoc />

View File

@@ -10,14 +10,14 @@
},
"dependencies": {
"@element-plus/icons-vue": "^2.3.1",
"ace-builds": "^1.35.0",
"aieditor": "^1.0.10",
"ace-builds": "^1.35.2",
"aieditor": "^1.0.12",
"axios": "^1.7.2",
"clipboard": "^2.0.11",
"core-js": "^3.37.1",
"cropperjs": "^1.6.2",
"crypto-js": "^4.2.0",
"echarts": "^5.5.0",
"echarts": "^5.5.1",
"element-plus": "^2.7.6",
"json-bigint": "^1.0.0",
"json5-to-table": "^0.1.8",
@@ -28,7 +28,7 @@
"qrcodejs2": "^0.0.2",
"sortablejs": "^1.15.2",
"vkbeautify": "^0.99.3",
"vue": "^3.4.30",
"vue": "^3.4.31",
"vue-i18n": "^9.13.1",
"vue-router": "^4.4.0",
"vue3-ace-editor": "^2.2.4",
@@ -42,7 +42,7 @@
"prettier-plugin-organize-attributes": "^1.0.0",
"sass": "^1.77.6",
"terser": "^5.31.1",
"vite": "^5.3.1"
"vite": "^5.3.3"
},
"browserslist": [
"> 1%",