init datamate

Dallas98
2025-10-21 23:00:48 +08:00
commit 1c97afed7d
692 changed files with 135442 additions and 0 deletions


@@ -0,0 +1,80 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>datax-all</artifactId>
<groupId>com.alibaba.datax</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>nfsreader</artifactId>
<name>nfsreader</name>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>com.alibaba.datax</groupId>
<artifactId>datax-core</artifactId>
<version>${datax-project-version}</version>
</dependency>
<dependency>
<groupId>com.alibaba.datax</groupId>
<artifactId>datax-common</artifactId>
<version>${datax-project-version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.properties</include>
</includes>
</resource>
</resources>
<plugins>
<!-- compiler plugin -->
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${jdk-version}</source>
<target>${jdk-version}</target>
<encoding>${project-sourceEncoding}</encoding>
</configuration>
</plugin>
<!-- assembly plugin -->
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptors>
<descriptor>src/main/assembly/package.xml</descriptor>
</descriptors>
<finalName>datax</finalName>
</configuration>
<executions>
<execution>
<id>dwzip</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>


@@ -0,0 +1,35 @@
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id></id>
<formats>
<format>dir</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<fileSets>
<fileSet>
<directory>src/main/resources</directory>
<includes>
<include>plugin.json</include>
<include>plugin_job_template.json</include>
</includes>
<outputDirectory>plugin/reader/nfsreader</outputDirectory>
</fileSet>
<fileSet>
<directory>target/</directory>
<includes>
<include>nfsreader-0.0.1-SNAPSHOT.jar</include>
</includes>
<outputDirectory>plugin/reader/nfsreader</outputDirectory>
</fileSet>
</fileSets>
<dependencySets>
<dependencySet>
<useProjectArtifact>false</useProjectArtifact>
<outputDirectory>plugin/reader/nfsreader/libs</outputDirectory>
<scope>runtime</scope>
</dependencySet>
</dependencySets>
</assembly>


@@ -0,0 +1,121 @@
package com.modelengine.edatamate.plugin.reader.nfsreader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
/**
 * A simple NAS mount utility for Linux.
 * Linux only; requires root or sudo privileges.
 */
public final class MountUtil {
private static final Logger LOG = LoggerFactory.getLogger(MountUtil.class);
private MountUtil() {
}
/**
 * Mounts a remote directory.
 *
 * @param remote remote address, e.g. 192.168.1.1:/test
 * @param mountPoint local mount point, e.g. /mnt/nas
 * @param type file system type: nfs, cifs, ...
 * @param options extra mount options, e.g. ro,vers=3 or username=xxx,password=xxx
 */
public static void mount(String remote, String mountPoint, String type, String options) {
try {
Path mp = Paths.get(mountPoint);
if (isMounted(mountPoint)) {
throw new IOException("Already mounted: " + mountPoint);
}
Files.createDirectories(mp);
ProcessBuilder pb = new ProcessBuilder();
if (options == null || options.isEmpty()) {
pb.command("mount", "-t", type, remote, mountPoint);
} else {
pb.command("mount", "-t", type, "-o", options, remote, mountPoint);
}
LOG.info("Mount command: {}", pb.command());
pb.redirectErrorStream(true);
Process p = pb.start();
StringBuilder output = new StringBuilder();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
String line;
while ((line = reader.readLine()) != null) {
output.append(line).append(System.lineSeparator());
}
}
int rc = p.waitFor();
if (rc != 0) {
throw new RuntimeException("Mount failed, exit=" + rc + ", output: " + output);
}
} catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
}
}
/**
 * Unmounts a mount point.
 *
 * @param mountPoint mount point path
 * @throws IOException if the unmount fails
 * @throws InterruptedException if waiting for the process is interrupted
 */
public static void umount(String mountPoint) throws IOException, InterruptedException {
if (!isMounted(mountPoint)) {
return;
}
ProcessBuilder pb = new ProcessBuilder("umount", "-l", mountPoint);
pb.redirectErrorStream(true);
Process p = pb.start();
StringBuilder output = new StringBuilder();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
String line;
while ((line = reader.readLine()) != null) {
output.append(line).append(System.lineSeparator());
}
}
int rc = p.waitFor();
if (rc != 0) {
throw new RuntimeException("Mount failed, exit=" + rc + ", output: " + output);
}
// Clean up the now-empty mount directory
try {
Files.deleteIfExists(Paths.get(mountPoint));
} catch (DirectoryNotEmptyException ignore) {
// directory not empty, keep it
}
}
/**
 * Checks whether a mount point is already mounted.
 *
 * @param mountPoint mount point path
 * @return true if already mounted
 * @throws IOException if /proc/mounts cannot be read
 */
public static boolean isMounted(String mountPoint) throws IOException {
Path procMounts = Paths.get("/proc/mounts");
if (!Files.exists(procMounts)) {
throw new IOException("/proc/mounts not found");
}
String expected = mountPoint.trim();
List<String> lines = Files.readAllLines(procMounts);
return lines.stream()
.map(l -> l.split("\\s+"))
.filter(a -> a.length >= 2)
.anyMatch(a -> a[1].equals(expected));
}
}
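
For illustration, a minimal usage sketch of the MountUtil class above. The NFS export and mount point are placeholders, the class is assumed to sit in the same package as MountUtil, and it must run as root on a Linux host:

public class MountUtilExample {
    public static void main(String[] args) throws Exception {
        // Placeholder NFS export and local mount point.
        String remote = "192.168.1.1:/test";
        String mountPoint = "/mnt/nas-example";
        // Mount read-only with NFSv3 options; MountUtil creates the directory if needed.
        MountUtil.mount(remote, mountPoint, "nfs", "ro,vers=3");
        try {
            System.out.println("Mounted: " + MountUtil.isMounted(mountPoint));
        } finally {
            // Lazy unmount; the empty mount directory is removed afterwards.
            MountUtil.umount(mountPoint);
        }
    }
}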


@@ -0,0 +1,112 @@
package com.modelengine.edatamate.plugin.reader.nfsreader;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.element.StringColumn;
import com.alibaba.datax.common.plugin.RecordSender;
import com.alibaba.datax.common.spi.Reader;
import com.alibaba.datax.common.util.Configuration;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class NfsReader extends Reader {
private static final Logger LOG = LoggerFactory.getLogger(NfsReader.class);
public static class Job extends Reader.Job {
private Configuration jobConfig = null;
private String mountPoint;
@Override
public void init() {
this.jobConfig = super.getPluginJobConf();
}
@Override
public void prepare() {
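            // Mount the remote NFS export at a unique per-job mount point and
            // record it in the job configuration so tasks can find it.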
this.mountPoint = "/dataset/mount/" + UUID.randomUUID();
this.jobConfig.set("mountPoint", this.mountPoint);
MountUtil.mount(this.jobConfig.getString("ip") + ":" + this.jobConfig.getString("path"),
mountPoint, "nfs", StringUtils.EMPTY);
}
@Override
public List<Configuration> split(int adviceNumber) {
return Collections.singletonList(this.jobConfig);
}
@Override
public void post() {
try {
MountUtil.umount(this.mountPoint);
new File(this.mountPoint).deleteOnExit();
} catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
public void destroy() {
}
}
public static class Task extends Reader.Task {
private Configuration jobConfig;
private String mountPoint;
private Set<String> fileType;
@Override
public void init() {
this.jobConfig = super.getPluginJobConf();
this.mountPoint = this.jobConfig.getString("mountPoint");
this.fileType = new HashSet<>(this.jobConfig.getList("fileType", Collections.emptyList(), String.class));
}
@Override
public void startRead(RecordSender recordSender) {
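            // List regular files directly under the mount point, keep those whose
            // suffix matches the optional fileType filter, and emit one record per
            // file name (file contents are not read here).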
try (Stream<Path> stream = Files.list(Paths.get(this.mountPoint))) {
List<String> files = stream.filter(Files::isRegularFile)
.filter(file -> fileType.isEmpty() || fileType.contains(getFileSuffix(file)))
.map(path -> path.getFileName().toString())
.collect(Collectors.toList());
files.forEach(filePath -> {
Record record = recordSender.createRecord();
record.addColumn(new StringColumn(filePath));
recordSender.sendToWriter(record);
});
this.jobConfig.set("columnNumber", 1);
} catch (IOException e) {
LOG.error("Error reading files from mount point: {}", this.mountPoint, e);
throw new RuntimeException(e);
}
}
private String getFileSuffix(Path path) {
String fileName = path.getFileName().toString();
int lastDotIndex = fileName.lastIndexOf('.');
if (lastDotIndex == -1 || lastDotIndex == fileName.length() - 1) {
return "";
}
return fileName.substring(lastDotIndex + 1);
}
@Override
public void destroy() {
}
}
}


@@ -0,0 +1,6 @@
{
"name": "nfsreader",
"class": "com.modelengine.edatamate.plugin.reader.nfsreader.NfsReader",
"description": "read from nas file system",
"developer": "modelengine"
}


@@ -0,0 +1,7 @@
{
"name": "nfsreader",
"parameter": {
"ip": "127.0.0.1",
"path": "/test"
}
}


@@ -0,0 +1,77 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>datax-all</artifactId>
<groupId>com.alibaba.datax</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>nfswriter</artifactId>
<name>nfswriter</name>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>com.alibaba.datax</groupId>
<artifactId>datax-core</artifactId>
<version>${datax-project-version}</version>
</dependency>
<dependency>
<groupId>com.alibaba.datax</groupId>
<artifactId>datax-common</artifactId>
<version>${datax-project-version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.properties</include>
</includes>
</resource>
</resources>
<plugins>
<!-- compiler plugin -->
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${jdk-version}</source>
<target>${jdk-version}</target>
<encoding>${project-sourceEncoding}</encoding>
</configuration>
</plugin>
<!-- assembly plugin -->
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptors>
<descriptor>src/main/assembly/package.xml</descriptor>
</descriptors>
<finalName>datax</finalName>
</configuration>
<executions>
<execution>
<id>dwzip</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>


@@ -0,0 +1,35 @@
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id></id>
<formats>
<format>dir</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<fileSets>
<fileSet>
<directory>src/main/resources</directory>
<includes>
<include>plugin.json</include>
<include>plugin_job_template.json</include>
</includes>
<outputDirectory>plugin/writer/nfswriter</outputDirectory>
</fileSet>
<fileSet>
<directory>target/</directory>
<includes>
<include>nfswriter-0.0.1-SNAPSHOT.jar</include>
</includes>
<outputDirectory>plugin/writer/nfswriter</outputDirectory>
</fileSet>
</fileSets>
<dependencySets>
<dependencySet>
<useProjectArtifact>false</useProjectArtifact>
<outputDirectory>plugin/writer/nfswriter/libs</outputDirectory>
<scope>runtime</scope>
</dependencySet>
</dependencySets>
</assembly>


@@ -0,0 +1,121 @@
package com.modelengine.edatamate.plugin.writer.nfswriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
/**
 * A simple NAS mount utility for Linux.
 * Linux only; requires root or sudo privileges.
 */
public final class MountUtil {
private static final Logger LOG = LoggerFactory.getLogger(MountUtil.class);
private MountUtil() {
}
/**
 * Mounts a remote directory.
 *
 * @param remote remote address, e.g. 192.168.1.1:/test
 * @param mountPoint local mount point, e.g. /mnt/nas
 * @param type file system type: nfs, cifs, ...
 * @param options extra mount options, e.g. ro,vers=3 or username=xxx,password=xxx
 */
public static void mount(String remote, String mountPoint, String type, String options) {
try {
Path mp = Paths.get(mountPoint);
if (isMounted(mountPoint)) {
throw new IOException("Already mounted: " + mountPoint);
}
Files.createDirectories(mp);
ProcessBuilder pb = new ProcessBuilder();
if (options == null || options.isEmpty()) {
pb.command("mount", "-t", type, remote, mountPoint);
} else {
pb.command("mount", "-t", type, "-o", options, remote, mountPoint);
}
LOG.info("Mount command: {}", pb.command());
pb.redirectErrorStream(true);
Process p = pb.start();
StringBuilder output = new StringBuilder();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
String line;
while ((line = reader.readLine()) != null) {
output.append(line).append(System.lineSeparator());
}
}
int rc = p.waitFor();
if (rc != 0) {
throw new RuntimeException("Mount failed, exit=" + rc + ", output: " + output);
}
} catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
}
}
/**
 * Unmounts a mount point.
 *
 * @param mountPoint mount point path
 * @throws IOException if the unmount fails
 * @throws InterruptedException if waiting for the process is interrupted
 */
public static void umount(String mountPoint) throws IOException, InterruptedException {
if (!isMounted(mountPoint)) {
return;
}
ProcessBuilder pb = new ProcessBuilder("umount", "-l", mountPoint);
pb.redirectErrorStream(true);
Process p = pb.start();
StringBuilder output = new StringBuilder();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
String line;
while ((line = reader.readLine()) != null) {
output.append(line).append(System.lineSeparator());
}
}
int rc = p.waitFor();
if (rc != 0) {
throw new RuntimeException("Mount failed, exit=" + rc + ", output: " + output);
}
// Clean up the now-empty mount directory
try {
Files.deleteIfExists(Paths.get(mountPoint));
} catch (DirectoryNotEmptyException ignore) {
// directory not empty, keep it
}
}
/**
 * Checks whether a mount point is already mounted.
 *
 * @param mountPoint mount point path
 * @return true if already mounted
 * @throws IOException if /proc/mounts cannot be read
 */
public static boolean isMounted(String mountPoint) throws IOException {
Path procMounts = Paths.get("/proc/mounts");
if (!Files.exists(procMounts)) {
throw new IOException("/proc/mounts not found");
}
String expected = mountPoint.trim();
List<String> lines = Files.readAllLines(procMounts);
return lines.stream()
.map(l -> l.split("\\s+"))
.filter(a -> a.length >= 2)
.anyMatch(a -> a[1].equals(expected));
}
}


@@ -0,0 +1,100 @@
package com.modelengine.edatamate.plugin.writer.nfswriter;
import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.exception.CommonErrorCode;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.plugin.RecordReceiver;
import com.alibaba.datax.common.spi.Writer;
import com.alibaba.datax.common.util.Configuration;
import org.apache.commons.lang3.StringUtils;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
public class NfsWriter extends Writer {
public static class Job extends Writer.Job {
private Configuration jobConfig;
private String mountPoint;
@Override
public void init() {
this.jobConfig = super.getPluginJobConf();
}
@Override
public void prepare() {
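            // Mount the remote NFS export at a unique per-job mount point and
            // make sure the destination directory exists before tasks start writing.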
this.mountPoint = "/dataset/mount/" + UUID.randomUUID();
this.jobConfig.set("mountPoint", this.mountPoint);
new File(this.mountPoint).mkdirs();
MountUtil.mount(this.jobConfig.getString("ip") + ":" + this.jobConfig.getString("path"),
mountPoint, "nfs", StringUtils.EMPTY);
String destPath = this.jobConfig.getString("destPath");
new File(destPath).mkdirs();
}
@Override
public List<Configuration> split(int mandatoryNumber) {
return Collections.singletonList(this.jobConfig);
}
@Override
public void post() {
try {
MountUtil.umount(this.mountPoint);
new File(this.mountPoint).deleteOnExit();
} catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
public void destroy() {
}
}
public static class Task extends Writer.Task {
private Configuration jobConfig;
private String mountPoint;
private String destPath;
private List<String> files;
@Override
public void init() {
this.jobConfig = super.getPluginJobConf();
this.destPath = this.jobConfig.getString("destPath");
this.mountPoint = this.jobConfig.getString("mountPoint");
this.files = this.jobConfig.getList("files", Collections.emptyList(), String.class);
}
@Override
public void startWrite(RecordReceiver lineReceiver) {
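            // Each record carries a file name produced by the reader; copy that file
            // from the mount point to destPath via rsync, honoring the optional
            // "files" allow-list.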
try {
Record record;
while ((record = lineReceiver.getFromReader()) != null) {
String fileName = record.getColumn(0).asString();
if (StringUtils.isBlank(fileName)) {
continue;
}
if (!files.isEmpty() && !files.contains(fileName)) {
continue;
}
String filePath = this.mountPoint + "/" + fileName;
ShellUtil.runCommand("rsync", Arrays.asList("--no-links", "--chmod=750", "--", filePath,
this.destPath + "/" + fileName));
}
} catch (Exception e) {
throw DataXException.asDataXException(CommonErrorCode.RUNTIME_ERROR, e);
}
}
@Override
public void destroy() {
}
}
}


@@ -0,0 +1,43 @@
package com.modelengine.edatamate.plugin.writer.nfswriter;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
public class ShellUtil {
/**
 * Runs an external command (e.g. rsync) and captures its output.
 *
 * @param cmd the command to run
 * @param extraArgs extra arguments, may be null or empty
 * @return the full command output (stdout + stderr)
 * @throws Exception if the command exits non-zero or an I/O error occurs
 */
public static String runCommand(String cmd, List<String> extraArgs) throws Exception {
List<String> commands = new ArrayList<>();
commands.add(cmd);
if (extraArgs != null && !extraArgs.isEmpty()) {
commands.addAll(extraArgs);
}
ProcessBuilder pb = new ProcessBuilder(commands);
pb.redirectErrorStream(true); // merge stdout and stderr
Process p = pb.start();
StringBuilder sb = new StringBuilder();
try (BufferedReader br = new BufferedReader(
new InputStreamReader(p.getInputStream()))) {
String line;
while ((line = br.readLine()) != null) {
sb.append(line).append(System.lineSeparator());
}
}
int exit = p.waitFor();
if (exit != 0) {
throw new RuntimeException("rsync exited with code " + exit + System.lineSeparator() + sb);
}
return sb.toString();
}
}
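
For illustration, a minimal usage sketch of ShellUtil that mirrors the rsync invocation used by NfsWriter. The file paths are placeholders, the class is assumed to sit in the same package as ShellUtil, and rsync must be installed on the host:

import java.util.Arrays;

public class ShellUtilExample {
    public static void main(String[] args) throws Exception {
        // Copy one file from a mounted NFS path to a destination directory,
        // using the same flags NfsWriter passes: no symlinks, chmod 750.
        String output = ShellUtil.runCommand("rsync", Arrays.asList(
                "--no-links", "--chmod=750", "--",
                "/dataset/mount/example/file.csv", "/dataset/out/file.csv"));
        System.out.print(output);
    }
}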


@@ -0,0 +1,6 @@
{
"name": "nfswriter",
"class": "com.modelengine.edatamate.plugin.writer.nfswriter.NfsWriter",
"description": "write to local",
"developer": "modelengine"
}

View File

@@ -0,0 +1,8 @@
{
"name": "nfswriter",
"parameter": {
"ip": "127.0.0.1",
"path": "/test",
"destPath": ""
}
}

runtime/datax/package.xml

@@ -0,0 +1,585 @@
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id></id>
<formats>
<format>tar.gz</format>
<format>dir</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<fileSets>
<fileSet>
<directory>transformer/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
<fileSet>
<directory>core/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
<!-- reader -->
<fileSet>
<directory>mysqlreader/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
<!-- <fileSet>-->
<!-- <directory>oceanbasev10reader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>obhbasereader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>drdsreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>oraclereader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>sqlserverreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<fileSet>
<directory>postgresqlreader/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
<!-- <fileSet>-->
<!-- <directory>kingbaseesreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>rdbmsreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>odpsreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>otsreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>otsstreamreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>txtfilereader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>ossreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>mongodbreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>tdenginereader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>streamreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>ftpreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>clickhousereader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hdfsreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hbase11xreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hbase094xreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>opentsdbreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>cassandrareader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>gdbreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hbase11xsqlreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hbase20xsqlreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>tsdbreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>datahubreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>loghubreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>starrocksreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>dorisreader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>sybasereader/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<fileSet>
<directory>gaussdbreader/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
<fileSet>
<directory>nfsreader/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
<!-- writer -->
<fileSet>
<directory>mysqlwriter/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
<!-- <fileSet>-->
<!-- <directory>tdenginewriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>starrockswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>drdswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>odpswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>doriswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>txtfilewriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>ftpwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>osswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>adswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>streamwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>otswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>mongodbwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>oraclewriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>sqlserverwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<fileSet>
<directory>postgresqlwriter/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
<!-- <fileSet>-->
<!-- <directory>kingbaseeswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>rdbmswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>ocswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hdfswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hbase11xwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hbase094xwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hbase11xsqlwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>elasticsearchwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hbase20xsqlwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>tsdbwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>adbpgwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>cassandrawriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>clickhousewriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>databendwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>oscarwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>oceanbasev10writer/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>obhbasewriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>gdbwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>kuduwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>hologresjdbcwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>datahubwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>loghubwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>selectdbwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>neo4jwriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<!-- <fileSet>-->
<!-- <directory>sybasewriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<fileSet>
<directory>gaussdbwriter/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
<!-- <fileSet>-->
<!-- <directory>milvuswriter/target/datax/</directory>-->
<!-- <includes>-->
<!-- <include>**/*.*</include>-->
<!-- </includes>-->
<!-- <outputDirectory>datax</outputDirectory>-->
<!-- </fileSet>-->
<fileSet>
<directory>nfswriter/target/datax/</directory>
<includes>
<include>**/*.*</include>
</includes>
<outputDirectory>datax</outputDirectory>
</fileSet>
</fileSets>
</assembly>

runtime/datax/pom.xml

@@ -0,0 +1,308 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.alibaba.datax</groupId>
<artifactId>datax-all</artifactId>
<version>0.0.1-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
<version>1.3</version>
</dependency>
</dependencies>
<name>datax-all</name>
<packaging>pom</packaging>
<properties>
<jdk-version>1.8</jdk-version>
<datax-project-version>0.0.1-SNAPSHOT</datax-project-version>
<commons-lang3-version>3.3.2</commons-lang3-version>
<commons-configuration-version>1.10</commons-configuration-version>
<commons-cli-version>1.2</commons-cli-version>
<fastjson-version>2.0.23</fastjson-version>
<guava-version>16.0.1</guava-version>
<diamond.version>3.7.2.1-SNAPSHOT</diamond.version>
<!-- slf4j 1.7.10 and logback-classic 1.0.13 are meant to be used together -->
<slf4j-api-version>1.7.10</slf4j-api-version>
<logback-classic-version>1.0.13</logback-classic-version>
<commons-io-version>2.4</commons-io-version>
<junit-version>4.13.1</junit-version>
<tddl.version>5.1.22-1</tddl.version>
<swift-version>1.0.0</swift-version>
<project-sourceEncoding>UTF-8</project-sourceEncoding>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<maven.compiler.encoding>UTF-8</maven.compiler.encoding>
<mysql.driver.version>8.0.33</mysql.driver.version>
</properties>
<modules>
<module>common</module>
<module>core</module>
<module>transformer</module>
<!-- reader -->
<module>mysqlreader</module>
<!-- <module>drdsreader</module>-->
<!-- <module>sqlserverreader</module>-->
<module>postgresqlreader</module>
<!-- <module>kingbaseesreader</module>-->
<!-- <module>oraclereader</module>-->
<!-- <module>cassandrareader</module>-->
<!-- <module>oceanbasev10reader</module>-->
<!-- <module>obhbasereader</module>-->
<!-- <module>rdbmsreader</module>-->
<!-- <module>odpsreader</module>-->
<!-- <module>otsreader</module>-->
<!-- <module>otsstreamreader</module>-->
<!-- <module>hbase11xreader</module>-->
<!-- <module>hbase094xreader</module>-->
<!-- <module>hbase11xsqlreader</module>-->
<!-- <module>hbase20xsqlreader</module>-->
<!-- <module>ossreader</module>-->
<!-- <module>hdfsreader</module>-->
<!-- <module>ftpreader</module>-->
<!-- <module>txtfilereader</module>-->
<!-- <module>streamreader</module>-->
<!-- <module>clickhousereader</module>-->
<!-- <module>mongodbreader</module>-->
<!-- <module>tdenginereader</module>-->
<!-- <module>gdbreader</module>-->
<!-- <module>tsdbreader</module>-->
<!-- <module>opentsdbreader</module>-->
<!-- <module>loghubreader</module>-->
<!-- <module>datahubreader</module>-->
<!-- <module>starrocksreader</module>-->
<!-- <module>sybasereader</module>-->
<!-- <module>dorisreader</module>-->
<module>nfsreader</module>
<!-- writer -->
<module>mysqlwriter</module>
<!-- <module>starrockswriter</module>-->
<!-- <module>drdswriter</module>-->
<!-- <module>databendwriter</module>-->
<!-- <module>oraclewriter</module>-->
<!-- <module>sqlserverwriter</module>-->
<module>postgresqlwriter</module>
<!-- <module>kingbaseeswriter</module>-->
<!-- <module>adswriter</module>-->
<!-- <module>oceanbasev10writer</module>-->
<!-- <module>obhbasewriter</module>-->
<!-- <module>adbpgwriter</module>-->
<!-- <module>hologresjdbcwriter</module>-->
<!-- <module>rdbmswriter</module>-->
<!-- <module>odpswriter</module>-->
<!-- <module>osswriter</module>-->
<!-- <module>otswriter</module>-->
<!-- <module>hbase11xwriter</module>-->
<!-- <module>hbase094xwriter</module>-->
<!-- <module>hbase11xsqlwriter</module>-->
<!-- <module>hbase20xsqlwriter</module>-->
<!-- <module>kuduwriter</module>-->
<!-- <module>ftpwriter</module>-->
<!-- <module>hdfswriter</module>-->
<!-- <module>txtfilewriter</module>-->
<!-- <module>streamwriter</module>-->
<!-- <module>elasticsearchwriter</module>-->
<!-- <module>mongodbwriter</module>-->
<!-- <module>tdenginewriter</module>-->
<!-- <module>ocswriter</module>-->
<!-- <module>tsdbwriter</module>-->
<!-- <module>gdbwriter</module>-->
<!-- <module>oscarwriter</module>-->
<!-- <module>loghubwriter</module>-->
<!-- <module>datahubwriter</module>-->
<!-- <module>cassandrawriter</module>-->
<!-- <module>clickhousewriter</module>-->
<!-- <module>doriswriter</module>-->
<!-- <module>selectdbwriter</module>-->
<!-- <module>adbmysqlwriter</module>-->
<!-- <module>sybasewriter</module>-->
<!-- <module>neo4jwriter</module>-->
<!-- <module>milvuswriter</module>-->
<module>nfswriter</module>
<!-- common support module -->
<module>plugin-rdbms-util</module>
<module>plugin-unstructured-storage-util</module>
<module>gaussdbreader</module>
<module>gaussdbwriter</module>
<!-- <module>datax-example</module>-->
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>${commons-lang3-version}</version>
</dependency>
<dependency>
<groupId>com.alibaba.fastjson2</groupId>
<artifactId>fastjson2</artifactId>
<version>${fastjson-version}</version>
</dependency>
<!--<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava-version}</version>
</dependency>-->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons-io-version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j-api-version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback-classic-version}</version>
</dependency>
<dependency>
<groupId>com.taobao.tddl</groupId>
<artifactId>tddl-client</artifactId>
<version>${tddl.version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<exclusion>
<groupId>com.taobao.diamond</groupId>
<artifactId>diamond-client</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.taobao.diamond</groupId>
<artifactId>diamond-client</artifactId>
<version>${diamond.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba.search.swift</groupId>
<artifactId>swift_client</artifactId>
<version>${swift-version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit-version}</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.9.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.17.1</version>
</dependency>
</dependencies>
</dependencyManagement>
<repositories>
<repository>
<id>central</id>
<name>Nexus aliyun</name>
<url>https://maven.aliyun.com/repository/central</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
<repository>
<id>spring</id>
<name>spring</name>
<url>https://maven.aliyun.com/repository/spring</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>central</id>
<name>Nexus aliyun</name>
<url>https://maven.aliyun.com/repository/central</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</pluginRepository>
</pluginRepositories>
<build>
<resources>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.properties</include>
</includes>
</resource>
</resources>
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.2-beta-5</version>
<configuration>
<finalName>datax</finalName>
<descriptors>
<descriptor>package.xml</descriptor>
</descriptors>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>${jdk-version}</source>
<target>${jdk-version}</target>
<encoding>${project-sourceEncoding}</encoding>
</configuration>
</plugin>
</plugins>
</build>
</project>