Unverified commit e6dec8d2 authored by lgcareer, committed by GitHub
Browse files

fix #2442: when the resource name contains '$', it needs to be translated to '\$' (#2524)



* fix #2442: when the resource name contains '$', it needs to be translated to '\$'

* fix #2442: when the resource name contains '$', it needs to be translated to '/$'

Co-authored-by: qiaozhanwei <qiaozhanwei@outlook.com>
Co-authored-by: dailidong <dailidong66@gmail.com>
parent c50ad124
Loading
Loading
Loading
Loading
+24 −13
Original line number Diff line number Diff line
@@ -45,8 +45,10 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.stream.Collectors;

import static org.apache.dolphinscheduler.common.Constants.*;
@@ -315,7 +317,6 @@ public class ResourcesService extends BaseService {
            return result;
        }


        if (name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) {
            putMsg(result, Status.SUCCESS);
            return result;
@@ -323,9 +324,10 @@ public class ResourcesService extends BaseService {

        //check whether the resource already exists
        String originFullName = resource.getFullName();
        String originResourceName = resource.getAlias();

        String fullName = String.format("%s%s",originFullName.substring(0,originFullName.lastIndexOf("/")+1),name);
        if (!resource.getAlias().equals(name) && checkResourceExists(fullName, 0, type.ordinal())) {
        if (!originResourceName.equals(name) && checkResourceExists(fullName, 0, type.ordinal())) {
            logger.error("resource {} already exists, can't recreate", name);
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
@@ -336,8 +338,22 @@ public class ResourcesService extends BaseService {
        if (StringUtils.isEmpty(tenantCode)){
            return result;
        }
        // verify whether the resource exists in storage
        // get the path of origin file in storage
        String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName);
        try {
            if (!HadoopUtils.getInstance().exists(originHdfsFileName)) {
                logger.error("{} not exist", originHdfsFileName);
                putMsg(result,Status.RESOURCE_NOT_EXIST);
                return result;
            }
        } catch (IOException e) {
            logger.error(e.getMessage(),e);
            throw new ServiceException(Status.HDFS_OPERATION_ERROR);
        }

        String nameWithSuffix = name;
        String originResourceName = resource.getAlias();

        if (!resource.isDirectory()) {
            //get the file suffix
            String suffix = originResourceName.substring(originResourceName.lastIndexOf("."));
@@ -361,10 +377,11 @@ public class ResourcesService extends BaseService {
        try {
            resourcesMapper.updateById(resource);
            if (resource.isDirectory() && CollectionUtils.isNotEmpty(childrenResource)) {
                String matcherFullName = Matcher.quoteReplacement(fullName);
                List<Resource> childResourceList = new ArrayList<>();
                List<Resource> resourceList = resourcesMapper.listResourceByIds(childrenResource.toArray(new Integer[childrenResource.size()]));
                childResourceList = resourceList.stream().map(t -> {
                    t.setFullName(t.getFullName().replaceFirst(oldFullName, fullName));
                    t.setFullName(t.getFullName().replaceFirst(oldFullName, matcherFullName));
                    t.setUpdateTime(now);
                    return t;
                }).collect(Collectors.toList());
@@ -389,19 +406,13 @@ public class ResourcesService extends BaseService {
            return result;
        }

        // get the path of origin file in hdfs
        String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName);
        // get the path of dest file in hdfs
        String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName);


        try {
            if (HadoopUtils.getInstance().exists(originHdfsFileName)) {
                logger.info("hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName);
            logger.info("start hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName);
            HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true);
            } else {
                logger.error("{} not exist", originHdfsFileName);
                putMsg(result,Status.RESOURCE_NOT_EXIST);
            }
        } catch (Exception e) {
            logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e);
            putMsg(result,Status.HDFS_COPY_FAIL);
+30 −13
Original line number Diff line number Diff line
@@ -19,12 +19,16 @@ package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
@@ -37,7 +41,6 @@ import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.omg.CORBA.Any;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
@@ -172,10 +175,29 @@ public class ResourcesServiceTest {
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(),result.getMsg());

        //RESOURCE_NOT_EXIST
        user.setId(1);
        Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser());
        Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
        PowerMockito.when(HadoopUtils.getHdfsFileName(Mockito.any(), Mockito.any(),Mockito.anyString())).thenReturn("test1");

        try {
            Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(false);
        } catch (IOException e) {
            logger.error(e.getMessage(),e);
        }
        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF);
        Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg());

        //SUCCESS
        user.setId(1);
        Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser());
        Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
        try {
            Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(true);
        } catch (IOException e) {
            logger.error(e.getMessage(),e);
        }

        result = resourcesService.updateResource(user,1,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.FILE);
        logger.info(result.toString());
@@ -199,21 +221,16 @@ public class ResourcesServiceTest {
        logger.info(result.toString());
        Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(),result.getMsg());

        //RESOURCE_NOT_EXIST
        Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
        PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test1");

        //SUCCESS
        Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
        PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test");
        try {
            Mockito.when(hadoopUtils.exists("test")).thenReturn(true);
        } catch (IOException e) {
            e.printStackTrace();
            PowerMockito.when(HadoopUtils.getInstance().copy(Mockito.anyString(),Mockito.anyString(),true,true)).thenReturn(true);
        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
        result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF);
        logger.info(result.toString());
        Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg());

        //SUCCESS
        PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test");
        result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest1.jar",ResourceType.UDF);
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg());