The following Dockerfile fails with the error `COPY failed: no source files were specified`:
```
FROM ubuntu-mygcc:V1
WORKDIR /usr/local/lib
COPY lib* /usr/local/lib
COPY /usr/local/bin/sqlite3 /usr/local/bin
ENV LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
```
This error means the COPY instruction could not find its source files. COPY sources are resolved relative to the build context (the directory you pass to `docker build`), so the pattern `lib*` must match files inside that context, and an absolute host path such as `/usr/local/bin/sqlite3` cannot be copied directly.
For example, if the build context contains a `lib` folder with your libraries and a staged copy of the `sqlite3` binary, and you want them in the image's `/usr/local/lib` and `/usr/local/bin` directories, you can write:
```
FROM ubuntu-mygcc:V1
WORKDIR /usr/local/lib
COPY lib/ /usr/local/lib/
COPY sqlite3 /usr/local/bin/
ENV LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
```
Note that `COPY lib/ /usr/local/lib/` copies the contents of the `lib` folder; a trailing slash on the destination makes it explicit that the target is a directory. Also remember that COPY can only reference files inside the build context, so the `sqlite3` binary must be placed inside the context (for example next to the Dockerfile) before building.
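As a rough sketch of the surrounding setup (the directory name `docker-build/`, the staged `sqlite3` binary, and the image tag are illustrative assumptions, not part of the original question), the build context could be laid out and built like this:
```
docker-build/
├── Dockerfile
├── lib/        # shared libraries to install, e.g. libsqlite3.so*
└── sqlite3     # sqlite3 binary staged from the host

# run from inside docker-build/, so "." is the context that COPY resolves against
docker build -t ubuntu-mygcc:V2 .
```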
Related questions
I want the `frontend` service to use volumes as well, mapping it to the `/app/frontend` directory, and to run the install and build under `/app/frontend`. How can I achieve this?
docker-compose.yml:
```yaml
version: '3'
services:
  frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile
    ports:
      - 8010:80
    restart: always
  backend:
    build:
      context: ./backend
      dockerfile: Dockerfile
    volumes:
      - /app/backend:/app
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
    command: python manage.py runserver 0.0.0.0:8000
    ports:
      - 8011:8000
    restart: always
  celery-worker:
    build:
      context: ./backend
      dockerfile: Dockerfile
    volumes:
      - /app/backend:/app
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
    command: celery -A server worker -l info --pool=solo --concurrency=1
    depends_on:
      - redis
      - backend
    restart: always
  celery-beat:
    build:
      context: ./backend
      dockerfile: Dockerfile
    volumes:
      - /app/backend:/app
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
    command: celery -A server beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
    depends_on:
      - redis
      - backend
    restart: always
  redis:
    image: redis:latest
    ports:
      - 6379:6379
    restart: always
  mysql:
    image: mysql:latest
    environment:
      - MYSQL_ROOT_PASSWORD=sacfxSql258147@
    ports:
      - 8016:3306
    volumes:
      - ./mysql:/var/lib/mysql
    restart: always
```
frontend Dockerfile:
```
FROM node:16.18.1
WORKDIR /app/frontend
COPY package*.json ./
RUN npm install
COPY . .
RUN npm run build:prod

FROM nginx:latest
COPY --from=0 /app/frontend/dist/ /usr/share/nginx/html
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]
```
To map the frontend code to the `/app/frontend` directory inside the container and run the install and build against it, you can add a volume to the `frontend` service in `docker-compose.yml`. The updated `docker-compose.yml` looks like this:
```yaml
version: '3'
services:
  frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile
    ports:
      - 8010:80
    volumes:
      - ./frontend:/app/frontend
    restart: always
  backend:
    build:
      context: ./backend
      dockerfile: Dockerfile
    volumes:
      - /app/backend:/app
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
    command: python manage.py runserver 0.0.0.0:8000
    ports:
      - 8011:8000
    restart: always
  celery-worker:
    build:
      context: ./backend
      dockerfile: Dockerfile
    volumes:
      - /app/backend:/app
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
    command: celery -A server worker -l info --pool=solo --concurrency=1
    depends_on:
      - redis
      - backend
    restart: always
  celery-beat:
    build:
      context: ./backend
      dockerfile: Dockerfile
    volumes:
      - /app/backend:/app
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
    command: celery -A server beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
    depends_on:
      - redis
      - backend
    restart: always
  redis:
    image: redis:latest
    ports:
      - 6379:6379
    restart: always
  mysql:
    image: mysql:latest
    environment:
      - MYSQL_ROOT_PASSWORD=sacfxSql258147@
    ports:
      - 8016:3306
    volumes:
      - ./mysql:/var/lib/mysql
    restart: always
```
This maps the local `./frontend` directory to `/app/frontend` inside the running container. Keep in mind that the bind mount is only available at run time; the `RUN npm install` and `RUN npm run build:prod` steps in the Dockerfile still operate on the files copied from the build context during `docker-compose build`.
Note that this assumes `docker-compose.yml` and the `frontend` directory sit in the same directory; if not, adjust the path in `volumes` accordingly.
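If the goal is to run `npm install` and `npm run build:prod` directly against the mounted `/app/frontend` when the container starts, one option is a development-style service added under `services:`. This is only a sketch: the service name `frontend-dev` is an assumption, while the Node image and the build script come from the posted Dockerfile.
```yaml
  frontend-dev:
    image: node:16.18.1            # same Node version as the build stage in the Dockerfile
    working_dir: /app/frontend     # install and build inside the mounted directory
    volumes:
      - ./frontend:/app/frontend
    command: sh -c "npm install && npm run build:prod"
```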
```
WORKDIR /usr/share/emscommon
ENTRYPOINT ["/usr/share/emscommon/bin/emscommon"]
```
These are Dockerfile instructions that set the working directory and the container's entry point.
`WORKDIR /usr/share/emscommon` sets the working directory to `/usr/share/emscommon`: subsequent instructions, and the process started in the container, use this directory as their default path.
`ENTRYPOINT ["/usr/share/emscommon/bin/emscommon"]` makes the container execute `/usr/share/emscommon/bin/emscommon` when it starts; it can be understood as the command the container runs by default as its entry point.
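For context, a complete Dockerfile around these two instructions might look like the sketch below. Only WORKDIR and ENTRYPOINT come from the question; the base image `ubuntu:22.04` and the `COPY emscommon/ ...` line are assumptions for illustration.
```
FROM ubuntu:22.04
# assumed: the application files live in an emscommon/ folder inside the build context
COPY emscommon/ /usr/share/emscommon/
WORKDIR /usr/share/emscommon
ENTRYPOINT ["/usr/share/emscommon/bin/emscommon"]
# any arguments after the image name in `docker run <image> ...` are appended to the ENTRYPOINT command
```
Using the exec form (JSON array) as in the question means the program runs directly as PID 1 rather than through a shell, which is generally preferable for signal handling.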