
Use case: how to build and run Docker containers with NVIDIA GPUs

GPU-accelerated computing is the use of a graphics processing unit to accelerate deep learning, analytics, and engineering applications. First introduced by NVIDIA in 2007, GPU accelerators now power energy-efficient data centers worldwide and play a key role in accelerating applications.

Containerizing GPU applications provides multiple benefits, such as ease of deployment, streamlined collaboration, isolation of individual devices, and more. However, Docker® containers are most commonly used to deploy CPU-based applications on several machines, where containers are both hardware- and platform-agnostic. The Docker engine does not natively support NVIDIA GPUs, because they rely on specialized hardware and require the NVIDIA driver to be installed.

For one of our projects we had to build and run Docker containers that use a graphics processing unit. Below is a step-by-step description of how this was achieved.

To start, we need a server with an NVIDIA GPU. Hetzner offers a server with a GeForce® GTX 1080.

Requirements:

OS: CentOS 7.3
Docker: version 17.06.0-ce
NVIDIA drivers: latest


Let's download and install the necessary drivers for this graphics card.
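For example, fetching driver version 375.20 (the version that appears in the nvidia-smi output later in this post) might look like the sketch below; the exact URL and file name depend on the card and the driver version you select on NVIDIA's download page:

# example only – take the URL/version from NVIDIA's driver download page
wget http://us.download.nvidia.com/XFree86/Linux-x86_64/375.20/NVIDIA-Linux-x86_64-375.20.run
chmod +x NVIDIA-Linux-x86_64-375.20.run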

After downloading, we need to install the driver, following all the installer steps:

./NVIDIA-Linux-x86_64-<major_version>.<minor_version>.run

Here's how NVIDIA and Docker work together:

We will need to install nvidia-docker and nvidia-docker-plugin; you can learn more about this on the NVIDIA GitHub page.

wget -P /tmp https://github.com/NVIDIA/nvidia-docker/releases/download/v1.0.1/nvidia-docker-1.0.1-1.x86_64.rpm
sudo rpm -i /tmp/nvidia-docker*.rpm && rm /tmp/nvidia-docker*.rpm

Launch the service:

sudo systemctl start nvidia-docker

Testing:

nvidia-docker run --rm nvidia/cuda nvidia-smi

You should get the following result:

Thu Jul 27 13:44:07 2017
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 375.20                 Driver Version: 375.20                    |
|-------------------------------+----------------------+----------------------+
| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
|===============================+======================+======================|
|   0  GeForce GTX 1080    Off  | 0000:01:00.0     Off |                  N/A |
| 33%   36C    P8    11W / 180W |      0MiB /  8145MiB |      0%      Default |
+-------------------------------+----------------------+----------------------+

+-----------------------------------------------------------------------------+
| Processes:                                                       GPU Memory |
|  GPU       PID  Type  Process name                               Usage      |
|=============================================================================|
|  No running processes found                                                 |
+-----------------------------------------------------------------------------+

Docker containers with GPU support in an orchestrator


* Docker Swarm is not suitable, as the docker-compose v3 file format provides no way to pass host devices into a container.



Thus, we can use the resources of the graphics card, but if we need orchestration tools, nvidia-docker by itself will not work, since it is only an add-on over Docker.

In our case, we launched the container in a Rancher cluster.

Now let's dive into the details of what nvidia-docker actually is. Basically, it is a service that creates a Docker volume and mounts the GPU devices into the container.

To find out what was created and mounted, we will need to run the following command:

curl -s http://localhost:3476/docker/cli

Here’s the result:

--volume-driver=nvidia-docker
--volume=nvidia_driver_375.20:/usr/local/nvidia:ro
--device=/dev/nvidiactl
--device=/dev/nvidia-uvm
--device=/dev/nvidia-uvm-tools
--device=/dev/nvidia0
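These are ordinary docker run options, so the same driver volume and devices can be passed to the plain Docker CLI (or declared in an orchestrator's service definition) without the nvidia-docker wrapper. A minimal sketch, assuming the nvidia_driver_375.20 volume has already been created by nvidia-docker-plugin:

# run the CUDA image with plain docker, passing the driver volume and GPU devices
docker run --rm \
  --volume-driver=nvidia-docker \
  --volume=nvidia_driver_375.20:/usr/local/nvidia:ro \
  --device=/dev/nvidiactl \
  --device=/dev/nvidia-uvm \
  --device=/dev/nvidia-uvm-tools \
  --device=/dev/nvidia0 \
  nvidia/cuda nvidia-smi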

For the mathematical calculations, we use a Python library: tensorflow-gpu (TensorFlow).
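A quick way to check that tensorflow-gpu actually sees the GPU from inside a CUDA container is a one-liner like the following (a sketch, assuming a TensorFlow 1.x release that provides tf.test.gpu_device_name()); an empty result means TensorFlow has fallen back to the CPU:

# prints something like /device:GPU:0 when the GPU is visible
python -c "import tensorflow as tf; print(tf.test.gpu_device_name())"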



Let's write a Dockerfile, with the base image taken from the nvidia/cuda repository on Docker Hub:


FROM nvidia/cuda:8.0-cudnn5-runtime-centos7

# the CentOS base image ships without pip, so install it before tensorflow-gpu
RUN yum install -y epel-release && yum install -y python-pip && \
    pip install tensorflow-gpu

# math.py contains our computation script
COPY math.py /math.py

ENTRYPOINT ["python", "math.py"]

Then write a docker-compose file to build and run the compute container:

version: '2'
services:
  math:
    build: .
    volumes:
      - nvidia_driver_375.20:/usr/local/nvidia:ro
    devices:
      - /dev/nvidiactl
      - /dev/nvidia-uvm
      - /dev/nvidia-uvm-tools
      - /dev/nvidia0

volumes:
  # the volume is created by nvidia-docker-plugin, so it is only referenced here
  nvidia_driver_375.20:
    external: true


Launching Docker container:

docker-compose up -d

If everything is done correctly, then when you run the command:


nvidia-docker run --rm nvidia/cuda nvidia-smi

You get the following result:

Thu Jul 27 15:12:40 2017
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 375.20                 Driver Version: 375.20                    |
|-------------------------------+----------------------+----------------------+
| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
|===============================+======================+======================|
|   0  GeForce GTX 1080    Off  | 0000:01:00.0     Off |                  N/A |
| 39%   53C    P2    86W / 180W |   7813MiB /  8145MiB |     56%      Default |
+-------------------------------+----------------------+----------------------+

+-----------------------------------------------------------------------------+
| Processes:                                                       GPU Memory |
|  GPU       PID  Type  Process name                               Usage      |
|=============================================================================|
|    0     27798    C   python                                      7803MiB   |
+-----------------------------------------------------------------------------+

In the process list you can see the python process using 7803MiB of GPU memory, while overall GPU utilization is at 56%.

Thus, we've just taught Docker, the leading container platform, to work with GeForce graphics cards, and it can now be used to containerize GPU-accelerated applications. This means you can containerize and isolate an accelerated application without any modifications and deploy it on any supported GPU-enabled infrastructure.

