PCPer Website Analysis

pcper.com

Rank: 71660
Content Type: text/html; charset=utf-8
Google site verification: 0tTkwmE8v4_GXW-4CgLvqu1sgHZlSs9ORLAewr9hH1U

Ads Analysis

  • AdSense publisher ID: 8197100734064809
  • Google Analytics tracking ID: UA-74320-1
  • Title attributes found on the page:

  • PC Perspective RSS
  • PC Perspective
  • Enter the terms you wish to search for.
  • This field is required.
  • Jump to the first comment of this posting.
  • Add a new comment to this page.
  • View: title.jpg
  • View: image.php_.jpg
  • View: Raspberry Pi 3 Model B+.jpg
  • View: Raspberry Pi 3 Model B+ PoE HAT.jpg
  • View: shotwithgeforce.PNG
  • View: 057.jpg
  • View: 6772701e6c7f.jpg
  • View: QC_XR.png
  • View: 6DoF.png
  • View: Eye_Tracking.png
  • View: ViveWave.png
  • View: amd_02.jpg
  • View: amdsec1.png
  • View: amdsec2.png
  • View: 2-Banner-1.jpg
  • View: 3-AX1600i-ghost.jpg
  • View: microsoft-2015-directx12-logo.jpg
  • View: microsoft-2018-gdc-directx12raytracing-rasterization.png
  • View: microsoft-2018-gdc-directx12raytracing-multibounce.png
  • View: microsoft-2018-gdc-PIX.png
  • View: ea-2018-SEED screenshot (002).png
  • View: microsoft-2018-winml-graphic.png
  • View: nvidia-2018-deeplearningcarupscale.png
  • View: 04.jpg
  • View: 05.jpg
  • View: vivepro.png
  • View: caldigit-tuff-hdd-1.jpg
  • View: PNY CS900 960GB SATA SSD.png
  • View: 180305-155833.jpg
  • View: 180305-155754.jpg
  • View: SVR845.jpg
  • View: _MG_9854_2.JPG
  • Read the rest of GDC 2018: NVIDIA Adds new Ansel and Highlights features to GeForce Experience.
  • Read the rest of Wolfenstein II is new, but you don't necessarily need new hardware to enjoy it.
  • Read the rest of Corsair AX1600i Digital ATX Power Supply Review.
  • Read the rest of MyDigitalSSD SBX M.2 NVMe SSD Full Capacity Roundup - 128GB, 256GB, 512GB, 1TB Tested!.
  • Read the rest of CalDigit Tuff Rugged External Drive: Take Your Data For a Swim.
  • Learn about PC Perspective
  • Follow PC Perspective on Twitter!
  • Subscribe to the PC Perspective RSS Feed
  • PC Perspective Reviews feed
  • Similar Website

    Website Content & JS Analysis

                                            
        PC Perspective | Your #1 Source for PC Hardware Reviews, News and Information!
    jwplayer.key="F4l3m9MBf5Il3HhBEH03r1S3Wm+nOx7R/zcF1Q=="
      GS_googleAddAdSenseService("ca-pub-8197100734064809");
      GS_googleEnableAllServices();
    var _sf_startpt=(new Date()).getTime()
    var timestamp = (new Date()).getTime();
        // Timestamp parity check; note that both branches currently write the same GA_googleAddAttr('SiteSkin', 'MSI') call.
        if (timestamp%2 == 0) {
    document.write(unescape("%3Cscript type='text/javascript'%3E GA_googleAddAttr('SiteSkin', 'MSI');   %3C/script%3E"));
    }
        else {
    document.write(unescape("%3Cscript type='text/javascript'%3E GA_googleAddAttr('SiteSkin', 'MSI');   %3C/script%3E"));
    }
        (function() {
            function async_load(script_url){
                var protocol = ('https:' == document.location.protocol ? 'https://' : 'http://');
                var s = document.createElement('script'); s.src = protocol + script_url;
                var x = document.getElementsByTagName('script')[0]; x.parentNode.insertBefore(s, x);
            }
            bm_website_code = '47149C9686DB4C2C';
            jQuery(document).ready(function(){async_load('asset.pagefair.com/measure.min.js')});
            jQuery(document).ready(function(){async_load('asset.pagefair.net/ads.min.js')});
        })();
    // Classic rollover/preload helpers: changeimage() swaps the target image and remembers
    // a link, warp() follows that link, preloadimages() caches images up front.
    function changeimage(towhat,url){
    	if (document.images){
    	document.images.targetimage.src=towhat.src
    	gotolink=url
    	}
    }
    function warp(){
    	window.location=gotolink
    }
    var myimages=new Array()
    var gotolink="#"
    function preloadimages(){
    	// Loop body reconstructed (truncated in the captured source); standard image-preloader pattern.
    	for (i=0;i<preloadimages.arguments.length;i++){
    		myimages[i]=new Image()
    		myimages[i].src=preloadimages.arguments[i]
    	}
    }
                    (function(global) {
                        if (typeof TTDUniversalPixelApi === 'function') {
                            var universalPixelApi = new TTDUniversalPixelApi();
                            universalPixelApi.init("ax523nj", ["wpzs870"], "https://insight.adsrvr.org/track/up", "ttdUniversalPixelTag08d09dd090344b55bd6c673eb5d29a18");
                        }
                    })(this);
    (function(a,b,c,d,e){e=a.createElement(b);a=a.getElementsByTagName(b)[0];e.async=1;e.src=c;a.parentNode.insertBefore(e,a)})(document,'script','//succeedscene.com/b703b07dd33cc94fae5bf438b29c611a75db85ea7ece02592d3885ea8842f5c08e99164fc92d729416ef1e7233818aa608d294ff536d316d8d7fb88b6dab');
            PC Perspective
      PCPER
      WEB
        Home
    Reviews
    News
    Forums
    Podcast
    HW Leaderboard
    PCPer Live!
        Graphics Cards
    Motherboards
    Cases/Cooling
    Processors
    Chipsets
    Memory
    Displays
    Systems
    Storage
    Mobile
    Networking
        GA_googleFillSlot("home2_728x90");  
                       // var $j = jQuery.noConflict();
                        var files = new Array();
                        var types = new Array();
                        var titles = new Array();
                        var descriptions = new Array();
                        var links = new Array();
                        var current_ii = 0;
                        var autoswitch = true;
                        function parseXML(data, textStatus) {
                            var ii = 0;
                            $.each(data.getElementsByTagName("fileName"), function() {
                                files[ii] = this.childNodes[0].nodeValue;
                                ii++;
                            });
                            ii = 0;
                            $.each(data.getElementsByTagName("type"), function() {
                                types[ii] = this.childNodes[0].nodeValue;
                                ii++;
                            });
                            ii = 0;
                            $.each(data.getElementsByTagName("title"), function() {
                                titles[ii] = this.childNodes[0].nodeValue;
                                ii++;
                            });
                            ii = 0;
                            $.each(data.getElementsByTagName("description"), function() {
                                descriptions[ii] = this.childNodes[0].nodeValue;
                                ii++;
                            });
                            ii = 0;
                            $.each(data.getElementsByTagName("link"), function() {
                                links[ii] = this.childNodes[0].nodeValue;
                                ii++;
                            });
                            autoSwitch();
                        }
                        function autoSwitch() {
                            if(autoswitch) {
                                showItem(current_ii+1, false);
                                setTimeout('autoSwitch();', 5000); //do this again in 5 seconds
                            }
                        }
                        function showItem(num, stopauto) {
                            //verify data
                            if(num < 1 || num > 5) {
                                num = 1;
                            }
                            current_ii = num;
                            showstuff('jsfb_image').style.background = 'url('+files[num-1]+')';
                            showstuff('jsfb_title').innerHTML = titles[num-1];
                            showstuff('jsfb_type').innerHTML = types[num-1];
                            showstuff('jsfb_description').innerHTML = descriptions[num-1];
                            if(stopauto) {
                                autoswitch = false;
                            }
                        }
                        function itemClick() {
                            document.location = links[current_ii-1];
                        }
                        //let's get us some data
                        $.get('flashbox/xml/images.xml', null, parseXML);
                            Flash player not detected. Click here to install flash.
                           var so = new SWFObject("files/highlightbox3.swf", "mymovie", "500", "280", "9", "#336699");
                           so.addParam("wmode", "transparent");
                           so.write("flashBox");
        Latest Reviews
                    Corsair AX1600i Digital ATX Power Supply Review
                    GDC 2018: Microsoft Discusses WinML API for Games
                    GDC 2018: Microsoft Announces DirectX Raytracing (DXR)
        Latest Topics
                    CalDigit Tuff Rugged External Drive: Take Your Data For a Swim
                    MyDigitalSSD SBX M.2 NVMe SSD Full Capacity Roundup - 128GB, 256GB, 512GB, 1TB Tested!
                    Huawei MediaPad M3 Lite 10 Review
                    CTS-Labs Details Potential AMD Security Vulnerabilities
                    3840x1600 Ultrawide Monitors: How 160 Lines Can Make All the Difference
                    Quick Look: CalDigit Thunderbolt Station 3 Plus Dock
                    Intel Optane SSD 800P 58GB, 118GB, and RAID Review - 3D XPoint Goes Mainstream
                    The Khronos Group Releases Vulkan 1.1 and SPIR-V 1.3
                    Corsair Carbide 275R Tempered Glass Mid Tower Case Review
        PCPer Mailbag #36 - 3/23/2018
        Subject: Editorial | March 23, 2018 - 09:00 AM | Jim Tanous
         Tagged: video, pcper mailbag, Josh Walrath
            

    It's time for the PCPer Mailbag, our weekly show where Ryan and the team answer your questions about the tech industry, the latest and greatest GPUs, the process of running a tech review website, and more!

    On today's show:

    00:49 - User-replaceable batteries?
    04:06 - Block GPUs from crypto mining?
    07:52 - Custom game resolutions?
    11:26 - What makes AIB GPUs special?
    15:01 - Spectre patch for Ryzen?
    16:27 - Security flaw disclosure windows?
    20:04 - Ryzen CPU coolers?
    23:14 - Thunderbolt 3 royalty free?
    24:54 - SSDs replacing RAM?
    27:19 - Gamepad and flight stick reviews?
    27:59 - VR graphics and upscaling?

    Want to have your question answered on a future Mailbag? Leave a comment on this post or in the YouTube comments for the latest video. Check out new Mailbag videos each Friday!

    Be sure to subscribe to our YouTube Channel to make sure you never miss our weekly reviews and podcasts, and please consider supporting PC Perspective via Patreon to help us keep videos like our weekly mailbag coming!

    Source: YouTube | Comments (2)

    SteelSeries exclusive audio bundle, the Arctis Pro and GameDAC
    Subject: General Tech | March 22, 2018 - 04:47 PM | Jeremy Hellstrom
    Tagged: steelseries, Arctis Pro, GameDAC, 9018Q2C, DAC, audio, headset

    At first glance, $250 seems a bit on the pricey side for a SteelSeries gaming headset; however, it also ships with a separate DAC, which explains the pricing.  The build of the Arctis Pro will be familiar to anyone who has seen the wireless Arctis 7; inside are a pair of 40mm neodymium drivers with a 20-40,000 Hz frequency response.  The DAC is an ESS Sabre 9018Q2C and offers optical, USB, and a 3.5mm line out; the second 3.5mm port, labeled mobile, is an input so you can play music on the run.  The one thing it does lack is a way to connect to other gaming headsets, which may be a deal breaker for many.  TechPowerUp gave it very high marks for audio playback, calling it the best they've heard yet this year.

    "The $250 / €270 Arctis Pro + GameDAC is SteelSeries' new flagship gaming audio system. It consists of an amazing gaming headset based on an improved iteration of the Arctis 3, 5, and 7 and the GameDAC, a high-quality external USB sound card equipped with the ESS Sabre 9018Q2C DAC chip and a host of advanced features, fully usable and configurable without any drivers."

    Here is some more Tech News from around the web:

    Audio Corner

    ADATA EMIX H30 + SOLOX F30 Bundle @ TechPowerUp
    ASUS Strix Fusion 500 @ Guru of 3D
    HyperX CLOUD Alpha Pro Gaming Headset Review @ NikKTech
    Gamdias HEPHAESTUS P1 @ Benchmark Reviews
    ROCCAT Khan Pro Gaming Headset Review @ NikKTech
    Roccat Khan Pro @ TechPowerUp
    Sennheiser GSP 301 & 303 @ Kitguru
    Edifier S880DB Review: Hi-Res Audio Speakers @ Kitguru
    Great Wireless Audio For $49: A Review Of Sbode’s M400 Bluetooth Speaker @ Techgage
    Audioengine A5+ Wireless Speakers @ Techspot

     

    Source: TechPowerUp | Comments (1)

    Going back for a third serving of Raspberry Pi
    Subject: General Tech, Systems | March 22, 2018 - 04:10 PM | Jeremy Hellstrom
    Tagged: sbc, Raspberry Pi 3, Raspberry Pi, gigabit ethernet, dual band, bluetooth, 802.11ac

    Tim did a great write-up of the new hardware found in the Raspberry Pi 3 Model B+, which you should check out below if you missed it.  Technical specifications are only the first step, as we still need to see how the new 1.4GHz Cortex A53 cores perform in benchmarks, and Phoronix has published just that.  They compared the Pi 3 to a variety of chips including the previous model, ASUS' Tinkerboard, the two Jetson boards, a few Celerons, and even a Core i3.  Overall the chip showed an advantage over the previous model; not earth-shattering, but as the price remains at $35 for the Pi 3, that is still a good deal.

    "I've been spending the past few days putting the Raspberry Pi 3 Model B+ through its paces the past few days with an array of benchmarks while comparing the performance to other ARM SBCs as well as a few lower-end Intel x86 systems too. Here is all you need to know about the Raspberry Pi 3 B+ performance."

    Here is some more Tech News from around the web:

    Tech Talk

    2 + 2 = 4, er, 4.1, no, 4.3... Nvidia's Titan V GPUs spit out 'wrong answers' in scientific simulations @ The Register
    Best Buy Stops Selling Huawei Smartphones @ Slashdot
    Apple to enter trial production of new iPhone series in 2Q18, say sources @ DigiTimes
    ICO still waiting for 'urgent' warrant to raid Cambridge Analytica's London HQ @ The Inquirer
    Mozilla Pulls Advertising from Facebook @ Slashdot
    Facebook's Zuck comes out of hiding, admits company 'made mistakes' @ The Inquirer
    Seagate's HAMR to drop in 2020: Multi-actuator disk drives on the way @ The Register
    Slack's GDPR changes means admins can now snoop on private chats @ The Inquirer
    Tomb Raider Remasters Have Been Cancelled @ [H]ard|OCP
    HITMAN Spring Pack Is FREE For A Limited Time! @ Tech ARP

     

    Source: Phoronix | Comments (1)

    Podcast #492 - MyDigitalSSD, CalDigit Tuff Drive, and more!
    Subject: General Tech | March 22, 2018 - 12:37 PM | Alex Lustenberg
    Tagged: winml, vive pro, video, Tobii, SBX, rtx, qualcomm, podcast, pny, MyDigitalSSD, logitech, htc, G560, G513, dxr, CS900, corsair, caldigit, AX1600i

    PC Perspective Podcast #492 - 03/22/18

    Join us this week for MyDigitalSSD, CalDigit Tuff Drive, and more!

    You can subscribe to us through iTunes and you can still access it directly through the RSS page HERE.

    The URL for the podcast is: http://pcper.com/podcast - Share with your friends!

    iTunes - Subscribe to the podcast directly through the iTunes Store (audio only)
    Video version on iTunes
    Google Play - Subscribe to our audio podcast directly through Google Play!
    RSS - Subscribe through your regular RSS reader (audio only)
    Video version RSS feed
    MP3 - Direct download link to the MP3 file

    Hosts: Jim Tanous, Jeremy Hellstrom, Josh Walrath

    Peanut Gallery: Alex Lustenberg

    Program length: 1:08:16

    Podcast topics of discussion:
    Join our spam list to get notified when we go live!
    Patreon
    PCPer Mailbag #35 - 3/16/2018
    Merch! http://bit.ly/pcpermerch
    Week in Review:
    0:07:25 MyDigitalSSD SBX M.2 NVMe SSD Full Capacity Roundup - 128GB, 256GB, 512GB, 1TB Tested!
    0:13:55 CalDigit Tuff Rugged External Drive: Take Your Data For a Swim
    0:20:30 Corsair AX1600i Digital ATX Power Supply Review
    RX Bar
    News items of interest:
    0:26:08 Logitech Announces G560 Speakers and G513 Keyboard with LIGHTSYNC
    0:31:35 PNY Adds CS900 960GB SATA SSD To Budget SSD Series
    0:34:00 HTC announces VIVE Pro Pricing, Available now for Preorder
    0:37:35 Tobii and Qualcomm Announce Collaboration on Mobile VR Headsets with Eye-Tracking
    GDC 2018: Qualcomm Talks Future of VR and AR with Upcoming Dev Kit
    0:40:18 GDC 2018: Microsoft Announces DirectX Raytracing (DXR)
    0:45:25 NVIDIA RTX Technology Accelerates Ray Tracing for Microsoft DirectX Raytracing API
    0:47:30 GDC 2018: Microsoft Discusses WinML API for Games
    0:51:00 April releases are coming from AMD and Intel
    0:53:15 AMD finalizing fixes for Ryzen, EPYC security vulnerabilities
    0:57:00 Intel promises 2018 processors with hardware mitigation for Spectre and Meltdown
    Picks of the Week:
    0:59:15 Jeremy - Remember Al’s love of Obduction?
    1:00:55 Josh - My kid loves them.
    1:03:30 Jim: Xbox Game Pass
    http://pcper.com/podcast
    http://twitter.com/ryanshrout and http://twitter.com/pcper
    Closing/outro

    Source: | Comments (2)

    Pi Foundation Releases Refreshed $35 Raspberry Pi 3 Model B+
    Subject: General Tech | March 21, 2018 - 11:48 PM | Tim Verry
    Tagged: sbc, Raspberry Pi 3, Raspberry Pi, gigabit ethernet, dual band, bluetooth, 802.11ac

    The Raspberry Pi Foundation recently released the Raspberry Pi 3 Model B+ with refreshed hardware. The new single board computer retains its predecessor's $35 price tag while including a tweaked SoC with faster clockspeeds and improved power management as well as moves to modern Gigabit Ethernet and dual band 802.11ac Wi-Fi networking. The Pi Foundation has further managed to shield the board such that it can be certified as a radio board under FCC rules which should make end product certification an easier process.

    On the outside, not much has changed, as the Raspberry Pi 3 Model B+ has the same form factor, board layout, and I/O options as previous models. Digging a bit deeper though, nestled under a new heatspreader lies the Broadcom BCM2837B0, which can run its four ARM Cortex A53 cores at up to 1.4 GHz or run at the same 1.2 GHz clocks as the Pi 3 Model B (BCM2837) while using less power. A MaxLinear MxL7704 power management IC regulates board power and processor clockspeeds to keep it from overheating. Below 70°C the SoC runs at 1.4 GHz, but if it heats up above that it will reduce voltage and clocks to 1.2 GHz. If the chip continues to heat up past 80°C it trips the thermal throttle, and clockspeeds will be further reduced until temperatures fall. The Pi Foundation notes that the new heatspreader should help it run faster and for longer lengths of time than the Pi 3 Model B. On the networking side of things, the upgraded Wi-Fi is powered by a Cypress CYW4355 and a Proant PCB antenna (similar to the one used in the Pi Zero W) for 2.4 GHz and 5 GHz 802.11ac Wi-Fi and Bluetooth 4.2 Low Energy, while the Gigabit Ethernet is powered by a LAN7515 chipset.
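    To make the clock management described above concrete, here is a minimal JavaScript sketch of those thresholds as the Pi Foundation describes them. The 70°C/80°C trip points and the 1.4/1.2 GHz steps come from the article; the function itself and the step size used below 1.2 GHz are illustrative assumptions, not the firmware's actual logic.

    // Illustrative sketch of the Pi 3 B+ clock/thermal behavior described above.
    // Thresholds (70°C, 80°C) and clock steps (1.4/1.2 GHz) are from the article;
    // the function and the throttling step size are assumptions, not the real firmware.
    function targetClockMHz(socTempC, currentClockMHz) {
        if (socTempC < 70) {
            return 1400;                               // full 1.4 GHz below 70°C
        }
        if (socTempC <= 80) {
            return 1200;                               // reduced voltage/clocks up to 80°C
        }
        // Past 80°C the thermal throttle trips and clocks keep stepping down
        // until temperatures fall (the 100 MHz step here is an assumption).
        return Math.max(600, currentClockMHz - 100);
    }

    console.log(targetClockMHz(65, 1400)); // 1400
    console.log(targetClockMHz(75, 1400)); // 1200
    console.log(targetClockMHz(85, 1200)); // 1100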

    Note that the wired networking is still limited by the USB 2.0 bus, and the board itself has not been upgraded with USB 3.0 support or any USB 3 ports, unlike many of its competitors (which is unfortunate). According to the Pi Foundation, the new SBC can hit 102 Mbps over 5 GHz Wi-Fi and up to 315 Mbps over a wired connection, which is a huge boost over the Pi 3 Model B's ~36 Mbps wireless and ~95 Mbps wired performance. Interestingly, the new board features PXE boot turned on by default and support for PoE (802.3af) using a PoE HAT, which has a switched power supply for converting the 37V DC from PoE sources to the 5V/2.5A needed by the Pi.

    The Raspberry Pi 3 with its POE HAT connected via the 40-pin GPIO header.

    The Videocore IV GPU, HDMI 1.3, 1GB LPDDR2, USB 2.0, and other features of the small form factor PC remain unchanged. The Pi Foundation plans to produce this model until 2023 and hints at "+" model refreshes for the Pi 3 Model A and the Pi CM3 and CM3L compute modules coming soon. The Pi 3 Model B+ is listed for $35 (the same as the non-plus model) and joins the existing lineup of Pi 3s, of which the foundation has sold 9 million so far!

    What are your thoughts on the refreshed Pi 3?

    Source: Raspberry Pi Foundation | Comments (10)

    GDC 2018: NVIDIA Adds new Ansel and Highlights features to GeForce Experience
    Subject: Graphics Cards | March 21, 2018 - 09:37 PM | Ken Addison
    Tagged: GDC, GDC 2018, nvidia, geforce experience, ansel, nvidia highlights, call of duty wwii, fortnite, pubg, tekken 7

    Building upon the momentum of being included in the two most popular PC games in the world, PlayerUnknown's Battlegrounds and Fortnite, NVIDIA Highlights (previously known as ShadowPlay Highlights) is expanding to even more titles. Support for Call of Duty: WWII and Tekken 7 is available now, with Dying Light: Bad Blood and Escape from Tarkov coming soon.

    For those unfamiliar with NVIDIA Highlights, it’s a feature that when integrated into a game, allows for the triggering of automatic screen recording when specific events happen. For example, think of the kill cam in Call of Duty. When enabled, Highlights will save a recording whenever the kill cam is triggered, allowing you to share exciting gameplay moments without having to think about it.

    Animated GIF support has also been added to NVIDIA Highlights, allowing users to share shorter clips to platforms including Facebook, Google Photos, or Weibo.

    In addition to supporting more games and formats, NVIDIA has also released the NVIDIA Highlights SDK, as well as plugins for Unreal Engine and Unity platforms. Previously, NVIDIA was working with developers to integrate Highlights into their games, but now developers will have the ability to add the support themselves.

    Hopefully, these changes mean a quicker influx of titles with Highlights support beyond the 16 currently supported.

    In addition to enhancements in Highlights, NVIDIA has also launched a new sharing site for screen captures performed with the Ansel in-game photography tool.

    The new ShotWithGeforce.com lets users upload and share their captures from any Ansel supported game.

    Screenshots uploaded to Shot With GeForce are tagged with the specific game the capture is from, making it easy for users to scroll through all of the uploaded captures from a given title.

    Source: NVIDIA | Read more | Comments (1)

    Wolfenstein II is new, but you don't necessarily need new hardware to enjoy it
    Subject: General Tech | March 21, 2018 - 03:27 PM | Jeremy Hellstrom
    Tagged: gaming, wolfenstein ii, the new colossus

    Are you on the fence about picking up the new Wolfenstein because you aren't sure your GPU can handle it?  Overclockers Club tested the game with some older hardware as well as the current generation, including the GTX 770 and GTX 980 in addition to an RX Vega 64 and GTX 1080.  After running through the benchmarks, they find that the GTX 980 is more than capable of handling this game, so grab it if you have a GPU of that calibre.  If you are looking for the best possible experience, the Vega 64 is the way to go.

    "Having additional GPUs may have proven useful for this work since we leapt from barely playable on the GTX 770 to max settings on the GTX 980. The GTX 1080 naturally surpassed the GTX 980 and the RX Vega 64 beat them all, both at stock and with the undervolt and power limit. Based on this sampling of performance data, if you could pick any GPU to play Wolfenstein II: The New Colossus on, the RX Vega 64 would be the best of those test. However, you can very comfortably go with something older and cheaper, like the GTX 980 without compromising a setting. To my mind, that is pretty impressive for a modern game with modern graphics."

    Here is some more Tech News from around the web:

    Gaming

    HTC reveals Vive Pro availability, opens pre-orders at $799 @ HEXUS
    A Total War Saga: Thrones Of Britannia delayed into May @ Rock, Paper, SHOTGUN
    DirectX Raytracing tech demo video published by Futuremark @ HEXUS
    Kingdom Come: Deliverance Review @ OCC
    Final Fantasy XV PC Game Review & 25 card performance/IQ evaluation @ BabelTechReviews
    Final Fantasy XV Benchmark Performance Analysis @ TechPowerUp

    Source: Overclockers Club | Read more | Comments (0)

    April releases are coming from AMD and Intel
    Subject: General Tech | March 21, 2018 - 02:48 PM | Jeremy Hellstrom
    Tagged: H310, H370, B360, Q360, Q370, Intel, amd, ryzen 2000, x470, b450

    With both AMD and Intel scheduled to release new chips in a few weeks, it looks like it will be a busy April for reviewers.  Motherboard manufacturers are hoping the retail market will also be busy, as they have all seen slower sales this quarter than they achieved a year ago.  Indeed, total global motherboard shipments slipped 15% in 2017, a noticeable slowdown.  Intel will be refreshing Coffee Lake and adding several new chipsets, while AMD will be introducing Ryzen 2000 as well as two new chipsets.

    From the looks of the names, which are listed at DigiTimes, the naming conventions for the two competing companies will remain annoyingly similar.

    "Asustek Computer, ASRock, Gigabyte Technology and Micro-Star International (MSI) have all begun making deployments, hoping their motherboard shipments in the second quarter can at least remain at levels similar to those a year ago, according market watchers."

    Here is some more Tech News from around the web:

    Tech Talk

    CUDA is Like Owning a Supercomputer @ Hack a Day
    Microsoft Says Windows 10 Spring Creators Update Will Install in 30 Minutes @ Slashdot
    Blackberry enlists those tired of life to promote its phones in exchange for swag @ The Inquirer
    CTS who? AMD brushes off chipset security bugs with firmware patches @ The Register
    How To Detect + Fix Sitemap Problems In Google Search Console @ TechARP
    NETGEAR Nighthawk XR500 Wireless Gaming Router @ Kitguru
    Nitro Concepts Series S300 Gaming Chair @ TechPowerUp

     

    Source: DigiTimes | Comments (9)

    GDC 2018: Qualcomm Talks Future of VR and AR with Upcoming Dev Kit
    Subject: General Tech | March 21, 2018 - 09:20 AM | Sebastian Peak
    Tagged: xr, VR, Tobii, qualcomm, HMD, GDC 2018, GDC, eye-tracking, developers, dev kit, AR

    We have recently covered news of Qualcomm's ongoing VR/AR efforts (the two terms now combine as "XR", for eXtended reality), with news of the Snapdragon 845-powered reference HMD and more recently the collaboration with Tobii to bring eye-tracking to the Qualcomm development platform. Today at GDC Qualcomm is mapping out their vision for the future of XR, and providing additional details about the Snapdragon 845 dev kit - and announcing support for the HTC Vive Wave SDK.

    From Qualcomm:

    For the first time, many new technologies that are crucial for an optimal and immersive VR user experience will be supported in the Snapdragon 845 Virtual Reality Development Kit. These include:

    Room-scale 6DoF SLAM: The Snapdragon 845 Virtual Reality Development Kit is engineered to help VR developers create applications that allow users to explore virtual worlds, moving freely around in a room, rather than being constrained to a single viewing position. Un-tethered mobile VR experiences like these can benefit from the Snapdragon 845 Virtual Reality Development Kit’s pre-optimized hardware and software for room-scale six degrees of freedom (6DoF) with “inside-out” simultaneous localization and mapping (SLAM). All of this is designed to be accomplished without any external setup in the room by the users, and without any cables or wires.

    Qualcomm® Adreno™ Foveation: Our eyes are only able to observe significant details in a very small center of our field of vision - this region is called the “fovea”. Foveated rendering utilizes this understanding to boost performance & save power, while also improving visual quality. This is accomplished through multiple technology advancements for multi-view, tile-based foveation with eye-tracking and fine grain preemption to help VR application developers deliver truly immersive visuals with optimal power efficiency.

    Eye Tracking: Users naturally convey intentions about how and where they want to interact within virtual worlds through their eyes. Qualcomm Technologies worked with Tobii AB to develop an integrated and optimized eye tracking solution for the Snapdragon 845 VR Development Kit. The cutting-edge eye tracking solution on Snapdragon 845 VR Development Kit is designed to help developers utilize Tobii’s EyeCore™ eye tracking algorithms to create content that utilizes gaze direction for fast interactions, and superior intuitive interfaces.

    Boundary System: The new SDK for the Snapdragon 845 VR Development Kit supports a boundary system that is engineered to help VR application developers accurately visualize real-world spatial constraints within virtual worlds, so that their applications can effectively manage notifications and play sequences for VR games or videos, as the user approaches the boundaries of the real-world play space.

    In addition to enhancing commercial reach for the VR developer community, Qualcomm Technologies is excited to announce support for the HTC Vive Wave™ VR SDK on the Snapdragon 845 Virtual Reality Development Kit, anticipated to be available later this year. The Vive Wave™ VR SDK is a comprehensive tool set of APIs that is designed to help developers create high-performance, Snapdragon-optimized content across diverse hardware vendors at scale, and offer a path to monetizing applications on future HTC Vive ready products via the multi-OEM Viveport™ application store.

    The Snapdragon 845 HMD/dev kit and SDK are expected to be available in Q2 2018.

    Source: Qualcomm | Comments (0)

    AMD finalizing fixes for Ryzen, EPYC security vulnerabilities
    Subject: Processors | March 20, 2018 - 04:33 PM | Ryan Shrout
    Tagged: ryzenfall, masterkey, fallout, cts labs, chimera, amd

    AMD’s CTO Mark Papermaster released a blog post today that both acknowledges the security vulnerabilities first shown in a CTS Labs report last week and lays the foundation for the mitigations to be released. Though the company had already acknowledged the report, and at least one other independent security company had validated the claims, we had yet to hear from AMD officially on the potential impact and what fixes might be possible for these concerns.

    In the write-up, Papermaster is clear to call out the short period of time AMD was given with this information, quoting “less than 24 hours” from the time it was notified to the time the story was public on news outlets and blogs across the world. It is important to point out, for those who may not follow the security landscape closely, that this has no relation to the Spectre and Meltdown issues affecting the industry, and that what CTS did find has nothing to do with the Zen architecture itself. Instead, the problem revolves around the embedded security protocol processor; while that is an important distinction moving forward, from a practical view to customers this is one and the same.

    AMD states that it has “rapidly completed its assessment and is in the process of developing and staging the deployment of mitigations.” Rapidly is an understatement – going from blindsided to an organized response is a delicate process and AMD has proven its level of sincerity with the priority it placed on this.

    Papermaster goes on to mention that all these exploits require administrative access to the computer being infected, a key differentiator from the Spectre/Meltdown vulnerabilities. The post points out that “any attacker gaining unauthorized administrative access would have a wide range of attacks at their disposal well beyond the exploits identified in this research.” I think AMD does an excellent job threading the needle in this post balancing the seriousness of these vulnerabilities with the overzealous hype that was created upon their initial release and the accompanying financial bullshit that followed.

    AMD provides an easy to understand table with a breakdown of the vulnerabilities, the potential impact of the security risk, and what the company sees as its mitigation capability. Both sets that affect the secure processor in the Ryzen and EPYC designs are addressable with a firmware update for the secure unit itself, distributed through a standard BIOS update. For the Promontory chipset issue, AMD is utilizing a combination of a BIOS update and additional work with ASMedia to further enhance the security updates.

    That is the end of the update from AMD at this point. In my view, the company is doing a satisfactory job addressing the problems in what must be an insanely accelerated timetable. I do wish AMD were willing to offer more specific timetables for the distribution of those security patches, and how long we should expect to wait to see them in the form of BIOS updates for consumer and enterprise customers. For now, we’ll monitor the situation and look for other input from AMD, CTS, or secondary security firms to see if the risks laid out ever materialize.

    For what could have been a disastrous week for AMD, it has pivoted to provide a controlled, well-executed plan. Despite the hype and hysteria that might have started with stock-shorting and buzzwords, the plight of the AMD processor family looks stable.

    Source: AMD | Comments (25)

    Corsair AX1600i Digital ATX Power Supply Review
    Author: Lee Garbutt | Date: March 20, 2018 | Subject: Cases and Cooling | Manufacturer: Corsair
    Tagged: Digital Power Supply, Corsair Link, AX1600i, 80 Plus Titanium

    Introduction and Features

    Introduction

    Corsair is a well-respected name in the PC industry and they continue to offer a complete line of products for enthusiasts, gamers, and professionals alike.  Today we are taking a detailed look at Corsair’s latest flagship power supply, the AX1600i Digital ATX power supply unit. This is the most technologically advanced power supply we have reviewed to date. Over time, we often grow numb to marketing terms like “most technologically advanced”, “state-of-the-art”, “ultra-stable”, “super-high efficiency”, etc., but in the case of the AX1600i Digital PSU, we have seen these claims come to life before our eyes.

    1,600 Watts: 133.3 Amps on the +12V outputs!

    The AX1600i Digital power supply is capable of delivering up to 1,600 watts of continuous DC power (133.3 Amps on the +12V rails) and is 80 Plus Titanium certified for super-high efficiency. If that’s not impressive enough, the PSU can do it while operating on 115 VAC mains and with an ambient temperature up to 50°C (internal case temperature). This beast was made for multiple power-hungry graphics adapters and overclocked CPUs.
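    As a quick sanity check on those headline numbers, here is a back-of-the-envelope calculation in JavaScript; the 94% figure is the 80 Plus Titanium efficiency quoted later in this review, and the wall-draw numbers are rough estimates, not Corsair specifications.

    // Back-of-the-envelope check of the AX1600i headline numbers quoted above.
    const continuousWatts = 1600;        // rated continuous DC output
    const railVoltage = 12;              // single +12V rail

    // 1600 W / 12 V ≈ 133.3 A, matching the "133.3 Amps on the +12V rails" claim.
    const railAmps = continuousWatts / railVoltage;

    // Assuming roughly 94% efficiency (the 80 Plus Titanium figure cited in this
    // review), full load pulls about 1700 W from a 115 VAC outlet, or ~14.8 A.
    const assumedEfficiency = 0.94;
    const wallWatts = continuousWatts / assumedEfficiency;
    const wallAmps = wallWatts / 115;

    console.log(railAmps.toFixed(1));    // "133.3"
    console.log(wallWatts.toFixed(0));   // "1702"
    console.log(wallAmps.toFixed(1));    // "14.8"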

    The AX1600i is a digital power supply, which provides two distinct advantages. First, it incorporates Digital Signal Processing (DSP) on both the primary and secondary sides, which allows the PSU to deliver extremely tight voltage regulation over a wide range of loads. And second, the AX1600i features the digital Corsair Link, which enables the PSU to be connected to the PC’s motherboard (via USB) for real-time monitoring (efficiency, voltage regulation, and power usage) and control (over-current protection and fan speed profiles).

    Quiet operation with a semi-fanless mode (zero-rpm fan mode up to ~40% load) might not be at the top of your feature list when shopping for a 1,600 watt PSU, but the AX1600i is up to the challenge.

    (Courtesy of Corsair)

    Corsair AX1600i Digital ATX PSU Key Features:

    •    Digital Signal Processor (DSP) for extremely clean and efficient power
    •    Corsair Link Interface for monitoring and adjusting performance
    •    1,600 watts continuous power output (50°C)
    •    Dedicated single +12V rail (133.3A) with user-configurable virtual rails
    •    80 Plus Titanium certified, delivering up to 94% efficiency
    •    Ultra-low noise 140mm Fluid Dynamic Bearing (FDB) fan
    •    Silent, Zero RPM mode up to ~40% load (~640W)
    •    Self-test switch to verify power supply functionality
    •    Premium components (GaN transistors and all Japanese made capacitors)
    •    Fully modular cable system
    •    Conforms to ATX12V v2.4 and EPS 2.92 standards
    •    Universal AC input (100-240V) with Active PFC
    •    Safety Protections: OCP, OVP, UVP, SCP, OTP, and OPP
    •    Dimensions: 150mm (W) x 86mm (H) x 200mm (L)
    •    10-Year warranty and legendary Corsair customer service
    •    $449.99 USD

    Please continue reading our review of the AX1600i Digital PSU !!!

    Read more | Comments (2)

    GDC 2018: Microsoft Announces DirectX Raytracing (DXR)
    Author: Scott Michaud | Date: March 19, 2018 | Subject: Graphics Cards | Manufacturer: Microsoft
    Tagged: raytracing, nvidia, microsoft, Intel, dxr, directx raytracing, DirectX 12, DirectX, amd

    O Rayly? Ya Rayly. No Ray!

    Microsoft has just announced a raytracing extension to DirectX 12, called DirectX Raytracing (DXR), at the 2018 Game Developers Conference in San Francisco.

    The goal is not to completely replace rasterization… at least not yet. Instead, raytracing will mostly be used for effects that require supplementary datasets, such as reflections, ambient occlusion, and refraction. Rasterization, the typical way that 3D geometry gets drawn on a 2D display, converts triangle coordinates into screen coordinates, and then a point-in-triangle test runs across every sample. This will likely occur once per AA sample (minus pixels that the triangle can’t possibly cover -- such as a pixel outside of the triangle's bounding box -- but that's just optimization).

    For rasterization, each triangle is laid on a 2D grid corresponding to the draw surface. If any sample is in the triangle, the pixel shader is run. This example shows the rotated grid MSAA case.
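    To make that coverage test concrete, here is a minimal JavaScript sketch of a point-in-triangle test using edge functions (signed areas). This is the standard textbook formulation of the check described above, not any actual GPU rasterizer.

    // Minimal point-in-triangle coverage test using edge functions (signed areas).
    function edge(ax, ay, bx, by, px, py) {
        // Positive when (px, py) lies to the left of the edge a->b.
        return (bx - ax) * (py - ay) - (by - ay) * (px - ax);
    }

    function sampleCovered(tri, px, py) {
        const [a, b, c] = tri;
        const w0 = edge(a.x, a.y, b.x, b.y, px, py);
        const w1 = edge(b.x, b.y, c.x, c.y, px, py);
        const w2 = edge(c.x, c.y, a.x, a.y, px, py);
        // The sample is inside (or on an edge) when all three signs agree.
        return (w0 >= 0 && w1 >= 0 && w2 >= 0) || (w0 <= 0 && w1 <= 0 && w2 <= 0);
    }

    const tri = [{ x: 0, y: 0 }, { x: 8, y: 0 }, { x: 0, y: 8 }];
    console.log(sampleCovered(tri, 2, 2)); // true  -> the pixel shader would run
    console.log(sampleCovered(tri, 7, 7)); // false -> sample outside the triangle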

    A program, called a pixel shader, is then run with some set of data that the GPU could gather on every valid pixel in the triangle. This set of data typically includes things like world coordinate, screen coordinate, texture coordinates, nearby vertices, and so forth. This lacks a lot of information, especially things that are not visible to the camera. The application is free to provide other sources of data for the shader to crawl… but what?

    Cubemaps are useful for reflections, but they don’t necessarily match the scene. Voxels are useful for lighting, as seen with NVIDIA’s VXGI and VXAO.

    This is where DirectX Raytracing comes in. There are quite a few components to it, but it’s basically a new pipeline that handles how rays are cast into the environment. After being queued, it starts out with a ray-generation stage, and then, depending on what happens to the ray in the scene, there are close-hit, any-hit, and miss shaders. Ray generation allows the developer to set up how the rays are cast, where they call an HLSL intrinsic instruction, TraceRay (which is a clever way of invoking them, by the way). This function takes an origin and a direction, so you can choose to, for example, cast rays only in the direction of lights if your algorithm was to, for instance, approximate partially occluded soft shadows from a non-point light. (There are better algorithms to do that, but it's just the first example that came off the top of my head.) The close-hit, any-hit, and miss shaders occur at the point where the traced ray ends.
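    A toy JavaScript sketch of that control flow may help: a ray-generation loop decides the origin and direction of each ray, and a closest-hit or miss "shader" runs depending on what the ray finds. The scene, intersection test, and shader bodies below are illustrative stand-ins, not the DXR API.

    // Toy sketch of the DXR-style flow described above; not the actual API.
    function missShader(direction) {
        return { color: "skybox", direction };          // e.g. sample an environment map
    }

    function traceRay(scene, origin, direction) {
        let closest = null;
        for (const obj of scene) {
            const t = obj.intersect(origin, direction); // distance along the ray, or null
            if (t !== null && (closest === null || t < closest.t)) {
                closest = { t, obj };
            }
        }
        return closest ? closest.obj.closestHit(origin, direction, closest.t)
                       : missShader(direction);
    }

    // Ray generation: choose where each ray starts and points, analogous to
    // calling the TraceRay(origin, direction) intrinsic described above.
    function rayGeneration(scene, width, height, cameraOrigin) {
        const image = [];
        for (let y = 0; y < height; y++) {
            for (let x = 0; x < width; x++) {
                const direction = { x: x - width / 2, y: y - height / 2, z: 1 };
                image.push(traceRay(scene, cameraOrigin, direction));
            }
        }
        return image;
    }

    // Usage with a single hypothetical object:
    const toyScene = [{
        intersect: (o, d) => (d.z > 0 ? 5 : null),      // pretend everything ahead hits at t = 5
        closestHit: (o, d, t) => ({ color: "red", t }),
    }];
    console.log(rayGeneration(toyScene, 2, 2, { x: 0, y: 0, z: 0 }).length); // 4 shaded samples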

    To connect this with current technology, imagine that ray-generation is like a vertex shader in rasterization, where it sets up the triangle to be rasterized, leading to pixel shaders being called.

    Even more interesting – the close-hit, any-hit, and miss shaders can call TraceRay themselves, which is used for multi-bounce and other recursive algorithms (see: figure above). The obvious use case might be reflections, which is the headline of the GDC talk, but they want it to be as general as possible, aligning with the evolution of GPUs. Looking at NVIDIA’s VXAO implementation, it also seems like a natural fit for a raytracing algorithm.

    Speaking of data structures, Microsoft also detailed what they call the acceleration structure. Each object is composed of two levels. The top level contains per-object metadata, like its transformation and whatever else data that the developer wants to add to it. The bottom level contains the geometry. The briefing states, “essentially vertex and index buffers” so we asked for clarification. DXR requires that triangle geometry be specified as vertex positions in either 32-bit float3 or 16-bit float3 values. There is also a stride property, so developers can tweak data alignment and use their rasterization vertex buffer, as long as it's HLSL float3, either 16-bit or 32-bit.
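    As a rough illustration of that two-level layout (per-object metadata on top, float3 vertex positions with a stride underneath), here is a small JavaScript sketch; the field names are assumptions for illustration, not the actual DXR structures.

    // Rough sketch of the two-level acceleration structure described above.
    // Bottom level: geometry, "essentially vertex and index buffers" -- float3
    // positions, with a stride so other per-vertex data can be skipped over.
    const bottomLevel = {
        vertexBuffer: new Float32Array([
            // x, y, z, (one padding float that the stride skips)
            0, 0, 0, 0,
            1, 0, 0, 0,
            0, 1, 0, 0,
        ]),
        strideInFloats: 4,   // only the first 3 floats of each vertex are the position
        indexBuffer: new Uint16Array([0, 1, 2]),
    };

    // Top level: per-object metadata such as a transform, referencing the geometry.
    const topLevel = [
        { transform: [10, 0, 0], userData: { material: "mirror" }, geometry: bottomLevel },
        { transform: [-4, 2, 0], userData: { material: "glass" },  geometry: bottomLevel },
    ];

    // Reading a position back honours the stride:
    function positionAt(geo, vertexIndex) {
        const base = vertexIndex * geo.strideInFloats;
        return geo.vertexBuffer.slice(base, base + 3);
    }
    console.log(positionAt(topLevel[0].geometry, 1)); // Float32Array [1, 0, 0]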

    As for the tools to develop this in…

    Microsoft announced PIX back in January 2017. This is a debugging and performance analyzer for 64-bit, DirectX 12 applications. Microsoft will upgrade it to support DXR as soon as the API is released (specifically, “Day 1”). This includes the API calls, the raytracing pipeline resources, the acceleration structure, and so forth. As usual, you can expect Microsoft to support their APIs with quite decent – not perfect, but decent – documentation and tools. They do it well, and they want to make sure it’s available when the API is.

    Example of DXR via EA's in-development SEED engine.

    In short, raytracing is here, but it’s not taking over rasterization. It doesn’t need to. Microsoft is just giving game developers another, standardized mechanism to gather supplementary data for their games. Several game engines have already announced support for this technology, including the usual suspects of anything top-tier game technology:

    Frostbite (EA/DICE)
    SEED (EA)
    3DMark (Futuremark)
    Unreal Engine 4 (Epic Games)
    Unity Engine (Unity Technologies)

    They also said, “and several others we can’t disclose yet”, so this list is not even complete. But, yeah, if you have Frostbite, Unreal Engine, and Unity, then you have a sizeable market as it is. There is always a question about how much each of these engines will support the technology. Currently, raytracing is not portable outside of DirectX 12, because it’s literally being announced today, and each of these engines intends to support more than just Windows 10 and Xbox.

    Still, we finally have a standard for raytracing, which should drive vendors to optimize in a specific direction. From there, it's just a matter of someone taking the risk to actually use the technology for a cool work of art.

    If you want to read more, check out Ryan's post about the also-announced RTX, NVIDIA's raytracing technology.

    6 comments Comments (6) GDC 2018: Microsoft Discusses WinML API for Games Author: Scott Michaud Date: March 19, 2018 Subject: Graphics Cards Manufacturer: Microsoft Tagged: winml, microsoft, facebook, deep neural network, deep learning, amazon It's all fun and games until something something AI.

    Microsoft announced the Windows Machine Learning (WinML) API about two weeks ago, but they did so in a sort-of abstract context. This week, alongside the 2018 Game Developers Conference, they are grounding it in a practical application: video games!

    Specifically, the API provides the mechanisms for game developers to run inference on the target machine. The trained model that it runs against would be in the Open Neural Network Exchange (ONNX) format from Microsoft, Facebook, and Amazon. Like the initial announcement suggests, it can be used for any application, not just games, but… you know. If you want to get a technology off the ground, and it requires a high-end GPU, then video game enthusiasts are good lead users. When run in a DirectX application, WinML kernels are queued on the DirectX 12 compute queue.
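    As a schematic of that load / bind / evaluate flow, here is a short JavaScript sketch. Every function name below is a hypothetical stand-in (the stubs only exist so the sketch runs end to end); this is not the actual WinML API.

    // Hypothetical stand-ins for the flow described above -- NOT the WinML API.
    async function loadOnnxModel(path) {
        // Stand-in for loading a trained model that was exported in ONNX format.
        return { name: path, evaluate: (inputs) => ({ output: `inferred from ${inputs.input}` }) };
    }

    async function evaluateOnComputeQueue(model, binding) {
        // The article notes that in a DirectX title these kernels are queued on
        // the DirectX 12 compute queue; here the stub just evaluates directly.
        return model.evaluate(binding);
    }

    async function runFrameInference(frameTexture) {
        const model = await loadOnnxModel("upscaler.onnx");            // 1. load the trained model
        const binding = { input: frameTexture };                       // 2. bind this frame's inputs
        const result = await evaluateOnComputeQueue(model, binding);   // 3. run inference
        return result.output;                                          // 4. use the inferred output
    }

    runFrameInference("frame_0042").then(console.log); // "inferred from frame_0042"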

    We’ve discussed the concept before. When you’re rendering a video game, simulating an accurate scenario isn’t your goal – the goal is to look like you are. The direct way of looking like you’re doing something is to do it. The problem is that some effects are too slow (or, sometimes, too complicated) to correctly simulate. In these cases, it might be viable to make a deep-learning AI hallucinate a convincing result, even though no actual simulation took place.

    Fluid dynamics, global illumination, and up-scaling are three examples.

    Previously mentioned SIGGRAPH demo of fluid simulation without fluid simulation...
    ... just a trained AI hallucinating a scene based on input parameters.

    Another place where AI could be useful is… well… AI. One way of making AI is to give it some set of data from the game environment, often including information that a player in its position would not be able to know, and having it run against a branching logic tree. Deep learning, on the other hand, can train itself on billions of examples of good and bad play, and produce results based on input parameters. While the two methods do not sound that different, the difference between logic being designed and logic being assembled from an abstract good/bad dataset somewhat abstracts away the potential for assumptions and programmer error. Of course, it abstracts that potential for error into the training dataset, but that’s a whole other discussion.

    The third area that AI could be useful is when you’re creating the game itself.

    There’s a lot of grunt and grind work when developing a video game. Licensing prefab solutions (or commissioning someone to do a one-off asset for you) helps ease this burden, but that gets expensive in terms of both time and money. If some of those assets could be created by giving parameters to a deep-learning AI, then those are assets that you would not need to make, allowing you to focus on other assets and how they all fit together.

    These are three of the use cases that Microsoft is aiming WinML at.

    Sure, these are smooth curves of large details, but the antialiasing pattern looks almost perfect.

    For instance, Microsoft is pointing to an NVIDIA demo where they up-sample a photo of a car, once with bilinear filtering and once with a machine learning algorithm (although not WinML-based). The bilinear algorithm behaves exactly as someone who has used Photoshop would expect. The machine learning algorithm, however, was able to identify the objects that the image intended to represent, and it drew the edges that it thought made sense.
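    For reference, this is roughly what the bilinear half of that comparison does: a minimal JavaScript sketch of bilinear sampling on a grayscale grid. It is purely illustrative and not the demo code referenced above; note how it only blends existing neighbours and never invents new detail.

    // Minimal bilinear sampling on a row-major grayscale image.
    function bilinearSample(pixels, width, height, x, y) {
        const x0 = Math.floor(x), y0 = Math.floor(y);
        const x1 = Math.min(x0 + 1, width - 1), y1 = Math.min(y0 + 1, height - 1);
        const fx = x - x0, fy = y - y0;

        const p = (px, py) => pixels[py * width + px];
        // Blend the four neighbouring pixels by their distance to (x, y).
        const top = p(x0, y0) * (1 - fx) + p(x1, y0) * fx;
        const bottom = p(x0, y1) * (1 - fx) + p(x1, y1) * fx;
        return top * (1 - fy) + bottom * fy;
    }

    // A 2x2 source image: the upscaled midpoint is just the average of its neighbours.
    const src = [0, 100, 100, 200];
    console.log(bilinearSample(src, 2, 2, 0.5, 0.5)); // 100 -- no new detail is invented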

    Like their DirectX Raytracing (DXR) announcement, Microsoft plans to have PIX support WinML “on Day 1”. As for partners? They are currently working with Unity Technologies to provide WinML support in Unity’s ML-Agents plug-in. That’s all the game industry partners they have announced at the moment, though. It’ll be interesting to see who jumps in and who doesn’t over the next couple of years.

    Comments (1)

    NVIDIA RTX Technology Accelerates Ray Tracing for Microsoft DirectX Raytracing API
    Subject: Graphics Cards | March 19, 2018 - 01:00 PM | Ryan Shrout
    Tagged: rtx, nvidia, dxr

    The big news from the Game Developers Conference this week was Microsoft’s reveal of its work on a new ray tracing API for DirectX called DirectX Raytracing. As the name would imply, this is a new initiative to bring the image quality improvements of ray tracing to consumer hardware with the push of Microsoft’s DX team. Scott already has a great write up on that news and current and future implications of what it will mean for PC gamers, so I highly encourage you all to read that over before diving more into this NVIDIA-specific news.

    For those of you who might need a history lesson on ray tracing and its growth, check out this three-part series that ran on PC Perspective as far back as 2006!
    Ray Tracing and Gaming - Quake 4: Ray Traced Project
    Rendering Games with Raytracing Will Revolutionize Graphics
    Ray Tracing and Gaming - One Year Later

    Ray tracing has been the holy grail of real-time rendering. It is the gap between movies and games – though ray tracing continues to improve in performance, it takes the power of offline server farms to render the images for your favorite flicks. Modern game engines continue to use rasterization, an efficient method for rendering graphics but one that depends on tricks and illusions to recreate the intended image. Ray tracing inherently solves the problems that rasterization works around, including shadows, transparency, refraction, and reflection, but it does so at a prohibitive performance cost. That will be changing with Microsoft’s enablement of ray tracing through a common API and technology like what NVIDIA has built to accelerate it.

    Alongside support and verbal commitment to DXR, NVIDIA is announcing RTX Technology. This is a combination of hardware and software advances to improve the performance of ray tracing algorithms on its hardware, and it works hand in hand with DXR. NVIDIA believes this is the culmination of 10 years of development on ray tracing, much of which we have talked about on this site from the world of professional graphics systems. Think Iray, OptiX, and more.

    RTX will run on Volta GPUs only today, which does limit its usefulness to gamers. With the only graphics card on the market that comes even close to being considered a gaming product being the $3000 TITAN V, RTX is more of a forward-looking technology announcement for the company. We can obviously assume then that RTX technology will be integrated on any future consumer gaming graphics cards, be that a revision of Volta or something completely different. (NVIDIA refused to acknowledge plans for any pending Volta consumer GPUs during our meeting.)

    The idea I get from NVIDIA is that today’s RTX is meant as a developer enablement platform, getting them used to the idea of adding ray tracing effects into their games and engines and to realize that NVIDIA provides the best hardware to get that done.

    I’ll be honest with you – NVIDIA was light on the details of what RTX exactly IS and how it accelerates ray tracing. One very interesting example I was given was seen first with the AI-powered ray tracing optimizations for Optix from last year’s GDC. There, NVIDIA demonstrated that using the Volta Tensor cores it could run an AI-powered de-noiser on the ray traced image, effectively improving the quality of the resulting image and emulating much higher ray counts than are actually processed.

    By using the Tensor cores with RTX for DXR implementation on the TITAN V, NVIDIA will be able to offer image quality and performance for ray tracing well ahead of even the TITAN Xp or GTX 1080 Ti, as those GPUs do not have Tensor cores on-board. Does this mean that all (or flagship) consumer graphics cards from NVIDIA will include Tensor cores to enable RTX performance? Obviously, NVIDIA wouldn’t confirm that, but to me it makes sense that we will see that in future generations. The scale of Tensor core integration might change based on price points, but if NVIDIA and Microsoft truly believe in the future of ray tracing to augment and significantly replace rasterization methods, then it will be necessary.

    Though that is one example of hardware specific features being used for RTX on NVIDIA hardware, it’s not the only one that is on Volta. But NVIDIA wouldn’t share more.

    The relationship between Microsoft DirectX Raytracing and NVIDIA RTX is a bit confusing, but it’s easier to think of RTX as the underlying brand for the ability to ray trace on NVIDIA GPUs. The DXR API is still the interface between the game developer and the hardware, but RTX is what gives NVIDIA the advantage over AMD and its Radeon graphics cards, at least according to NVIDIA.

    DXR will still run on other GPUs from NVIDIA that aren’t utilizing the Volta architecture. Microsoft says that any board that can support DX12 Compute will be able to run the new API. But NVIDIA did point out that in its mind, even with a high-end SKU like the GTX 1080 Ti, the ray tracing performance will limit the ability to integrate ray tracing features and enhancements in real-time game engines in the immediate timeframe. It’s not to say it is impossible, or that some engine devs might spend the time to build something unique, but it is interesting to hear NVIDIA imply that only future products will benefit from ray tracing in games.

    It’s also likely that we are months if not a year or more from seeing good integration of DXR in games at retail. And it is also possible that NVIDIA is downplaying the importance of DXR performance today if it happens to be slower than the Vega 64 in the upcoming Futuremark benchmark release.

    Alongside the RTX announcement comes GameWorks Ray Tracing, a collection of turnkey modules based on DXR. GameWorks has its own reputation, and we aren't going to get into that here, but NVIDIA wants to think of this addition to it as a way to "turbo charge enablement" of ray tracing effects in games.

    NVIDIA believes that developers are incredibly excited for the implementation of ray tracing into game engines, and that the demos being shown at GDC this week will blow us away. I am looking forward to seeing them and for getting the reactions of major game devs on the release of Microsoft’s new DXR API. The performance impact of ray tracing will still be a hindrance to larger scale implementations, but with DXR driving the direction with a unified standard, I still expect to see some games with revolutionary image quality by the end of the year. 

    Source: NVIDIA | Comments (15)

    HTC announces VIVE Pro Pricing, Available now for Preorder
    Subject: General Tech, Graphics Cards | March 19, 2018 - 12:09 PM | Ken Addison
    Tagged: vive pro, steamvr, rift, Oculus, Lighthouse, htc

    Today, HTC has provided what VR enthusiasts have been eagerly waiting for since the announcement of the upgraded VIVE Pro headset earlier in the year at CES—the pricing and availability of the new device.

    Available for preorder today, the VIVE Pro will cost $799 for the headset-only upgrade. As we mentioned during the VIVE Pro announcement, this first upgrade kit is meant for existing VIVE users who will be reusing their original controllers and lighthouse trackers to get everything up and running.

The HMD-only kit, with its upgraded resolution and optics, is set to start shipping soon, on April 3, and can be preordered now on the HTC website.

Additionally, your VIVE Pro purchase (through June 3rd, 2018) will come with a free six-month subscription to HTC's VIVEPORT subscription game service, which gives you access to up to 5 titles per month for free (chosen from the VIVEPORT catalog of 400+ games).

There is still no word on the pricing and availability of the full VIVE Pro kit, including the updated Lighthouse 2.0 trackers, but it seems likely that it will come later in the summer, after the upgrade kit has saturated the market of current VIVE owners.

    As far as system requirements go, the HTC site doesn't list any difference between the standard VIVE and the VIVE Pro. One change, however, is the lack of an HDMI port on the new VIVE Pro link box, so you'll need a graphics card with an open DisplayPort 1.2 connector. 

    Source: HTC Comments (0) CalDigit Tuff Rugged External Drive: Take Your Data For a Swim Author: Jim Tanous Date: March 19, 2018 Subject: Storage Manufacturer: CalDigit Tagged: waterproof, usb-c, ruggedized, external hdd, external hard drive, external drive, caldigit CalDigit Tuff Rugged External Drive

There are myriad options when it comes to portable external storage. But if you value durability just as much as portability, those options quickly dry up. Combining a cheap 2.5-inch hard drive with an AmazonBasics enclosure is often just fine for an external storage solution that sits in your climate-controlled office all day, but it's probably not the best choice for field use during your national park photography trip, your scuba diving expedition, or on-site construction management.

    For situations like these where the elements become a factor and the chance of an accidental drop skyrockets, it's a good idea to invest in "ruggedized" equipment. Companies like Panasonic and Dell have long offered laptops custom-designed to withstand unusually harsh environments, and accessory makers have followed suit with ruggedized hard drives.

    Today we're taking a look at one such ruggedized hard drive, the CalDigit Tuff. Released in 2017, the CalDigit Tuff is a 2.5-inch bus-powered external drive available in both HDD and SSD options. CalDigit loaned us the 2TB HDD model for testing.

    Continue reading our review of the CalDigit Tuff rugged USB-C drive!

    Read more Comments (0) PNY Adds CS900 960GB SATA SSD To Budget SSD Series Subject: General Tech, Storage | March 18, 2018 - 12:20 AM | Tim Verry Tagged: ssd, sata 3, pny, 3d nand

PNY has added a new solid-state drive to its CS900 lineup, doubling the line's maximum capacity to 960GB. The SATA-based SSD is a 2.5", 7mm affair suitable for use in laptops and SFF systems, as well as a budget option for desktops.

    The CS900 960GB SSD uses 3D TLC NAND flash and offers ECC, end-to-end data protection, secure erase, and power saving features to protect data and battery life in mobile devices. Unfortunately, information on the controller and NAND flash manufacturer is not readily available though I suspect it uses a Phison controller like PNY's other drives.

The 960GB capacity model is rated for sequential reads of 535 MB/s and sequential writes of 515 MB/s. PNY rates the drive at 2 million hours MTBF and covers it with a 3-year warranty.

We may have to wait for reviews (we know how Allyn loves to tear apart drives!) for more information on this drive, especially where random read/write and latency percentile performance are concerned. The good news is that if the performance is there, the budget price seems right, with an MSRP of $249.99 and an Amazon sale price of $229.99 (just under 24 cents/GB) at the time of writing. Not bad for nearly a terabyte of solid-state storage (though if you don't need that much space, you can alternatively find PCIe-based M.2 SSDs in this price range).
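For reference, those cost-per-gigabyte figures check out as follows; a trivial back-of-the-envelope calculation using the nominal 960GB capacity and the prices listed at the time of writing.

```cpp
// Quick sanity check of the cost-per-gigabyte figures quoted above.
#include <cstdio>

int main() {
    const double capacity_gb = 960.0;   // nominal (decimal) capacity
    const double msrp_usd = 249.99;
    const double street_usd = 229.99;   // Amazon sale price at time of writing

    printf("MSRP:   %.2f cents/GB\n", msrp_usd / capacity_gb * 100.0);   // ~26.0
    printf("Street: %.2f cents/GB\n", street_usd / capacity_gb * 100.0); // ~24.0
    return 0;
}
```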

    Source: PNY 2 comments Comments (2) MyDigitalSSD SBX M.2 NVMe SSD Full Capacity Roundup - 128GB, 256GB, 512GB, 1TB Tested! Author: Allyn Malventano Date: March 16, 2018 Subject: Storage Manufacturer: MyDigitalSSD Tagged: ssd, SBX, PCIe 3.0 x2, NVMe, nand, MyDigitalSSD, M.2, 512GB, 256GB, 2280, 1TB, 128Gb, 1024GB Introduction, Specifications and Packaging

    Introduction:

When one thinks of an M.2 SSD, it is typically associated with either a SATA 6Gb/s link or, more recently, a PCIe 3.0 x4 link. The physical M.2 interface was meant to accommodate future methods of connectivity, but it's easy to overlook the option of stepping back to something like a PCIe 3.0 x2 link. Why take a seemingly backward step on the interface of an SSD? Several reasons, actually. Halving the number of lanes makes for a simpler SSD controller design, which lowers cost. Power savings are also a factor, as driving a given twisted-pair lane at PCIe 3.0 speeds draws measurable current from the host and therefore adds to the heat production of the SSD controller. We recently saw that a PCIe 3.0 x2 drive can still turn in respectable performance despite its lower-bandwidth interface, but how far can we get the price down when pairing that host link with some NAND flash?
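To put rough numbers on that trade-off before getting to the drive itself (a back-of-the-envelope figure, not something from the product documentation): PCIe 3.0 signals at 8 GT/s per lane with 128b/130b encoding, so a x2 link tops out just under 2 GB/s before protocol overhead.

```cpp
// Back-of-the-envelope PCIe 3.0 link bandwidth, ignoring packet overhead.
#include <cstdio>

int main() {
    const double gt_per_s = 8.0;            // PCIe 3.0: 8 GT/s per lane
    const double encoding = 128.0 / 130.0;  // 128b/130b line code
    const double per_lane_gbytes = gt_per_s * encoding / 8.0;  // ~0.985 GB/s

    const int lane_counts[] = {1, 2, 4};
    for (int lanes : lane_counts) {
        printf("x%d: %.2f GB/s\n", lanes, per_lane_gbytes * lanes);
    }
    // x2 works out to roughly 1.97 GB/s, comfortably above a 1.6 GB/s
    // sequential read rating, so the narrower link is not the immediate
    // bottleneck at this performance class.
    return 0;
}
```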

    Enter the MyDigitalSSD SBX series. Short for Super Boot eXpress, the aim of these parts is to offer a reasonably performant PCIe NVMe SSD at something closer to SATA SSD pricing.

    Specifications:

Physical: M.2 2280 (single sided)
Controller: Phison E8 (PS5008-E8)
Capacities: 128GB, 256GB, 512GB, 1TB
Interface: PCIe 3.0 x2, M.2 2280
Sequential: Up to 1.6/1.3 GB/s (R/W)
Random: 240K+ / 180K+ IOPS (R/W)
Weight: 8g
Power: <5W
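Those random ratings translate into rough throughput if we assume the usual 4KB transfer size; note that the spec line above does not actually state the block size, so this is an assumption for illustration only.

```cpp
// Converts the rated random IOPS into approximate throughput,
// assuming 4KB transfers (block size is not stated in the specs above).
#include <cstdio>

int main() {
    const double bytes_per_io = 4096.0;   // assumed 4KB random transfers
    const double read_iops = 240000.0;    // "240K+" rated random reads
    const double write_iops = 180000.0;   // "180K+" rated random writes

    // 240K x 4KB ~= 0.98 GB/s reads; 180K x 4KB ~= 0.74 GB/s writes.
    printf("Random read : %.2f GB/s\n", read_iops * bytes_per_io / 1e9);
    printf("Random write: %.2f GB/s\n", write_iops * bytes_per_io / 1e9);
    return 0;
}
```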

    Packaging:

The MyDigitalDiscount guys keep things extremely simple with their SSD packaging, which is exactly how it should be. It doesn't take much to package and protect an M.2 SSD, and this does the job just fine. They also include a screwdriver and a mounting screw, just in case you run into a laptop that came without one installed.

    Read on for our full review of all capacities of the MyDigitalSSD SBX lineup!

    Read more Comments (8) Tobii and Qualcomm Announce Collaboration on Mobile VR Headsets with Eye-Tracking Subject: General Tech | March 16, 2018 - 09:45 AM | Sebastian Peak Tagged: xr, VR, Tobii, snapdragon 845, qualcomm, mobile, HMD, head mounted display, eye tracking, AR, Adreno 630

Tobii and Qualcomm's collaboration in the VR HMD (head-mounted display) space is a convergence of two recent stories: Tobii's impressive showing of a prototype HMD at CES featuring its eye-tracking technology, and Qualcomm's unveiling last month of its updated mobile VR platform, built around the new Snapdragon 845.

    The Qualcomm Snapdragon 845 Mobile VR Reference Platform

    What does this new collaboration mean for the VR industry? For now it means a new reference design and dev kit with the latest tech from Tobii and Qualcomm:

    "As a result of their collaboration, Tobii and Qualcomm are creating a full reference design and development kit for the Qualcomm Snapdragon 845 Mobile VR Platform, which includes Tobii's EyeCore eye tracking algorithms and hardware design. Tobii will license its eye tracking technologies and system and collaborate with HMD manufacturers on the optical solution for the reference design."

    The press release announcing this collaboration recaps the benefits of Tobii eye tracking in a mobile VR/AR device, which include:

Foveated Rendering: VR/AR devices become aware of where you are looking and can direct high-definition graphics processing power to that exact spot in real time. This enables higher definition displays, more efficient devices, longer battery life and increased mobility. (A rough code sketch of this idea follows after this list.)

Interpupillary Distance: Devices automatically orient images to align with your pupils. This enables devices to adapt to the individual user, helping to increase the visual quality of virtual and augmented reality experiences.

Hand-Eye Coordination: By using your eyes in harmony with your hands and associated controllers, truly natural interaction and immersion, not possible without the use of gaze, is realized.

Interactive Eye Contact: Devices can accurately track your gaze in real time, enabling content creators to express one of the most fundamental dimensions of human interaction – eye contact. VR technologies hold the promise of enabling a new and immersive medium for social interaction. The addition of true eye contact to virtual reality helps deliver that promise.
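To make the foveated rendering idea concrete, here is a minimal, purely illustrative sketch (not Tobii's or Qualcomm's code) of how a renderer might pick a shading rate per screen tile based on its distance from the tracked gaze point; the function name and thresholds are invented for the example.

```cpp
// Illustrative only: coarser shading for tiles farther from the gaze point.
// A real foveated renderer would feed this into variable-rate shading or a
// multi-resolution render target rather than a printf.
#include <cmath>
#include <cstdio>

enum class ShadingRate { Full, Half, Quarter };

ShadingRate RateForTile(float tileX, float tileY, float gazeX, float gazeY) {
    // Coordinates in normalized screen units (0..1); thresholds are made up.
    const float dx = tileX - gazeX;
    const float dy = tileY - gazeY;
    const float dist = std::sqrt(dx * dx + dy * dy);
    if (dist < 0.15f) return ShadingRate::Full;    // fovea: full detail
    if (dist < 0.35f) return ShadingRate::Half;    // near periphery
    return ShadingRate::Quarter;                   // far periphery
}

int main() {
    // Gaze fixed at screen center for the example; a corner tile gets Quarter.
    printf("%d\n", static_cast<int>(RateForTile(0.9f, 0.9f, 0.5f, 0.5f)));
    return 0;
}
```

The gaze-distance test is the whole trick: the eye tracker supplies the fixation point every frame, and everything outside the fovea is rendered at reduced cost.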

    Tobii's prototype eye-tracking HMD

    For its part, Qualcomm's Snapdragon 845-powered VR mobile platform promises greater portability of a better VR experience, with expanded freedom on top of the improved graphics horsepower from the new Adreno 630 GPU in the Snapdragon 845. This portability includes 6DoF (6 degrees of freedom) using external cameras to identify location within a room, eliminating the need for external room sensors.

    "Together, 6DoF and SLAM deliver Roomscale - the ability to track the body and location within a room so you can freely walk around your XR environment without cables or separate room sensors – the first on a mobile standalone device. Much of this is processed on the new dedicated Qualcomm Hexagon Digital Signal Processor (DSP) and Adreno Graphics Processing Unit within the Snapdragon 845. Qualcomm Technologies’ reference designs have supported some of the first wave of standalone VR devices from VR ecosystem leaders like Google Daydream, Oculus and Vive."

It will be up to developers, and to consumer interest in VR going forward, to determine what this collaboration produces. To editorialize briefly: from first-hand experience I can vouch for the positive impact of eye tracking in an HMD, and if future products live up to the promise of a portable, high-performance VR experience (with a more natural feel from less rapid head movement), a new generation of VR enthusiasts could be born.

    Source: PR Newswire 1 comment Comments (1) PCPer Mailbag #35 - 3/16/2018 Subject: Editorial | March 16, 2018 - 09:00 AM | Jim Tanous Tagged: video, Ryan Shrout, pcper mailbag

    It's time for the PCPer Mailbag, our weekly show where Ryan and the team answer your questions about the tech industry, the latest and greatest GPUs, the process of running a tech review website, and more!

    On today's show:

00:50 - Rumble gaming chairs?
02:18 - SSD for VMs?
04:08 - Playing old games in a virtual machine?
05:54 - Socketed GPUs?
09:08 - PC middlemen?
11:02 - NVIDIA tech demos?
13:10 - Raven Ridge motherboard features?
14:45 - x8 and x16 PCIe SSDs?
17:50 - Ryzen 2800X and Radeon RX 600?
19:53 - GeForce Partner Program?
21:14 - Goo goo g'joob?

    Want to have your question answered on a future Mailbag? Leave a comment on this post or in the YouTube comments for the latest video. Check out new Mailbag videos each Friday!

    Be sure to subscribe to our YouTube Channel to make sure you never miss our weekly reviews and podcasts, and please consider supporting PC Perspective via Patreon to help us keep videos like our weekly mailbag coming!

Source: YouTube 7 comments Comments (7)
